Posted to commits@lucenenet.apache.org by ni...@apache.org on 2017/02/26 23:36:49 UTC

[01/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Repository: lucenenet
Updated Branches:
  refs/heads/api-work cfdda330c -> 7a4940010


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs b/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs
new file mode 100644
index 0000000..14dde30
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs
@@ -0,0 +1,758 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Analysis;
+    using Lucene.Net.Index;
+    using Lucene.Net.Util;
+    using NUnit.Framework;
+    using System.IO;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using OpenMode = Lucene.Net.Index.OpenMode;
+
+    /// <summary>
+    /// Tests <seealso cref="PhraseQuery"/>.
+    /// </summary>
+    /// <seealso cref="TestPositionIncrement"/>
+    /*
+     * Remove ThreadLeaks and run (in Eclipse or from the command line) with:
+     * -ea -Drt.seed=AFD1E7E84B35D2B1
+     * to get leaked thread errors.
+     */
+
+    [TestFixture]
+    public class TestPhraseQuery : LuceneTestCase
+    {
+        /// <summary>
+        /// Threshold for comparing floats.
+        /// </summary>
+        public const float SCORE_COMP_THRESH = 1e-6f;
+
+        private static IndexSearcher Searcher;
+        private static IndexReader Reader;
+        private PhraseQuery Query;
+        private static Directory Directory;
+
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            Directory = NewDirectory();
+            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, analyzer, Similarity, TimeZone);
+
+            Documents.Document doc = new Documents.Document();
+            doc.Add(NewTextField("field", "one two three four five", Field.Store.YES));
+            doc.Add(NewTextField("repeated", "this is a repeated field - first part", Field.Store.YES));
+            IIndexableField repeatedField = NewTextField("repeated", "second part of a repeated field", Field.Store.YES);
+            doc.Add(repeatedField);
+            doc.Add(NewTextField("palindrome", "one two three two one", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            doc = new Documents.Document();
+            doc.Add(NewTextField("nonexist", "phrase exist notexist exist found", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            doc = new Documents.Document();
+            doc.Add(NewTextField("nonexist", "phrase exist notexist exist found", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            Reader = writer.Reader;
+            writer.Dispose();
+
+            Searcher = NewSearcher(Reader);
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            public AnalyzerAnonymousInnerClassHelper()
+            {
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
+            }
+
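+            // the 100-position gap inserted between multiple values of the
+            // same field is why TestWrappedPhrase needs a slop of at least
+            // 100 to match across the two "repeated" field instances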
+            public override int GetPositionIncrementGap(string fieldName)
+            {
+                return 100;
+            }
+        }
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Query = new PhraseQuery();
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            Searcher = null;
+            Reader.Dispose();
+            Reader = null;
+            Directory.Dispose();
+            Directory = null;
+        }
+
+        [Test]
+        public virtual void TestNotCloseEnough()
+        {
+            Query.Slop = 2;
+            Query.Add(new Term("field", "one"));
+            Query.Add(new Term("field", "five"));
+            ScoreDoc[] hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+        }
+
+        [Test]
+        public virtual void TestBarelyCloseEnough()
+        {
+            Query.Slop = 3;
+            Query.Add(new Term("field", "one"));
+            Query.Add(new Term("field", "five"));
+            ScoreDoc[] hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+        }
+
+        /// <summary>
+        /// Ensures slop of 0 works for exact matches, but not reversed
+        /// </summary>
+        [Test]
+        public virtual void TestExact()
+        {
+            // slop is zero by default
+            Query.Add(new Term("field", "four"));
+            Query.Add(new Term("field", "five"));
+            ScoreDoc[] hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length, "exact match");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            Query = new PhraseQuery();
+            Query.Add(new Term("field", "two"));
+            Query.Add(new Term("field", "one"));
+            hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length, "reverse not exact");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+        }
+
+        [Test]
+        public virtual void TestSlop1()
+        {
+            // Ensures slop of 1 works with terms in order.
+            Query.Slop = 1;
+            Query.Add(new Term("field", "one"));
+            Query.Add(new Term("field", "two"));
+            ScoreDoc[] hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length, "in order");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            // Ensures slop of 1 does not work for phrases out of order;
+            // must be at least 2.
+            Query = new PhraseQuery();
+            Query.Slop = 1;
+            Query.Add(new Term("field", "two"));
+            Query.Add(new Term("field", "one"));
+            hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length, "reversed, slop not 2 or more");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+        }
+
+        /// <summary>
+        /// As long as slop is at least 2, terms can be reversed
+        /// </summary>
+        [Test]
+        public virtual void TestOrderDoesntMatter()
+        {
+            Query.Slop = 2; // must be at least two for reverse order match
+            Query.Add(new Term("field", "two"));
+            Query.Add(new Term("field", "one"));
+            ScoreDoc[] hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length, "just sloppy enough");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            Query = new PhraseQuery();
+            Query.Slop = 2;
+            Query.Add(new Term("field", "three"));
+            Query.Add(new Term("field", "one"));
+            hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length, "not sloppy enough");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+        }
+
+        /// <summary>
+        /// slop is the total number of positional moves allowed
+        /// to line up a phrase
+        /// </summary>
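+        // For example, against "one two three four five", the in-order phrase
+        // "one three five" lines up after two total moves (slop >= 2), while
+        // the reversed "five three one" takes six moves (slop >= 6), as the
+        // assertions below verify.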
+        [Test]
+        public virtual void TestMultipleTerms()
+        {
+            Query.Slop = 2;
+            Query.Add(new Term("field", "one"));
+            Query.Add(new Term("field", "three"));
+            Query.Add(new Term("field", "five"));
+            ScoreDoc[] hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length, "two total moves");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            Query = new PhraseQuery();
+            Query.Slop = 5; // it takes six moves to match this phrase
+            Query.Add(new Term("field", "five"));
+            Query.Add(new Term("field", "three"));
+            Query.Add(new Term("field", "one"));
+            hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length, "slop of 5 not close enough");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            Query.Slop = 6;
+            hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length, "slop of 6 just right");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+        }
+
+        [Test]
+        public virtual void TestPhraseQueryWithStopAnalyzer()
+        {
+            Directory directory = NewDirectory();
+            Analyzer stopAnalyzer = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, stopAnalyzer));
+            Documents.Document doc = new Documents.Document();
+            doc.Add(NewTextField("field", "the stop words are here", Field.Store.YES));
+            writer.AddDocument(doc);
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            IndexSearcher searcher = NewSearcher(reader);
+
+            // valid exact phrase query
+            PhraseQuery query = new PhraseQuery();
+            query.Add(new Term("field", "stop"));
+            query.Add(new Term("field", "words"));
+            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            QueryUtils.Check(Random(), query, searcher, Similarity);
+
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestPhraseQueryInConjunctionScorer()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);
+
+            Documents.Document doc = new Documents.Document();
+            doc.Add(NewTextField("source", "marketing info", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            doc = new Documents.Document();
+            doc.Add(NewTextField("contents", "foobar", Field.Store.YES));
+            doc.Add(NewTextField("source", "marketing info", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            IndexSearcher searcher = NewSearcher(reader);
+
+            PhraseQuery phraseQuery = new PhraseQuery();
+            phraseQuery.Add(new Term("source", "marketing"));
+            phraseQuery.Add(new Term("source", "info"));
+            ScoreDoc[] hits = searcher.Search(phraseQuery, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            QueryUtils.Check(Random(), phraseQuery, searcher, Similarity);
+
+            TermQuery termQuery = new TermQuery(new Term("contents", "foobar"));
+            BooleanQuery booleanQuery = new BooleanQuery();
+            booleanQuery.Add(termQuery, Occur.MUST);
+            booleanQuery.Add(phraseQuery, Occur.MUST);
+            hits = searcher.Search(booleanQuery, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            QueryUtils.Check(Random(), termQuery, searcher, Similarity);
+
+            reader.Dispose();
+
+            writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE));
+            doc = new Documents.Document();
+            doc.Add(NewTextField("contents", "map entry woo", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            doc = new Documents.Document();
+            doc.Add(NewTextField("contents", "woo map entry", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            doc = new Documents.Document();
+            doc.Add(NewTextField("contents", "map foobarword entry woo", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            reader = writer.Reader;
+            writer.Dispose();
+
+            searcher = NewSearcher(reader);
+
+            termQuery = new TermQuery(new Term("contents", "woo"));
+            phraseQuery = new PhraseQuery();
+            phraseQuery.Add(new Term("contents", "map"));
+            phraseQuery.Add(new Term("contents", "entry"));
+
+            hits = searcher.Search(termQuery, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            hits = searcher.Search(phraseQuery, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+
+            booleanQuery = new BooleanQuery();
+            booleanQuery.Add(termQuery, Occur.MUST);
+            booleanQuery.Add(phraseQuery, Occur.MUST);
+            hits = searcher.Search(booleanQuery, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+
+            booleanQuery = new BooleanQuery();
+            booleanQuery.Add(phraseQuery, Occur.MUST);
+            booleanQuery.Add(termQuery, Occur.MUST);
+            hits = searcher.Search(booleanQuery, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            QueryUtils.Check(Random(), booleanQuery, searcher, Similarity);
+
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSlopScoring()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()).SetSimilarity(new DefaultSimilarity()));
+
+            Documents.Document doc = new Documents.Document();
+            doc.Add(NewTextField("field", "foo firstname lastname foo", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            Documents.Document doc2 = new Documents.Document();
+            doc2.Add(NewTextField("field", "foo firstname zzz lastname foo", Field.Store.YES));
+            writer.AddDocument(doc2);
+
+            Documents.Document doc3 = new Documents.Document();
+            doc3.Add(NewTextField("field", "foo firstname zzz yyy lastname foo", Field.Store.YES));
+            writer.AddDocument(doc3);
+
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            IndexSearcher searcher = NewSearcher(reader);
+            searcher.Similarity = new DefaultSimilarity();
+            PhraseQuery query = new PhraseQuery();
+            query.Add(new Term("field", "firstname"));
+            query.Add(new Term("field", "lastname"));
+            query.Slop = int.MaxValue;
+            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            // Make sure that those matches where the terms appear closer to
+            // each other get a higher score:
+            Assert.AreEqual(0.71, hits[0].Score, 0.01);
+            Assert.AreEqual(0, hits[0].Doc);
+            Assert.AreEqual(0.44, hits[1].Score, 0.01);
+            Assert.AreEqual(1, hits[1].Doc);
+            Assert.AreEqual(0.31, hits[2].Score, 0.01);
+            Assert.AreEqual(2, hits[2].Doc);
+            QueryUtils.Check(Random(), query, searcher, Similarity);
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestToString()
+        {
+            PhraseQuery q = new PhraseQuery(); // term positions taken from the text "this hi this is a test is"
+            q.Add(new Term("field", "hi"), 1);
+            q.Add(new Term("field", "test"), 5);
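+            // positions 0, 2, 3, and 4 are unoccupied, so ToString() renders
+            // them as "?" placeholders around "hi" (position 1) and "test"
+            // (position 5)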
+
+            Assert.AreEqual(q.ToString(), "field:\"? hi ? ? ? test\"");
+            q.Add(new Term("field", "hello"), 1);
+            Assert.AreEqual(q.ToString(), "field:\"? hi|hello ? ? ? test\"");
+        }
+
+        [Test]
+        public virtual void TestWrappedPhrase()
+        {
+            Query.Add(new Term("repeated", "first"));
+            Query.Add(new Term("repeated", "part"));
+            Query.Add(new Term("repeated", "second"));
+            Query.Add(new Term("repeated", "part"));
+            Query.Slop = 100;
+
+            ScoreDoc[] hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length, "slop of 100 just right");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            Query.Slop = 99;
+
+            hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length, "slop of 99 not enough");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+        }
+
+        // work on two docs like this: "phrase exist notexist exist found"
+        [Test]
+        public virtual void TestNonExistingPhrase()
+        {
+            // phrase without repetitions that exists in 2 docs
+            Query.Add(new Term("nonexist", "phrase"));
+            Query.Add(new Term("nonexist", "notexist"));
+            Query.Add(new Term("nonexist", "found"));
+            Query.Slop = 2; // would be found this way
+
+            ScoreDoc[] hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length, "phrase without repetitions exists in 2 docs");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            // phrase with repetitions that exists in 2 docs
+            Query = new PhraseQuery();
+            Query.Add(new Term("nonexist", "phrase"));
+            Query.Add(new Term("nonexist", "exist"));
+            Query.Add(new Term("nonexist", "exist"));
+            Query.Slop = 1; // would be found
+
+            hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length, "phrase with repetitions exists in two docs");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            // phrase I with repetitions that does not exist in any doc
+            Query = new PhraseQuery();
+            Query.Add(new Term("nonexist", "phrase"));
+            Query.Add(new Term("nonexist", "notexist"));
+            Query.Add(new Term("nonexist", "phrase"));
+            Query.Slop = 1000; // would not be found no matter how high the slop is
+
+            hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length, "nonexisting phrase with repetitions does not exist in any doc");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            // phrase II with repetitions that does not exist in any doc
+            Query = new PhraseQuery();
+            Query.Add(new Term("nonexist", "phrase"));
+            Query.Add(new Term("nonexist", "exist"));
+            Query.Add(new Term("nonexist", "exist"));
+            Query.Add(new Term("nonexist", "exist"));
+            Query.Slop = 1000; // would not be found no matter how high the slop is
+
+            hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length, "nonexisting phrase with repetitions does not exist in any doc");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+        }
+
+        /// <summary>
+        /// Works on two fields like this:
+        ///    Field("field", "one two three four five")
+        ///    Field("palindrome", "one two three two one")
+        /// A phrase of size 2 occurring twice, once in order and once in reverse
+        /// (because the doc is a palindrome), is counted twice.
+        /// Also, in this case the order of the terms in the query does not matter.
+        /// Also, when an exact match is found, the sloppy scorer and the exact scorer score the same.
+        /// </summary>
+        [Test]
+        public virtual void TestPalindrome2()
+        {
+            // search on the non-palindrome field, find the phrase with no slop, using the exact phrase scorer
+            Query.Slop = 0; // to use exact phrase scorer
+            Query.Add(new Term("field", "two"));
+            Query.Add(new Term("field", "three"));
+            ScoreDoc[] hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length, "phrase found with exact phrase scorer");
+            float score0 = hits[0].Score;
+            //System.out.println("(exact) field: two three: "+score0);
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            // search on the non-palindrome field, find the phrase with slop 2, though no slop is required here.
+            Query.Slop = 2; // to use sloppy scorer
+            hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length, "just sloppy enough");
+            float score1 = hits[0].Score;
+            //System.out.println("(sloppy) field: two three: "+score1);
+            Assert.AreEqual(score0, score1, SCORE_COMP_THRESH, "exact scorer and sloppy scorer score the same when slop does not matter");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            // search ordered in the palindrome field, find it twice
+            Query = new PhraseQuery();
+            Query.Slop = 2; // must be at least two for both ordered and reversed to match
+            Query.Add(new Term("palindrome", "two"));
+            Query.Add(new Term("palindrome", "three"));
+            hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length, "just sloppy enough");
+            //float score2 = hits[0].Score;
+            //System.out.println("palindrome: two three: "+score2);
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            //commented out for sloppy-phrase efficiency (issue 736) - see SloppyPhraseScorer.phraseFreq().
+            //Assert.IsTrue("ordered scores higher in palindrome",score1+SCORE_COMP_THRESH<score2);
+
+            // search reversed in the palindrome field, find it twice
+            Query = new PhraseQuery();
+            Query.Slop = 2; // must be at least two for both ordered and reversed to match
+            Query.Add(new Term("palindrome", "three"));
+            Query.Add(new Term("palindrome", "two"));
+            hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length, "just sloppy enough");
+            //float score3 = hits[0].Score;
+            //System.out.println("palindrome: three two: "+score3);
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            //commented out for sloppy-phrase efficiency (issue 736) - see SloppyPhraseScorer.phraseFreq().
+            //Assert.IsTrue("reversed scores higher in palindrome",score1+SCORE_COMP_THRESH<score3);
+            //Assert.AreEqual("ordered or reversed does not matter",score2, score3, SCORE_COMP_THRESH);
+        }
+
+        /// <summary>
+        /// Works on two fields like this:
+        ///    Field("field", "one two three four five")
+        ///    Field("palindrome", "one two three two one")
+        /// A phrase of size 3 occurring twice, once in order and once in reverse
+        /// (because the doc is a palindrome), is counted twice.
+        /// Also, in this case the order of the terms in the query does not matter.
+        /// Also, when an exact match is found, the sloppy scorer and the exact scorer score the same.
+        /// </summary>
+        [Test]
+        public virtual void TestPalindrome3()
+        {
+            // search on the non-palindrome field, find the phrase with no slop, using the exact phrase scorer
+            Query.Slop = 0; // to use exact phrase scorer
+            Query.Add(new Term("field", "one"));
+            Query.Add(new Term("field", "two"));
+            Query.Add(new Term("field", "three"));
+            ScoreDoc[] hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length, "phrase found with exact phrase scorer");
+            float score0 = hits[0].Score;
+            //System.out.println("(exact) field: one two three: "+score0);
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            // just make sure no exc:
+            Searcher.Explain(Query, 0);
+
+            // search on the non-palindrome field, find the phrase with slop 4, though no slop is required here.
+            Query.Slop = 4; // to use sloppy scorer
+            hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length, "just sloppy enough");
+            float score1 = hits[0].Score;
+            //System.out.println("(sloppy) field: one two three: "+score1);
+            Assert.AreEqual(score0, score1, SCORE_COMP_THRESH, "exact scorer and sloppy scorer score the same when slop does not matter");
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            // search ordered in the palindrome field, find it twice
+            Query = new PhraseQuery();
+            Query.Slop = 4; // must be at least four for both ordered and reversed to match
+            Query.Add(new Term("palindrome", "one"));
+            Query.Add(new Term("palindrome", "two"));
+            Query.Add(new Term("palindrome", "three"));
+            hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+
+            // just make sure no exc:
+            Searcher.Explain(Query, 0);
+
+            Assert.AreEqual(1, hits.Length, "just sloppy enough");
+            //float score2 = hits[0].Score;
+            //System.out.println("palindrome: one two three: "+score2);
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            //commented out for sloppy-phrase efficiency (issue 736) - see SloppyPhraseScorer.phraseFreq().
+            //Assert.IsTrue("ordered scores higher in palindrome",score1+SCORE_COMP_THRESH<score2);
+
+            // search reversed in the palindrome field, find it twice
+            Query = new PhraseQuery();
+            Query.Slop = 4; // must be at least four for both ordered and reversed to match
+            Query.Add(new Term("palindrome", "three"));
+            Query.Add(new Term("palindrome", "two"));
+            Query.Add(new Term("palindrome", "one"));
+            hits = Searcher.Search(Query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length, "just sloppy enough");
+            //float score3 = hits[0].Score;
+            //System.out.println("palindrome: three two one: "+score3);
+            QueryUtils.Check(Random(), Query, Searcher, Similarity);
+
+            //commented out for sloppy-phrase efficiency (issue 736) - see SloppyPhraseScorer.phraseFreq().
+            //Assert.IsTrue("reversed scores higher in palindrome",score1+SCORE_COMP_THRESH<score3);
+            //Assert.AreEqual("ordered or reversed does not matter",score2, score3, SCORE_COMP_THRESH);
+        }
+
+        // LUCENE-1280
+        [Test]
+        public virtual void TestEmptyPhraseQuery()
+        {
+            BooleanQuery q2 = new BooleanQuery();
+            q2.Add(new PhraseQuery(), Occur.MUST);
+            q2.ToString();
+        }
+
+        /* test that a single term is rewritten to a term query */
+
+        [Test]
+        public virtual void TestRewrite()
+        {
+            PhraseQuery pq = new PhraseQuery();
+            pq.Add(new Term("foo", "bar"));
+            Query rewritten = pq.Rewrite(Searcher.IndexReader);
+            Assert.IsTrue(rewritten is TermQuery);
+        }
+
+        [Test]
+        public virtual void TestRandomPhrases()
+        {
+            Directory dir = NewDirectory();
+            Analyzer analyzer = new MockAnalyzer(Random());
+
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMergePolicy(NewLogMergePolicy()));
+            IList<IList<string>> docs = new List<IList<string>>();
+            Documents.Document d = new Documents.Document();
+            Field f = NewTextField("f", "", Field.Store.NO);
+            d.Add(f);
+
+            Random r = Random();
+
+            int NUM_DOCS = AtLeast(10);
+            for (int i = 0; i < NUM_DOCS; i++)
+            {
+                // must be > 4096 so it spans multiple chunks
+                int termCount = TestUtil.NextInt(Random(), 4097, 8200);
+
+                IList<string> doc = new List<string>();
+
+                StringBuilder sb = new StringBuilder();
+                while (doc.Count < termCount)
+                {
+                    if (r.Next(5) == 1 || docs.Count == 0)
+                    {
+                        // make new non-empty-string term
+                        string term;
+                        while (true)
+                        {
+                            term = TestUtil.RandomUnicodeString(r);
+                            if (term.Length > 0)
+                            {
+                                break;
+                            }
+                        }
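+                        // analyze the random term; the priorException /
+                        // CloseWhileHandlingException pattern mirrors Java's
+                        // IOUtils.closeWhileHandlingException, disposing the
+                        // TokenStream without masking an earlier failure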
+                        IOException priorException = null;
+                        TokenStream ts = analyzer.TokenStream("ignore", new StringReader(term));
+                        try
+                        {
+                            ICharTermAttribute termAttr = ts.AddAttribute<ICharTermAttribute>();
+                            ts.Reset();
+                            while (ts.IncrementToken())
+                            {
+                                string text = termAttr.ToString();
+                                doc.Add(text);
+                                sb.Append(text).Append(' ');
+                            }
+                            ts.End();
+                        }
+                        catch (IOException e)
+                        {
+                            priorException = e;
+                        }
+                        finally
+                        {
+                            IOUtils.CloseWhileHandlingException(priorException, ts);
+                        }
+                    }
+                    else
+                    {
+                        // pick existing sub-phrase
+                        IList<string> lastDoc = docs[r.Next(docs.Count)];
+                        int len = TestUtil.NextInt(r, 1, 10);
+                        int start = r.Next(lastDoc.Count - len);
+                        for (int k = start; k < start + len; k++)
+                        {
+                            string t = lastDoc[k];
+                            doc.Add(t);
+                            sb.Append(t).Append(' ');
+                        }
+                    }
+                }
+                docs.Add(doc);
+                f.SetStringValue(sb.ToString());
+                w.AddDocument(d);
+            }
+
+            IndexReader reader = w.Reader;
+            IndexSearcher s = NewSearcher(reader);
+            w.Dispose();
+
+            // now search
+            int num = AtLeast(10);
+            for (int i = 0; i < num; i++)
+            {
+                int docID = r.Next(docs.Count);
+                IList<string> doc = docs[docID];
+
+                int numTerm = TestUtil.NextInt(r, 2, 20);
+                int start = r.Next(doc.Count - numTerm);
+                PhraseQuery pq = new PhraseQuery();
+                StringBuilder sb = new StringBuilder();
+                for (int t = start; t < start + numTerm; t++)
+                {
+                    pq.Add(new Term("f", doc[t]));
+                    sb.Append(doc[t]).Append(' ');
+                }
+
+                TopDocs hits = s.Search(pq, NUM_DOCS);
+                bool found = false;
+                for (int j = 0; j < hits.ScoreDocs.Length; j++)
+                {
+                    if (hits.ScoreDocs[j].Doc == docID)
+                    {
+                        found = true;
+                        break;
+                    }
+                }
+
+                Assert.IsTrue(found, "phrase '" + sb + "' not found; start=" + start);
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNegativeSlop()
+        {
+            PhraseQuery query = new PhraseQuery();
+            query.Add(new Term("field", "two"));
+            query.Add(new Term("field", "one"));
+            try
+            {
+                query.Slop = -2;
+                Assert.Fail("didn't get expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+        }
+    }
+}
\ No newline at end of file


[65/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Renamed Util\fst\ to Util\Fst\

Posted by ni...@apache.org.
Lucene.Net.TestFramework: Renamed Util\fst\ to Util\Fst\


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/84ad7a30
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/84ad7a30
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/84ad7a30

Branch: refs/heads/api-work
Commit: 84ad7a307d0376599bcf0be1895f212268044681
Parents: 6a55c21
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 26 03:40:33 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:18:00 2017 +0700

----------------------------------------------------------------------
 .../Lucene.Net.TestFramework.csproj             |    2 +-
 .../Util/Fst/FSTTester.cs                       | 1013 ++++++++++++++++++
 .../Util/fst/FSTTester.cs                       | 1013 ------------------
 3 files changed, 1014 insertions(+), 1014 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/84ad7a30/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
index 788338e..c7b9446 100644
--- a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
+++ b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
@@ -446,7 +446,7 @@
     <Compile Include="Util\FailOnNonBulkMergesInfoStream.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Util\fst\FSTTester.cs">
+    <Compile Include="Util\Fst\FSTTester.cs">
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Util\LineFileDocs.cs">

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/84ad7a30/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs b/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs
new file mode 100644
index 0000000..46c6f61
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Util/Fst/FSTTester.cs
@@ -0,0 +1,1013 @@
+using Lucene.Net.Randomized.Generators;
+using Lucene.Net.Support;
+using NUnit.Framework;
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.IO;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Util.Fst
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Directory = Lucene.Net.Store.Directory;
+    using IndexInput = Lucene.Net.Store.IndexInput;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
+
+    /// <summary>
+    /// Helper class to test FSTs. </summary>
+    public class FSTTester<T>
+    {
+        internal readonly Random Random;
+        internal readonly List<InputOutput<T>> Pairs;
+        internal readonly int InputMode;
+        internal readonly Outputs<T> Outputs;
+        internal readonly Directory Dir;
+        internal readonly bool DoReverseLookup;
+
+        public FSTTester(Random random, Directory dir, int inputMode, List<InputOutput<T>> pairs, Outputs<T> outputs, bool doReverseLookup)
+        {
+            this.Random = random;
+            this.Dir = dir;
+            this.InputMode = inputMode;
+            this.Pairs = pairs;
+            this.Outputs = outputs;
+            this.DoReverseLookup = doReverseLookup;
+        }
+
+        internal static string InputToString(int inputMode, Int32sRef term)
+        {
+            return InputToString(inputMode, term, true);
+        }
+
+        internal static string InputToString(int inputMode, Int32sRef term, bool isValidUnicode)
+        {
+            if (!isValidUnicode)
+            {
+                return term.ToString();
+            }
+            else if (inputMode == 0)
+            {
+                // utf8
+                return ToBytesRef(term).Utf8ToString() + " " + term;
+            }
+            else
+            {
+                // utf32
+                return UnicodeUtil.NewString(term.Int32s, term.Offset, term.Length) + " " + term;
+            }
+        }
+
+        private static BytesRef ToBytesRef(Int32sRef ir)
+        {
+            BytesRef br = new BytesRef(ir.Length);
+            for (int i = 0; i < ir.Length; i++)
+            {
+                int x = ir.Int32s[ir.Offset + i];
+                Debug.Assert(x >= 0 && x <= 255);
+                br.Bytes[i] = (byte)x;
+            }
+            br.Length = ir.Length;
+            return br;
+        }
+
+        internal static string GetRandomString(Random random)
+        {
+            string term;
+            if (random.NextBoolean())
+            {
+                term = TestUtil.RandomRealisticUnicodeString(random);
+            }
+            else
+            {
+                // we want to mix in limited-alphabet symbols so
+                // we get more sharing of the nodes given how few
+                // terms we are testing...
+                term = SimpleRandomString(random);
+            }
+            return term;
+        }
+
+        internal static string SimpleRandomString(Random r)
+        {
+            int end = r.Next(10);
+            if (end == 0)
+            {
+                // allow 0 length
+                return "";
+            }
+            char[] buffer = new char[end];
+            for (int i = 0; i < end; i++)
+            {
+                buffer[i] = (char)TestUtil.NextInt(r, 97, 102);
+            }
+            return new string(buffer, 0, end);
+        }
+
+        internal static Int32sRef ToIntsRef(string s, int inputMode)
+        {
+            return ToIntsRef(s, inputMode, new Int32sRef(10));
+        }
+
+        internal static Int32sRef ToIntsRef(string s, int inputMode, Int32sRef ir)
+        {
+            if (inputMode == 0)
+            {
+                // utf8
+                return ToIntsRef(new BytesRef(s), ir);
+            }
+            else
+            {
+                // utf32
+                return ToIntsRefUTF32(s, ir);
+            }
+        }
+
+        internal static Int32sRef ToIntsRefUTF32(string s, Int32sRef ir)
+        {
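+            // walk the string one Unicode code point at a time (a surrogate
+            // pair counts as one code point), storing each code point in its
+            // own int slot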
+            int charLength = s.Length;
+            int charIdx = 0;
+            int intIdx = 0;
+            while (charIdx < charLength)
+            {
+                if (intIdx == ir.Int32s.Length)
+                {
+                    ir.Grow(intIdx + 1);
+                }
+                int utf32 = Character.CodePointAt(s, charIdx);
+                ir.Int32s[intIdx] = utf32;
+                charIdx += Character.CharCount(utf32);
+                intIdx++;
+            }
+            ir.Length = intIdx;
+            return ir;
+        }
+
+        internal static Int32sRef ToIntsRef(BytesRef br, Int32sRef ir)
+        {
+            if (br.Length > ir.Int32s.Length)
+            {
+                ir.Grow(br.Length);
+            }
+            for (int i = 0; i < br.Length; i++)
+            {
+                ir.Int32s[i] = br.Bytes[br.Offset + i] & 0xFF;
+            }
+            ir.Length = br.Length;
+            return ir;
+        }
+
+        /// <summary>
+        /// Holds one input/output pair. </summary>
+        public class InputOutput<T1> : IComparable<InputOutput<T1>>
+        {
+            public readonly Int32sRef Input;
+            public readonly T1 Output;
+
+            public InputOutput(Int32sRef input, T1 output)
+            {
+                this.Input = input;
+                this.Output = output;
+            }
+
+            public virtual int CompareTo(InputOutput<T1> other)
+            {
+                return this.Input.CompareTo(other.Input);
+            }
+        }
+
+        public virtual void DoTest(bool testPruning)
+        {
+            // no pruning
+            DoTest(0, 0, true);
+
+            if (testPruning)
+            {
+                // simple pruning
+                DoTest(TestUtil.NextInt(Random, 1, 1 + Pairs.Count), 0, true);
+
+                // leafy pruning
+                DoTest(0, TestUtil.NextInt(Random, 1, 1 + Pairs.Count), true);
+            }
+        }
+
+        // Runs the term through the FST, returning the output, or null if the
+        // term isn't accepted. If prefixLength is non-null, it must be a
+        // length-1 int array; prefixLength[0] is set to the length of the
+        // term prefix that matches.
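+        // hypothetical example: for an FST that accepts only "car", running
+        // "cart" with a non-null prefixLength returns the output accumulated
+        // along "c"-"a"-"r" and sets prefixLength[0] = 3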
+        private T Run(FST<T> fst, Int32sRef term, int[] prefixLength)
+        {
+            Debug.Assert(prefixLength == null || prefixLength.Length == 1);
+            FST.Arc<T> arc = fst.GetFirstArc(new FST.Arc<T>());
+            T NO_OUTPUT = fst.Outputs.NoOutput;
+            T output = NO_OUTPUT;
+            FST.BytesReader fstReader = fst.GetBytesReader();
+
+            for (int i = 0; i <= term.Length; i++)
+            {
+                int label;
+                if (i == term.Length)
+                {
+                    label = FST.END_LABEL;
+                }
+                else
+                {
+                    label = term.Int32s[term.Offset + i];
+                }
+                // System.out.println("   loop i=" + i + " label=" + label + " output=" + fst.Outputs.outputToString(output) + " curArc: target=" + arc.target + " isFinal?=" + arc.isFinal());
+                if (fst.FindTargetArc(label, arc, arc, fstReader) == null)
+                {
+                    // System.out.println("    not found");
+                    if (prefixLength != null)
+                    {
+                        prefixLength[0] = i;
+                        return output;
+                    }
+                    else
+                    {
+                        return default(T);
+                    }
+                }
+                output = fst.Outputs.Add(output, arc.Output);
+            }
+
+            if (prefixLength != null)
+            {
+                prefixLength[0] = term.Length;
+            }
+
+            return output;
+        }
+
+        private T RandomAcceptedWord(FST<T> fst, Int32sRef @in)
+        {
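+            // randomly walk arcs from the root until an END_LABEL arc is
+            // chosen, appending each label to @in and accumulating the arc
+            // outputs; the result is always a word this FST accepts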
+            FST.Arc<T> arc = fst.GetFirstArc(new FST.Arc<T>());
+
+            IList<FST.Arc<T>> arcs = new List<FST.Arc<T>>();
+            @in.Length = 0;
+            @in.Offset = 0;
+            T NO_OUTPUT = fst.Outputs.NoOutput;
+            T output = NO_OUTPUT;
+            FST.BytesReader fstReader = fst.GetBytesReader();
+
+            while (true)
+            {
+                // read all arcs:
+                fst.ReadFirstTargetArc(arc, arc, fstReader);
+                arcs.Add((new FST.Arc<T>()).CopyFrom(arc));
+                while (!arc.IsLast)
+                {
+                    fst.ReadNextArc(arc, fstReader);
+                    arcs.Add((new FST.Arc<T>()).CopyFrom(arc));
+                }
+
+                // pick one
+                arc = arcs[Random.Next(arcs.Count)];
+                arcs.Clear();
+
+                // accumulate output
+                output = fst.Outputs.Add(output, arc.Output);
+
+                // append label
+                if (arc.Label == FST.END_LABEL)
+                {
+                    break;
+                }
+
+                if (@in.Int32s.Length == @in.Length)
+                {
+                    @in.Grow(1 + @in.Length);
+                }
+                @in.Int32s[@in.Length++] = arc.Label;
+            }
+
+            return output;
+        }
+
+        internal virtual FST<T> DoTest(int prune1, int prune2, bool allowRandomSuffixSharing)
+        {
+            if (LuceneTestCase.VERBOSE)
+            {
+                Console.WriteLine("\nTEST: prune1=" + prune1 + " prune2=" + prune2);
+            }
+
+            bool willRewrite = Random.NextBoolean();
+
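+            // The arguments appear to follow the Lucene 4.x Builder constructor:
+            // input type, the two pruning thresholds, doShareSuffix,
+            // doShareNonSingletonNodes, shareMaxTailLength, outputs,
+            // freezeTail (null here), doPackFST (willRewrite),
+            // acceptableOverheadRatio, allowArrayArcs, and bytesPageBits.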
+            Builder<T> builder = new Builder<T>(InputMode == 0 ? FST.INPUT_TYPE.BYTE1 : FST.INPUT_TYPE.BYTE4, 
+                                                prune1, prune2, 
+                                                prune1 == 0 && prune2 == 0, 
+                                                allowRandomSuffixSharing ? Random.NextBoolean() : true, 
+                                                allowRandomSuffixSharing ? TestUtil.NextInt(Random, 1, 10) : int.MaxValue, 
+                                                Outputs, 
+                                                null, 
+                                                willRewrite, 
+                                                PackedInt32s.DEFAULT, 
+                                                true, 
+                                                15);
+            if (LuceneTestCase.VERBOSE)
+            {
+                if (willRewrite)
+                {
+                    Console.WriteLine("TEST: packed FST");
+                }
+                else
+                {
+                    Console.WriteLine("TEST: non-packed FST");
+                }
+            }
+
+            foreach (InputOutput<T> pair in Pairs)
+            {
+                if (pair.Output is IEnumerable)
+                {
+                    Builder<object> builderObject = builder as Builder<object>;
+                    var values = pair.Output as IEnumerable;
+                    foreach (object value in values)
+                    {
+                        builderObject.Add(pair.Input, value);
+                    }
+                }
+                else
+                {
+                    builder.Add(pair.Input, pair.Output);
+                }
+            }
+            FST<T> fst = builder.Finish();
+
+            if (Random.NextBoolean() && fst != null && !willRewrite)
+            {
+                IOContext context = LuceneTestCase.NewIOContext(Random);
+                using (IndexOutput @out = Dir.CreateOutput("fst.bin", context))
+                {
+                    fst.Save(@out);
+                }
+                IndexInput @in = Dir.OpenInput("fst.bin", context);
+                try
+                {
+                    fst = new FST<T>(@in, Outputs);
+                }
+                finally
+                {
+                    @in.Dispose();
+                    Dir.DeleteFile("fst.bin");
+                }
+            }
+
+            if (LuceneTestCase.VERBOSE && Pairs.Count <= 20 && fst != null)
+            {
+                using (TextWriter w = new StreamWriter(new FileStream("out.dot", FileMode.OpenOrCreate), Encoding.UTF8))
+                {
+                    Util.ToDot(fst, w, false, false);
+                }
+                Console.WriteLine("SAVED out.dot");
+            }
+
+            if (LuceneTestCase.VERBOSE)
+            {
+                if (fst == null)
+                {
+                    Console.WriteLine("  fst has 0 nodes (fully pruned)");
+                }
+                else
+                {
+                    Console.WriteLine("  fst has " + fst.NodeCount + " nodes and " + fst.ArcCount + " arcs");
+                }
+            }
+
+            if (prune1 == 0 && prune2 == 0)
+            {
+                VerifyUnPruned(InputMode, fst);
+            }
+            else
+            {
+                VerifyPruned(InputMode, fst, prune1, prune2);
+            }
+
+            return fst;
+        }
+
+        protected internal virtual bool OutputsEqual(T a, T b)
+        {
+            // LUCENENET: In .NET, IEnumerables are not automatically compared
+            // by value, so we need to do that manually.
+            // Note that we are testing the values without regard to whether
+            // the enumerable type is nullable.
+            return a.ValueEquals(b);
+        }
+
+        // FST is complete
+        private void VerifyUnPruned(int inputMode, FST<T> fst)
+        {
+            FST<long?> fstLong;
+            ISet<long?> validOutputs;
+            long minLong = long.MaxValue;
+            long maxLong = long.MinValue;
+
+            if (DoReverseLookup)
+            {
+                FST<long?> fstLong0 = fst as FST<long?>;
+                fstLong = fstLong0;
+                validOutputs = new HashSet<long?>();
+                foreach (InputOutput<T> pair in Pairs)
+                {
+                    long? output = pair.Output as long?;
+                    maxLong = Math.Max(maxLong, output.Value);
+                    minLong = Math.Min(minLong, output.Value);
+                    validOutputs.Add(output.Value);
+                }
+            }
+            else
+            {
+                fstLong = null;
+                validOutputs = null;
+            }
+
+            if (Pairs.Count == 0)
+            {
+                Assert.IsNull(fst);
+                return;
+            }
+
+            if (LuceneTestCase.VERBOSE)
+            {
+                Console.WriteLine("TEST: now verify " + Pairs.Count + " terms");
+                foreach (InputOutput<T> pair in Pairs)
+                {
+                    Assert.IsNotNull(pair);
+                    Assert.IsNotNull(pair.Input);
+                    Assert.IsNotNull(pair.Output);
+                    Console.WriteLine("  " + InputToString(inputMode, pair.Input) + ": " + Outputs.OutputToString(pair.Output));
+                }
+            }
+
+            Assert.IsNotNull(fst);
+
+            // visit valid pairs in order -- make sure all words
+            // are accepted, and FSTEnum's next() steps through
+            // them correctly
+            if (LuceneTestCase.VERBOSE)
+            {
+                Console.WriteLine("TEST: check valid terms/next()");
+            }
+            {
+                Int32sRefFSTEnum<T> fstEnum = new Int32sRefFSTEnum<T>(fst);
+                foreach (InputOutput<T> pair in Pairs)
+                {
+                    Int32sRef term = pair.Input;
+                    if (LuceneTestCase.VERBOSE)
+                    {
+                        Console.WriteLine("TEST: check term=" + InputToString(inputMode, term) + " output=" + fst.Outputs.OutputToString(pair.Output));
+                    }
+                    T output = Run(fst, term, null);
+                    Assert.IsNotNull(output, "term " + InputToString(inputMode, term) + " is not accepted");
+                    Assert.IsTrue(OutputsEqual(pair.Output, output));
+
+                    // verify enum's next
+                    Int32sRefFSTEnum.InputOutput<T> t = fstEnum.Next();
+                    Assert.IsNotNull(t);
+                    Assert.AreEqual(term, t.Input, "expected input=" + InputToString(inputMode, term) + " but fstEnum returned " + InputToString(inputMode, t.Input));
+                    Assert.IsTrue(OutputsEqual(pair.Output, t.Output));
+                }
+                Assert.IsNull(fstEnum.Next());
+            }
+
+            IDictionary<Int32sRef, T> termsMap = new Dictionary<Int32sRef, T>();
+            foreach (InputOutput<T> pair in Pairs)
+            {
+                termsMap[pair.Input] = pair.Output;
+            }
+
+            if (DoReverseLookup && maxLong > minLong)
+            {
+                // Do random lookups so we test null (output doesn't
+                // exist) case:
+                Assert.IsNull(Util.GetByOutput(fstLong, minLong - 7));
+                Assert.IsNull(Util.GetByOutput(fstLong, maxLong + 7));
+
+                int num = LuceneTestCase.AtLeast(Random, 100);
+                for (int iter = 0; iter < num; iter++)
+                {
+                    long v = TestUtil.NextLong(Random, minLong, maxLong);
+                    Int32sRef input = Util.GetByOutput(fstLong, v);
+                    Assert.IsTrue(validOutputs.Contains(v) || input == null);
+                }
+            }
+
+            // find random matching word and make sure it's valid
+            if (LuceneTestCase.VERBOSE)
+            {
+                Console.WriteLine("TEST: verify random accepted terms");
+            }
+            Int32sRef scratch = new Int32sRef(10);
+            int num_ = LuceneTestCase.AtLeast(Random, 500);
+            for (int iter = 0; iter < num_; iter++)
+            {
+                T output = RandomAcceptedWord(fst, scratch);
+                Assert.IsTrue(termsMap.ContainsKey(scratch), "accepted word " + InputToString(inputMode, scratch) + " is not valid");
+                Assert.IsTrue(OutputsEqual(termsMap[scratch], output));
+
+                if (DoReverseLookup)
+                {
+                    //System.out.println("lookup output=" + output + " outs=" + fst.Outputs);
+                    Int32sRef input = Util.GetByOutput(fstLong, (output as long?).Value);
+                    Assert.IsNotNull(input);
+                    //System.out.println("  got " + Util.toBytesRef(input, new BytesRef()).utf8ToString());
+                    Assert.AreEqual(scratch, input);
+                }
+            }
+
+            // test IntsRefFSTEnum.Seek:
+            if (LuceneTestCase.VERBOSE)
+            {
+                Console.WriteLine("TEST: verify seek");
+            }
+            Int32sRefFSTEnum<T> fstEnum_ = new Int32sRefFSTEnum<T>(fst);
+            num_ = LuceneTestCase.AtLeast(Random, 100);
+            for (int iter = 0; iter < num_; iter++)
+            {
+                if (LuceneTestCase.VERBOSE)
+                {
+                    Console.WriteLine("  iter=" + iter);
+                }
+                if (Random.NextBoolean())
+                {
+                    // seek to term that doesn't exist:
+                    while (true)
+                    {
+                        Int32sRef term = ToIntsRef(GetRandomString(Random), inputMode);
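+                        // List<T>.BinarySearch returns the bitwise complement of
+                        // the insertion point when the key is absent, so
+                        // -(pos + 1) below recovers the index of the first pair
+                        // greater than term.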
+                        int pos = Pairs.BinarySearch(new InputOutput<T>(term, default(T)));
+                        if (pos < 0)
+                        {
+                            pos = -(pos + 1);
+                            // ok doesn't exist
+                            //System.out.println("  seek " + inputToString(inputMode, term));
+                            Int32sRefFSTEnum.InputOutput<T> seekResult;
+                            if (Random.Next(3) == 0)
+                            {
+                                if (LuceneTestCase.VERBOSE)
+                                {
+                                    Console.WriteLine("  do non-exist seekExact term=" + InputToString(inputMode, term));
+                                }
+                                seekResult = fstEnum_.SeekExact(term);
+                                pos = -1;
+                            }
+                            else if (Random.NextBoolean())
+                            {
+                                if (LuceneTestCase.VERBOSE)
+                                {
+                                    Console.WriteLine("  do non-exist seekFloor term=" + InputToString(inputMode, term));
+                                }
+                                seekResult = fstEnum_.SeekFloor(term);
+                                pos--;
+                            }
+                            else
+                            {
+                                if (LuceneTestCase.VERBOSE)
+                                {
+                                    Console.WriteLine("  do non-exist seekCeil term=" + InputToString(inputMode, term));
+                                }
+                                seekResult = fstEnum_.SeekCeil(term);
+                            }
+
+                            if (pos != -1 && pos < Pairs.Count)
+                            {
+                                //System.out.println("    got " + inputToString(inputMode,seekResult.input) + " output=" + fst.Outputs.outputToString(seekResult.Output));
+                                Assert.IsNotNull(seekResult, "got null but expected term=" + InputToString(inputMode, Pairs[pos].Input));
+                                if (LuceneTestCase.VERBOSE)
+                                {
+                                    Console.WriteLine("    got " + InputToString(inputMode, seekResult.Input));
+                                }
+                                Assert.AreEqual(Pairs[pos].Input, seekResult.Input, "expected " + InputToString(inputMode, Pairs[pos].Input) + " but got " + InputToString(inputMode, seekResult.Input));
+                                Assert.IsTrue(OutputsEqual(Pairs[pos].Output, seekResult.Output));
+                            }
+                            else
+                            {
+                                // sought before start or beyond end
+                                //System.out.println("seek=" + seekTerm);
+                                Assert.IsNull(seekResult, "expected null but got " + (seekResult == null ? "null" : InputToString(inputMode, seekResult.Input)));
+                                if (LuceneTestCase.VERBOSE)
+                                {
+                                    Console.WriteLine("    got null");
+                                }
+                            }
+
+                            break;
+                        }
+                    }
+                }
+                else
+                {
+                    // seek to term that does exist:
+                    InputOutput<T> pair = Pairs[Random.Next(Pairs.Count)];
+                    Int32sRefFSTEnum.InputOutput<T> seekResult;
+                    if (Random.Next(3) == 2)
+                    {
+                        if (LuceneTestCase.VERBOSE)
+                        {
+                            Console.WriteLine("  do exist seekExact term=" + InputToString(inputMode, pair.Input));
+                        }
+                        seekResult = fstEnum_.SeekExact(pair.Input);
+                    }
+                    else if (Random.NextBoolean())
+                    {
+                        if (LuceneTestCase.VERBOSE)
+                        {
+                            Console.WriteLine("  do exist seekFloor " + InputToString(inputMode, pair.Input));
+                        }
+                        seekResult = fstEnum_.SeekFloor(pair.Input);
+                    }
+                    else
+                    {
+                        if (LuceneTestCase.VERBOSE)
+                        {
+                            Console.WriteLine("  do exist seekCeil " + InputToString(inputMode, pair.Input));
+                        }
+                        seekResult = fstEnum_.SeekCeil(pair.Input);
+                    }
+                    Assert.IsNotNull(seekResult);
+                    Assert.AreEqual(pair.Input, seekResult.Input, "got " + InputToString(inputMode, seekResult.Input) + " but expected " + InputToString(inputMode, pair.Input));
+                    Assert.IsTrue(OutputsEqual(pair.Output, seekResult.Output));
+                }
+            }
+
+            if (LuceneTestCase.VERBOSE)
+            {
+                Console.WriteLine("TEST: mixed next/seek");
+            }
+
+            // test mixed next/seek
+            num_ = LuceneTestCase.AtLeast(Random, 100);
+            for (int iter = 0; iter < num_; iter++)
+            {
+                if (LuceneTestCase.VERBOSE)
+                {
+                    Console.WriteLine("TEST: iter " + iter);
+                }
+                // reset:
+                fstEnum_ = new Int32sRefFSTEnum<T>(fst);
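+                // upto tracks the index into Pairs of the entry the enum is
+                // positioned on; -1 means before the first entry.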
+                int upto = -1;
+                while (true)
+                {
+                    bool isDone = false;
+                    if (upto == Pairs.Count - 1 || Random.NextBoolean())
+                    {
+                        // next
+                        upto++;
+                        if (LuceneTestCase.VERBOSE)
+                        {
+                            Console.WriteLine("  do next");
+                        }
+                        isDone = fstEnum_.Next() == null;
+                    }
+                    else if (upto != -1 && upto < 0.75 * Pairs.Count && Random.NextBoolean())
+                    {
+                        int attempt = 0;
+                        for (; attempt < 10; attempt++)
+                        {
+                            Int32sRef term = ToIntsRef(GetRandomString(Random), inputMode);
+                            if (!termsMap.ContainsKey(term) && term.CompareTo(Pairs[upto].Input) > 0)
+                            {
+                                int pos = Pairs.BinarySearch(new InputOutput<T>(term, default(T)));
+                                Debug.Assert(pos < 0);
+                                upto = -(pos + 1);
+
+                                if (Random.NextBoolean())
+                                {
+                                    upto--;
+                                    Assert.IsTrue(upto != -1);
+                                    if (LuceneTestCase.VERBOSE)
+                                    {
+                                        Console.WriteLine("  do non-exist seekFloor(" + InputToString(inputMode, term) + ")");
+                                    }
+                                    isDone = fstEnum_.SeekFloor(term) == null;
+                                }
+                                else
+                                {
+                                    if (LuceneTestCase.VERBOSE)
+                                    {
+                                        Console.WriteLine("  do non-exist seekCeil(" + InputToString(inputMode, term) + ")");
+                                    }
+                                    isDone = fstEnum_.SeekCeil(term) == null;
+                                }
+
+                                break;
+                            }
+                        }
+                        if (attempt == 10)
+                        {
+                            continue;
+                        }
+                    }
+                    else
+                    {
+                        int inc = Random.Next(Pairs.Count - upto - 1);
+                        upto += inc;
+                        if (upto == -1)
+                        {
+                            upto = 0;
+                        }
+
+                        if (Random.NextBoolean())
+                        {
+                            if (LuceneTestCase.VERBOSE)
+                            {
+                                Console.WriteLine("  do seekCeil(" + InputToString(inputMode, Pairs[upto].Input) + ")");
+                            }
+                            isDone = fstEnum_.SeekCeil(Pairs[upto].Input) == null;
+                        }
+                        else
+                        {
+                            if (LuceneTestCase.VERBOSE)
+                            {
+                                Console.WriteLine("  do seekFloor(" + InputToString(inputMode, Pairs[upto].Input) + ")");
+                            }
+                            isDone = fstEnum_.SeekFloor(Pairs[upto].Input) == null;
+                        }
+                    }
+                    if (LuceneTestCase.VERBOSE)
+                    {
+                        if (!isDone)
+                        {
+                            Console.WriteLine("    got " + InputToString(inputMode, fstEnum_.Current.Input));
+                        }
+                        else
+                        {
+                            Console.WriteLine("    got null");
+                        }
+                    }
+
+                    if (upto == Pairs.Count)
+                    {
+                        Assert.IsTrue(isDone);
+                        break;
+                    }
+                    else
+                    {
+                        Assert.IsFalse(isDone);
+                        Assert.AreEqual(Pairs[upto].Input, fstEnum_.Current.Input);
+                        Assert.IsTrue(OutputsEqual(Pairs[upto].Output, fstEnum_.Current.Output));
+
+                        /*
+                          if (upto < pairs.size()-1) {
+                            int tryCount = 0;
+                            while (tryCount < 10) {
+                              final IntsRef t = toIntsRef(getRandomString(), inputMode);
+                              if (pairs.get(upto).input.compareTo(t) < 0) {
+                                final boolean expected = t.compareTo(pairs.get(upto+1).input) < 0;
+                                if (LuceneTestCase.VERBOSE) {
+                                  System.out.println("TEST: call beforeNext(" + inputToString(inputMode, t) + "); current=" + inputToString(inputMode, pairs.get(upto).input) + " next=" + inputToString(inputMode, pairs.get(upto+1).input) + " expected=" + expected);
+                                }
+                                Assert.AreEqual(expected, fstEnum.beforeNext(t));
+                                break;
+                              }
+                              tryCount++;
+                            }
+                          }
+                        */
+                    }
+                }
+            }
+        }
+
+        private class CountMinOutput<S>
+        {
+            internal int Count;          // number of terms sharing this prefix
+            internal S Output;           // common output of all terms sharing this prefix
+            internal S FinalOutput;      // output when the prefix is itself a complete term
+            internal bool IsLeaf = true; // no kept prefix extends this one
+            internal bool IsFinal;       // the prefix is a complete input term
+        }
+
+        // FST is pruned
+        private void VerifyPruned(int inputMode, FST<T> fst, int prune1, int prune2)
+        {
+            if (LuceneTestCase.VERBOSE)
+            {
+                Console.WriteLine("TEST: now verify pruned " + Pairs.Count + " terms; outputs=" + Outputs);
+                foreach (InputOutput<T> pair in Pairs)
+                {
+                    Console.WriteLine("  " + InputToString(inputMode, pair.Input) + ": " + Outputs.OutputToString(pair.Output));
+                }
+            }
+
+            // To validate the FST, we brute-force compute all prefixes
+            // in the terms, matched to their "common" outputs, prune that
+            // set according to the prune thresholds, then assert the FST
+            // matches that same set.
+
+            // NOTE: Crazy RAM intensive!!
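+            // Worked example (illustrative values, not from the test data): for
+            // the two inputs "ab" and "ac", the tallied prefixes are "" (count=2),
+            // "a" (count=2), "ab" (count=1) and "ac" (count=1); with prune1=2 only
+            // "" and "a" survive, so the pruned FST must accept exactly "a".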
+
+            //System.out.println("TEST: tally prefixes");
+
+            // build all prefixes
+            IDictionary<Int32sRef, CountMinOutput<T>> prefixes = new HashMap<Int32sRef, CountMinOutput<T>>();
+            Int32sRef scratch = new Int32sRef(10);
+            foreach (InputOutput<T> pair in Pairs)
+            {
+                scratch.CopyInt32s(pair.Input);
+                for (int idx = 0; idx <= pair.Input.Length; idx++)
+                {
+                    scratch.Length = idx;
+                    CountMinOutput<T> cmo = prefixes.ContainsKey(scratch) ? prefixes[scratch] : null;
+                    if (cmo == null)
+                    {
+                        cmo = new CountMinOutput<T>();
+                        cmo.Count = 1;
+                        cmo.Output = pair.Output;
+                        prefixes[Int32sRef.DeepCopyOf(scratch)] = cmo;
+                    }
+                    else
+                    {
+                        cmo.Count++;
+                        T output1 = cmo.Output;
+                        if (output1.Equals(Outputs.NoOutput))
+                        {
+                            output1 = Outputs.NoOutput;
+                        }
+                        T output2 = pair.Output;
+                        if (output2.Equals(Outputs.NoOutput))
+                        {
+                            output2 = Outputs.NoOutput;
+                        }
+                        cmo.Output = Outputs.Common(output1, output2);
+                    }
+                    if (idx == pair.Input.Length)
+                    {
+                        cmo.IsFinal = true;
+                        cmo.FinalOutput = cmo.Output;
+                    }
+                }
+            }
+
+            if (LuceneTestCase.VERBOSE)
+            {
+                Console.WriteLine("TEST: now prune");
+            }
+
+
+            // prune 'em
+            // LUCENENET NOTE: Altered this a bit to iterate in reverse by index rather
+            // than use an enumerator, since in .NET you cannot remove entries from a
+            // dictionary while enumerating it.
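+            // For illustration, the straightforward forward enumeration
+            // (hypothetical Keep() helper) would throw InvalidOperationException
+            // in .NET:
+            //
+            //   foreach (KeyValuePair<Int32sRef, CountMinOutput<T>> ent in prefixes)
+            //   {
+            //       if (!Keep(ent.Value)) prefixes.Remove(ent.Key); // modifies during enumeration
+            //   }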
+            for (int i = prefixes.Count - 1; i >= 0; i--)
+            {
+                KeyValuePair<Int32sRef, CountMinOutput<T>> ent = prefixes.ElementAt(i);
+                Int32sRef prefix = ent.Key;
+                CountMinOutput<T> cmo = ent.Value;
+                if (LuceneTestCase.VERBOSE)
+                {
+                    Console.WriteLine("  term prefix=" + InputToString(inputMode, prefix, false) + " count=" + cmo.Count + " isLeaf=" + cmo.IsLeaf + " output=" + Outputs.OutputToString(cmo.Output) + " isFinal=" + cmo.IsFinal);
+                }
+                bool keep;
+                if (prune1 > 0)
+                {
+                    keep = cmo.Count >= prune1;
+                }
+                else
+                {
+                    Debug.Assert(prune2 > 0);
+                    if (prune2 > 1 && cmo.Count >= prune2)
+                    {
+                        keep = true;
+                    }
+                    else if (prefix.Length > 0)
+                    {
+                        // consult our parent
+                        scratch.Length = prefix.Length - 1;
+                        Array.Copy(prefix.Int32s, prefix.Offset, scratch.Int32s, 0, scratch.Length);
+                        CountMinOutput<T> cmo2 = prefixes.ContainsKey(scratch) ? prefixes[scratch] : null;
+                        //System.out.println("    parent count = " + (cmo2 == null ? -1 : cmo2.count));
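+                        // the prefix survives only if its parent also clears the
+                        // prune2 threshold; with prune2 == 1 a prefix is kept when
+                        // the parent occurs at least twice or the prefix has
+                        // length <= 1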
+                        keep = cmo2 != null && ((prune2 > 1 && cmo2.Count >= prune2) || (prune2 == 1 && (cmo2.Count >= 2 || prefix.Length <= 1)));
+                    }
+                    else if (cmo.Count >= prune2)
+                    {
+                        keep = true;
+                    }
+                    else
+                    {
+                        keep = false;
+                    }
+                }
+
+                if (!keep)
+                {
+                    prefixes.Remove(prefix);
+                    //System.out.println("    remove");
+                }
+                else
+                {
+                    // clear isLeaf for all ancestors
+                    //System.out.println("    keep");
+                    scratch.CopyInt32s(prefix);
+                    scratch.Length--;
+                    while (scratch.Length >= 0)
+                    {
+                        CountMinOutput<T> cmo2 = prefixes.ContainsKey(scratch) ? prefixes[scratch] : null;
+                        if (cmo2 != null)
+                        {
+                            //System.out.println("    clear isLeaf " + inputToString(inputMode, scratch));
+                            cmo2.IsLeaf = false;
+                        }
+                        scratch.Length--;
+                    }
+                }
+            }
+
+            if (LuceneTestCase.VERBOSE)
+            {
+                Console.WriteLine("TEST: after prune");
+                foreach (KeyValuePair<Int32sRef, CountMinOutput<T>> ent in prefixes)
+                {
+                    Console.WriteLine("  " + InputToString(inputMode, ent.Key, false) + ": isLeaf=" + ent.Value.IsLeaf + " isFinal=" + ent.Value.IsFinal);
+                    if (ent.Value.IsFinal)
+                    {
+                        Console.WriteLine("    finalOutput=" + Outputs.OutputToString(ent.Value.FinalOutput));
+                    }
+                }
+            }
+
+            if (prefixes.Count <= 1)
+            {
+                Assert.IsNull(fst);
+                return;
+            }
+
+            Assert.IsNotNull(fst);
+
+            // make sure FST only enums valid prefixes
+            if (LuceneTestCase.VERBOSE)
+            {
+                Console.WriteLine("TEST: check pruned enum");
+            }
+            Int32sRefFSTEnum<T> fstEnum = new Int32sRefFSTEnum<T>(fst);
+            Int32sRefFSTEnum.InputOutput<T> current;
+            while ((current = fstEnum.Next()) != null)
+            {
+                if (LuceneTestCase.VERBOSE)
+                {
+                    Console.WriteLine("  fstEnum.next prefix=" + InputToString(inputMode, current.Input, false) + " output=" + Outputs.OutputToString(current.Output));
+                }
+                CountMinOutput<T> cmo = prefixes.ContainsKey(current.Input) ? prefixes[current.Input] : null;
+                Assert.IsNotNull(cmo);
+                Assert.IsTrue(cmo.IsLeaf || cmo.IsFinal);
+                //if (cmo.isFinal && !cmo.isLeaf) {
+                if (cmo.IsFinal)
+                {
+                    Assert.AreEqual(cmo.FinalOutput, current.Output);
+                }
+                else
+                {
+                    Assert.AreEqual(cmo.Output, current.Output);
+                }
+            }
+
+            // make sure all non-pruned prefixes are present in the FST
+            if (LuceneTestCase.VERBOSE)
+            {
+                Console.WriteLine("TEST: verify all prefixes");
+            }
+            int[] stopNode = new int[1];
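+            // stopNode[0] receives from Run() the length of the term prefix that
+            // matched; for a kept prefix the entire key must have matched.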
+            foreach (KeyValuePair<Int32sRef, CountMinOutput<T>> ent in prefixes)
+            {
+                if (ent.Key.Length > 0)
+                {
+                    CountMinOutput<T> cmo = ent.Value;
+                    T output = Run(fst, ent.Key, stopNode);
+                    if (LuceneTestCase.VERBOSE)
+                    {
+                        Console.WriteLine("TEST: verify prefix=" + InputToString(inputMode, ent.Key, false) + " output=" + Outputs.OutputToString(cmo.Output));
+                    }
+                    // if (cmo.isFinal && !cmo.isLeaf) {
+                    if (cmo.IsFinal)
+                    {
+                        Assert.AreEqual(cmo.FinalOutput, output);
+                    }
+                    else
+                    {
+                        Assert.AreEqual(cmo.Output, output);
+                    }
+                    Assert.AreEqual(ent.Key.Length, stopNode[0]);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/84ad7a30/src/Lucene.Net.TestFramework/Util/fst/FSTTester.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Util/fst/FSTTester.cs b/src/Lucene.Net.TestFramework/Util/fst/FSTTester.cs
deleted file mode 100644
index 46c6f61..0000000
--- a/src/Lucene.Net.TestFramework/Util/fst/FSTTester.cs
+++ /dev/null
@@ -1,1013 +0,0 @@
-using Lucene.Net.Randomized.Generators;
-using Lucene.Net.Support;
-using NUnit.Framework;
-using System;
-using System.Collections;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.IO;
-using System.Linq;
-using System.Text;
-
-namespace Lucene.Net.Util.Fst
-{
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         * <p/>
-         * http://www.apache.org/licenses/LICENSE-2.0
-         * <p/>
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using Directory = Lucene.Net.Store.Directory;
-    using IndexInput = Lucene.Net.Store.IndexInput;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
-
-    /// <summary>
-    /// Helper class to test FSTs. </summary>
-    public class FSTTester<T>
-    {
-        internal readonly Random Random;
-        internal readonly List<InputOutput<T>> Pairs;
-        internal readonly int InputMode;
-        internal readonly Outputs<T> Outputs;
-        internal readonly Directory Dir;
-        internal readonly bool DoReverseLookup;
-
-        public FSTTester(Random random, Directory dir, int inputMode, List<InputOutput<T>> pairs, Outputs<T> outputs, bool doReverseLookup)
-        {
-            this.Random = random;
-            this.Dir = dir;
-            this.InputMode = inputMode;
-            this.Pairs = pairs;
-            this.Outputs = outputs;
-            this.DoReverseLookup = doReverseLookup;
-        }
-
-        internal static string InputToString(int inputMode, Int32sRef term)
-        {
-            return InputToString(inputMode, term, true);
-        }
-
-        internal static string InputToString(int inputMode, Int32sRef term, bool isValidUnicode)
-        {
-            if (!isValidUnicode)
-            {
-                return term.ToString();
-            }
-            else if (inputMode == 0)
-            {
-                // utf8
-                return ToBytesRef(term).Utf8ToString() + " " + term;
-            }
-            else
-            {
-                // utf32
-                return UnicodeUtil.NewString(term.Int32s, term.Offset, term.Length) + " " + term;
-            }
-        }
-
-        private static BytesRef ToBytesRef(Int32sRef ir)
-        {
-            BytesRef br = new BytesRef(ir.Length);
-            for (int i = 0; i < ir.Length; i++)
-            {
-                int x = ir.Int32s[ir.Offset + i];
-                Debug.Assert(x >= 0 && x <= 255);
-                br.Bytes[i] = (byte)x;
-            }
-            br.Length = ir.Length;
-            return br;
-        }
-
-        internal static string GetRandomString(Random random)
-        {
-            string term;
-            if (random.NextBoolean())
-            {
-                term = TestUtil.RandomRealisticUnicodeString(random);
-            }
-            else
-            {
-                // we want to mix in limited-alphabet symbols so
-                // we get more sharing of the nodes given how few
-                // terms we are testing...
-                term = SimpleRandomString(random);
-            }
-            return term;
-        }
-
-        internal static string SimpleRandomString(Random r)
-        {
-            int end = r.Next(10);
-            if (end == 0)
-            {
-                // allow 0 length
-                return "";
-            }
-            char[] buffer = new char[end];
-            for (int i = 0; i < end; i++)
-            {
-                buffer[i] = (char)TestUtil.NextInt(r, 97, 102);
-            }
-            return new string(buffer, 0, end);
-        }
-
-        internal static Int32sRef ToIntsRef(string s, int inputMode)
-        {
-            return ToIntsRef(s, inputMode, new Int32sRef(10));
-        }
-
-        internal static Int32sRef ToIntsRef(string s, int inputMode, Int32sRef ir)
-        {
-            if (inputMode == 0)
-            {
-                // utf8
-                return ToIntsRef(new BytesRef(s), ir);
-            }
-            else
-            {
-                // utf32
-                return ToIntsRefUTF32(s, ir);
-            }
-        }
-
-        internal static Int32sRef ToIntsRefUTF32(string s, Int32sRef ir)
-        {
-            int charLength = s.Length;
-            int charIdx = 0;
-            int intIdx = 0;
-            while (charIdx < charLength)
-            {
-                if (intIdx == ir.Int32s.Length)
-                {
-                    ir.Grow(intIdx + 1);
-                }
-                int utf32 = Character.CodePointAt(s, charIdx);
-                ir.Int32s[intIdx] = utf32;
-                charIdx += Character.CharCount(utf32);
-                intIdx++;
-            }
-            ir.Length = intIdx;
-            return ir;
-        }
-
-        internal static Int32sRef ToIntsRef(BytesRef br, Int32sRef ir)
-        {
-            if (br.Length > ir.Int32s.Length)
-            {
-                ir.Grow(br.Length);
-            }
-            for (int i = 0; i < br.Length; i++)
-            {
-                ir.Int32s[i] = br.Bytes[br.Offset + i] & 0xFF;
-            }
-            ir.Length = br.Length;
-            return ir;
-        }
-
-        /// <summary>
-        /// Holds one input/output pair. </summary>
-        public class InputOutput<T1> : IComparable<InputOutput<T1>>
-        {
-            public readonly Int32sRef Input;
-            public readonly T1 Output;
-
-            public InputOutput(Int32sRef input, T1 output)
-            {
-                this.Input = input;
-                this.Output = output;
-            }
-
-            public virtual int CompareTo(InputOutput<T1> other)
-            {
-                return this.Input.CompareTo(other.Input);
-            }
-        }
-
-        public virtual void DoTest(bool testPruning)
-        {
-            // no pruning
-            DoTest(0, 0, true);
-
-            if (testPruning)
-            {
-                // simple pruning
-                DoTest(TestUtil.NextInt(Random, 1, 1 + Pairs.Count), 0, true);
-
-                // leafy pruning
-                DoTest(0, TestUtil.NextInt(Random, 1, 1 + Pairs.Count), true);
-            }
-        }
-
-        // Runs the term, returning the output, or null if the term
-        // isn't accepted. If prefixLength is non-null it must be a
-        // length-1 int array; prefixLength[0] is set to the length
-        // of the term prefix that matches.
-        private T Run(FST<T> fst, Int32sRef term, int[] prefixLength)
-        {
-            Debug.Assert(prefixLength == null || prefixLength.Length == 1);
-            FST.Arc<T> arc = fst.GetFirstArc(new FST.Arc<T>());
-            T NO_OUTPUT = fst.Outputs.NoOutput;
-            T output = NO_OUTPUT;
-            FST.BytesReader fstReader = fst.GetBytesReader();
-
-            for (int i = 0; i <= term.Length; i++)
-            {
-                int label;
-                if (i == term.Length)
-                {
-                    label = FST.END_LABEL;
-                }
-                else
-                {
-                    label = term.Int32s[term.Offset + i];
-                }
-                // System.out.println("   loop i=" + i + " label=" + label + " output=" + fst.Outputs.outputToString(output) + " curArc: target=" + arc.target + " isFinal?=" + arc.isFinal());
-                if (fst.FindTargetArc(label, arc, arc, fstReader) == null)
-                {
-                    // System.out.println("    not found");
-                    if (prefixLength != null)
-                    {
-                        prefixLength[0] = i;
-                        return output;
-                    }
-                    else
-                    {
-                        return default(T);
-                    }
-                }
-                output = fst.Outputs.Add(output, arc.Output);
-            }
-
-            if (prefixLength != null)
-            {
-                prefixLength[0] = term.Length;
-            }
-
-            return output;
-        }
-
-        private T RandomAcceptedWord(FST<T> fst, Int32sRef @in)
-        {
-            FST.Arc<T> arc = fst.GetFirstArc(new FST.Arc<T>());
-
-            IList<FST.Arc<T>> arcs = new List<FST.Arc<T>>();
-            @in.Length = 0;
-            @in.Offset = 0;
-            T NO_OUTPUT = fst.Outputs.NoOutput;
-            T output = NO_OUTPUT;
-            FST.BytesReader fstReader = fst.GetBytesReader();
-
-            while (true)
-            {
-                // read all arcs:
-                fst.ReadFirstTargetArc(arc, arc, fstReader);
-                arcs.Add((new FST.Arc<T>()).CopyFrom(arc));
-                while (!arc.IsLast)
-                {
-                    fst.ReadNextArc(arc, fstReader);
-                    arcs.Add((new FST.Arc<T>()).CopyFrom(arc));
-                }
-
-                // pick one
-                arc = arcs[Random.Next(arcs.Count)];
-                arcs.Clear();
-
-                // accumulate output
-                output = fst.Outputs.Add(output, arc.Output);
-
-                // append label
-                if (arc.Label == FST.END_LABEL)
-                {
-                    break;
-                }
-
-                if (@in.Int32s.Length == @in.Length)
-                {
-                    @in.Grow(1 + @in.Length);
-                }
-                @in.Int32s[@in.Length++] = arc.Label;
-            }
-
-            return output;
-        }
-
-        internal virtual FST<T> DoTest(int prune1, int prune2, bool allowRandomSuffixSharing)
-        {
-            if (LuceneTestCase.VERBOSE)
-            {
-                Console.WriteLine("\nTEST: prune1=" + prune1 + " prune2=" + prune2);
-            }
-
-            bool willRewrite = Random.NextBoolean();
-
-            Builder<T> builder = new Builder<T>(InputMode == 0 ? FST.INPUT_TYPE.BYTE1 : FST.INPUT_TYPE.BYTE4, 
-                                                prune1, prune2, 
-                                                prune1 == 0 && prune2 == 0, 
-                                                allowRandomSuffixSharing ? Random.NextBoolean() : true, 
-                                                allowRandomSuffixSharing ? TestUtil.NextInt(Random, 1, 10) : int.MaxValue, 
-                                                Outputs, 
-                                                null, 
-                                                willRewrite, 
-                                                PackedInt32s.DEFAULT, 
-                                                true, 
-                                                15);
-            if (LuceneTestCase.VERBOSE)
-            {
-                if (willRewrite)
-                {
-                    Console.WriteLine("TEST: packed FST");
-                }
-                else
-                {
-                    Console.WriteLine("TEST: non-packed FST");
-                }
-            }
-
-            foreach (InputOutput<T> pair in Pairs)
-            {
-                if (pair.Output is IEnumerable)
-                {
-                    Builder<object> builderObject = builder as Builder<object>;
-                    var values = pair.Output as IEnumerable;
-                    foreach (object value in values)
-                    {
-                        builderObject.Add(pair.Input, value);
-                    }
-                }
-                else
-                {
-                    builder.Add(pair.Input, pair.Output);
-                }
-            }
-            FST<T> fst = builder.Finish();
-
-            if (Random.NextBoolean() && fst != null && !willRewrite)
-            {
-                IOContext context = LuceneTestCase.NewIOContext(Random);
-                using (IndexOutput @out = Dir.CreateOutput("fst.bin", context))
-                {
-                    fst.Save(@out);
-                }
-                IndexInput @in = Dir.OpenInput("fst.bin", context);
-                try
-                {
-                    fst = new FST<T>(@in, Outputs);
-                }
-                finally
-                {
-                    @in.Dispose();
-                    Dir.DeleteFile("fst.bin");
-                }
-            }
-
-            if (LuceneTestCase.VERBOSE && Pairs.Count <= 20 && fst != null)
-            {
-                using (TextWriter w = new StreamWriter(new FileStream("out.dot", FileMode.OpenOrCreate), Encoding.UTF8))
-                {
-                    Util.ToDot(fst, w, false, false);
-                }
-                Console.WriteLine("SAVED out.dot");
-            }
-
-            if (LuceneTestCase.VERBOSE)
-            {
-                if (fst == null)
-                {
-                    Console.WriteLine("  fst has 0 nodes (fully pruned)");
-                }
-                else
-                {
-                    Console.WriteLine("  fst has " + fst.NodeCount + " nodes and " + fst.ArcCount + " arcs");
-                }
-            }
-
-            if (prune1 == 0 && prune2 == 0)
-            {
-                VerifyUnPruned(InputMode, fst);
-            }
-            else
-            {
-                VerifyPruned(InputMode, fst, prune1, prune2);
-            }
-
-            return fst;
-        }
-
-        protected internal virtual bool OutputsEqual(T a, T b)
-        {
-            // LUCENENET: In .NET, IEnumerable implementations do not compare their
-            // elements for equality automatically, so we need to do that manually.
-            // Note that we are testing the values without regard to whether
-            // the enumerable type is nullable.
-            return a.ValueEquals(b);
-        }
-
-        // FST is complete
-        private void VerifyUnPruned(int inputMode, FST<T> fst)
-        {
-            FST<long?> fstLong;
-            ISet<long?> validOutputs;
-            long minLong = long.MaxValue;
-            long maxLong = long.MinValue;
-
-            if (DoReverseLookup)
-            {
-                FST<long?> fstLong0 = fst as FST<long?>;
-                fstLong = fstLong0;
-                validOutputs = new HashSet<long?>();
-                foreach (InputOutput<T> pair in Pairs)
-                {
-                    long? output = pair.Output as long?;
-                    maxLong = Math.Max(maxLong, output.Value);
-                    minLong = Math.Min(minLong, output.Value);
-                    validOutputs.Add(output.Value);
-                }
-            }
-            else
-            {
-                fstLong = null;
-                validOutputs = null;
-            }
-
-            if (Pairs.Count == 0)
-            {
-                Assert.IsNull(fst);
-                return;
-            }
-
-            if (LuceneTestCase.VERBOSE)
-            {
-                Console.WriteLine("TEST: now verify " + Pairs.Count + " terms");
-                foreach (InputOutput<T> pair in Pairs)
-                {
-                    Assert.IsNotNull(pair);
-                    Assert.IsNotNull(pair.Input);
-                    Assert.IsNotNull(pair.Output);
-                    Console.WriteLine("  " + InputToString(inputMode, pair.Input) + ": " + Outputs.OutputToString(pair.Output));
-                }
-            }
-
-            Assert.IsNotNull(fst);
-
-            // visit valid pairs in order -- make sure all words
-            // are accepted, and FSTEnum's next() steps through
-            // them correctly
-            if (LuceneTestCase.VERBOSE)
-            {
-                Console.WriteLine("TEST: check valid terms/next()");
-            }
-            {
-                Int32sRefFSTEnum<T> fstEnum = new Int32sRefFSTEnum<T>(fst);
-                foreach (InputOutput<T> pair in Pairs)
-                {
-                    Int32sRef term = pair.Input;
-                    if (LuceneTestCase.VERBOSE)
-                    {
-                        Console.WriteLine("TEST: check term=" + InputToString(inputMode, term) + " output=" + fst.Outputs.OutputToString(pair.Output));
-                    }
-                    T output = Run(fst, term, null);
-                    Assert.IsNotNull(output, "term " + InputToString(inputMode, term) + " is not accepted");
-                    Assert.IsTrue(OutputsEqual(pair.Output, output));
-
-                    // verify enum's next
-                    Int32sRefFSTEnum.InputOutput<T> t = fstEnum.Next();
-                    Assert.IsNotNull(t);
-                    Assert.AreEqual(term, t.Input, "expected input=" + InputToString(inputMode, term) + " but fstEnum returned " + InputToString(inputMode, t.Input));
-                    Assert.IsTrue(OutputsEqual(pair.Output, t.Output));
-                }
-                Assert.IsNull(fstEnum.Next());
-            }
-
-            IDictionary<Int32sRef, T> termsMap = new Dictionary<Int32sRef, T>();
-            foreach (InputOutput<T> pair in Pairs)
-            {
-                termsMap[pair.Input] = pair.Output;
-            }
-
-            if (DoReverseLookup && maxLong > minLong)
-            {
-                // Do random lookups so we test null (output doesn't
-                // exist) case:
-                Assert.IsNull(Util.GetByOutput(fstLong, minLong - 7));
-                Assert.IsNull(Util.GetByOutput(fstLong, maxLong + 7));
-
-                int num = LuceneTestCase.AtLeast(Random, 100);
-                for (int iter = 0; iter < num; iter++)
-                {
-                    long v = TestUtil.NextLong(Random, minLong, maxLong);
-                    Int32sRef input = Util.GetByOutput(fstLong, v);
-                    Assert.IsTrue(validOutputs.Contains(v) || input == null);
-                }
-            }
-
-            // find random matching word and make sure it's valid
-            if (LuceneTestCase.VERBOSE)
-            {
-                Console.WriteLine("TEST: verify random accepted terms");
-            }
-            Int32sRef scratch = new Int32sRef(10);
-            int num_ = LuceneTestCase.AtLeast(Random, 500);
-            for (int iter = 0; iter < num_; iter++)
-            {
-                T output = RandomAcceptedWord(fst, scratch);
-                Assert.IsTrue(termsMap.ContainsKey(scratch), "accepted word " + InputToString(inputMode, scratch) + " is not valid");
-                Assert.IsTrue(OutputsEqual(termsMap[scratch], output));
-
-                if (DoReverseLookup)
-                {
-                    //System.out.println("lookup output=" + output + " outs=" + fst.Outputs);
-                    Int32sRef input = Util.GetByOutput(fstLong, (output as long?).Value);
-                    Assert.IsNotNull(input);
-                    //System.out.println("  got " + Util.toBytesRef(input, new BytesRef()).utf8ToString());
-                    Assert.AreEqual(scratch, input);
-                }
-            }
-
-            // test IntsRefFSTEnum.Seek:
-            if (LuceneTestCase.VERBOSE)
-            {
-                Console.WriteLine("TEST: verify seek");
-            }
-            Int32sRefFSTEnum<T> fstEnum_ = new Int32sRefFSTEnum<T>(fst);
-            num_ = LuceneTestCase.AtLeast(Random, 100);
-            for (int iter = 0; iter < num_; iter++)
-            {
-                if (LuceneTestCase.VERBOSE)
-                {
-                    Console.WriteLine("  iter=" + iter);
-                }
-                if (Random.NextBoolean())
-                {
-                    // seek to term that doesn't exist:
-                    while (true)
-                    {
-                        Int32sRef term = ToIntsRef(GetRandomString(Random), inputMode);
-                        int pos = Pairs.BinarySearch(new InputOutput<T>(term, default(T)));
-                        if (pos < 0)
-                        {
-                            pos = -(pos + 1);
-                            // ok doesn't exist
-                            //System.out.println("  seek " + inputToString(inputMode, term));
-                            Int32sRefFSTEnum.InputOutput<T> seekResult;
-                            if (Random.Next(3) == 0)
-                            {
-                                if (LuceneTestCase.VERBOSE)
-                                {
-                                    Console.WriteLine("  do non-exist seekExact term=" + InputToString(inputMode, term));
-                                }
-                                seekResult = fstEnum_.SeekExact(term);
-                                pos = -1;
-                            }
-                            else if (Random.NextBoolean())
-                            {
-                                if (LuceneTestCase.VERBOSE)
-                                {
-                                    Console.WriteLine("  do non-exist seekFloor term=" + InputToString(inputMode, term));
-                                }
-                                seekResult = fstEnum_.SeekFloor(term);
-                                pos--;
-                            }
-                            else
-                            {
-                                if (LuceneTestCase.VERBOSE)
-                                {
-                                    Console.WriteLine("  do non-exist seekCeil term=" + InputToString(inputMode, term));
-                                }
-                                seekResult = fstEnum_.SeekCeil(term);
-                            }
-
-                            if (pos != -1 && pos < Pairs.Count)
-                            {
-                                //System.out.println("    got " + inputToString(inputMode,seekResult.input) + " output=" + fst.Outputs.outputToString(seekResult.Output));
-                                Assert.IsNotNull(seekResult, "got null but expected term=" + InputToString(inputMode, Pairs[pos].Input));
-                                if (LuceneTestCase.VERBOSE)
-                                {
-                                    Console.WriteLine("    got " + InputToString(inputMode, seekResult.Input));
-                                }
-                                Assert.AreEqual(Pairs[pos].Input, seekResult.Input, "expected " + InputToString(inputMode, Pairs[pos].Input) + " but got " + InputToString(inputMode, seekResult.Input));
-                                Assert.IsTrue(OutputsEqual(Pairs[pos].Output, seekResult.Output));
-                            }
-                            else
-                            {
-                                // sought before start or beyond end
-                                //System.out.println("seek=" + seekTerm);
-                                Assert.IsNull(seekResult, "expected null but got " + (seekResult == null ? "null" : InputToString(inputMode, seekResult.Input)));
-                                if (LuceneTestCase.VERBOSE)
-                                {
-                                    Console.WriteLine("    got null");
-                                }
-                            }
-
-                            break;
-                        }
-                    }
-                }
-                else
-                {
-                    // seek to term that does exist:
-                    InputOutput<T> pair = Pairs[Random.Next(Pairs.Count)];
-                    Int32sRefFSTEnum.InputOutput<T> seekResult;
-                    if (Random.Next(3) == 2)
-                    {
-                        if (LuceneTestCase.VERBOSE)
-                        {
-                            Console.WriteLine("  do exist seekExact term=" + InputToString(inputMode, pair.Input));
-                        }
-                        seekResult = fstEnum_.SeekExact(pair.Input);
-                    }
-                    else if (Random.NextBoolean())
-                    {
-                        if (LuceneTestCase.VERBOSE)
-                        {
-                            Console.WriteLine("  do exist seekFloor " + InputToString(inputMode, pair.Input));
-                        }
-                        seekResult = fstEnum_.SeekFloor(pair.Input);
-                    }
-                    else
-                    {
-                        if (LuceneTestCase.VERBOSE)
-                        {
-                            Console.WriteLine("  do exist seekCeil " + InputToString(inputMode, pair.Input));
-                        }
-                        seekResult = fstEnum_.SeekCeil(pair.Input);
-                    }
-                    Assert.IsNotNull(seekResult);
-                    Assert.AreEqual(pair.Input, seekResult.Input, "got " + InputToString(inputMode, seekResult.Input) + " but expected " + InputToString(inputMode, pair.Input));
-                    Assert.IsTrue(OutputsEqual(pair.Output, seekResult.Output));
-                }
-            }
-
-            if (LuceneTestCase.VERBOSE)
-            {
-                Console.WriteLine("TEST: mixed next/seek");
-            }
-
-            // test mixed next/seek
-            num_ = LuceneTestCase.AtLeast(Random, 100);
-            for (int iter = 0; iter < num_; iter++)
-            {
-                if (LuceneTestCase.VERBOSE)
-                {
-                    Console.WriteLine("TEST: iter " + iter);
-                }
-                // reset:
-                fstEnum_ = new Int32sRefFSTEnum<T>(fst);
-                int upto = -1;
-                while (true)
-                {
-                    bool isDone = false;
-                    if (upto == Pairs.Count - 1 || Random.NextBoolean())
-                    {
-                        // next
-                        upto++;
-                        if (LuceneTestCase.VERBOSE)
-                        {
-                            Console.WriteLine("  do next");
-                        }
-                        isDone = fstEnum_.Next() == null;
-                    }
-                    else if (upto != -1 && upto < 0.75 * Pairs.Count && Random.NextBoolean())
-                    {
-                        int attempt = 0;
-                        for (; attempt < 10; attempt++)
-                        {
-                            Int32sRef term = ToIntsRef(GetRandomString(Random), inputMode);
-                            if (!termsMap.ContainsKey(term) && term.CompareTo(Pairs[upto].Input) > 0)
-                            {
-                                int pos = Pairs.BinarySearch(new InputOutput<T>(term, default(T)));
-                                Debug.Assert(pos < 0);
-                                upto = -(pos + 1);
-
-                                if (Random.NextBoolean())
-                                {
-                                    upto--;
-                                    Assert.IsTrue(upto != -1);
-                                    if (LuceneTestCase.VERBOSE)
-                                    {
-                                        Console.WriteLine("  do non-exist seekFloor(" + InputToString(inputMode, term) + ")");
-                                    }
-                                    isDone = fstEnum_.SeekFloor(term) == null;
-                                }
-                                else
-                                {
-                                    if (LuceneTestCase.VERBOSE)
-                                    {
-                                        Console.WriteLine("  do non-exist seekCeil(" + InputToString(inputMode, term) + ")");
-                                    }
-                                    isDone = fstEnum_.SeekCeil(term) == null;
-                                }
-
-                                break;
-                            }
-                        }
-                        if (attempt == 10)
-                        {
-                            continue;
-                        }
-                    }
-                    else
-                    {
-                        int inc = Random.Next(Pairs.Count - upto - 1);
-                        upto += inc;
-                        if (upto == -1)
-                        {
-                            upto = 0;
-                        }
-
-                        if (Random.NextBoolean())
-                        {
-                            if (LuceneTestCase.VERBOSE)
-                            {
-                                Console.WriteLine("  do seekCeil(" + InputToString(inputMode, Pairs[upto].Input) + ")");
-                            }
-                            isDone = fstEnum_.SeekCeil(Pairs[upto].Input) == null;
-                        }
-                        else
-                        {
-                            if (LuceneTestCase.VERBOSE)
-                            {
-                                Console.WriteLine("  do seekFloor(" + InputToString(inputMode, Pairs[upto].Input) + ")");
-                            }
-                            isDone = fstEnum_.SeekFloor(Pairs[upto].Input) == null;
-                        }
-                    }
-                    if (LuceneTestCase.VERBOSE)
-                    {
-                        if (!isDone)
-                        {
-                            Console.WriteLine("    got " + InputToString(inputMode, fstEnum_.Current.Input));
-                        }
-                        else
-                        {
-                            Console.WriteLine("    got null");
-                        }
-                    }
-
-                    if (upto == Pairs.Count)
-                    {
-                        Assert.IsTrue(isDone);
-                        break;
-                    }
-                    else
-                    {
-                        Assert.IsFalse(isDone);
-                        Assert.AreEqual(Pairs[upto].Input, fstEnum_.Current.Input);
-                        Assert.IsTrue(OutputsEqual(Pairs[upto].Output, fstEnum_.Current.Output));
-
-                        /*
-                          if (upto < pairs.size()-1) {
-                            int tryCount = 0;
-                            while (tryCount < 10) {
-                              final IntsRef t = toIntsRef(getRandomString(), inputMode);
-                              if (pairs.get(upto).input.compareTo(t) < 0) {
-                                final boolean expected = t.compareTo(pairs.get(upto+1).input) < 0;
-                                if (LuceneTestCase.VERBOSE) {
-                                  System.out.println("TEST: call beforeNext(" + inputToString(inputMode, t) + "); current=" + inputToString(inputMode, pairs.get(upto).input) + " next=" + inputToString(inputMode, pairs.get(upto+1).input) + " expected=" + expected);
-                                }
-                                Assert.AreEqual(expected, fstEnum.beforeNext(t));
-                                break;
-                              }
-                              tryCount++;
-                            }
-                          }
-                        */
-                    }
-                }
-            }
-        }
-
-        private class CountMinOutput<S>
-        {
-            internal int Count;
-            internal S Output;
-            internal S FinalOutput;
-            internal bool IsLeaf = true;
-            internal bool IsFinal;
-        }
-
-        // FST is pruned
-        private void VerifyPruned(int inputMode, FST<T> fst, int prune1, int prune2)
-        {
-            if (LuceneTestCase.VERBOSE)
-            {
-                Console.WriteLine("TEST: now verify pruned " + Pairs.Count + " terms; outputs=" + Outputs);
-                foreach (InputOutput<T> pair in Pairs)
-                {
-                    Console.WriteLine("  " + InputToString(inputMode, pair.Input) + ": " + Outputs.OutputToString(pair.Output));
-                }
-            }
-
-            // To validate the FST, we brute-force compute all prefixes
-            // in the terms, matched to their "common" outputs, prune that
-            // set according to the prune thresholds, then assert the FST
-            // matches that same set.
-
-            // NOTE: Crazy RAM intensive!!
-
-            //System.out.println("TEST: tally prefixes");
-
-            // build all prefixes
-            IDictionary<Int32sRef, CountMinOutput<T>> prefixes = new HashMap<Int32sRef, CountMinOutput<T>>();
-            Int32sRef scratch = new Int32sRef(10);
-            foreach (InputOutput<T> pair in Pairs)
-            {
-                scratch.CopyInt32s(pair.Input);
-                for (int idx = 0; idx <= pair.Input.Length; idx++)
-                {
-                    scratch.Length = idx;
-                    CountMinOutput<T> cmo = prefixes.ContainsKey(scratch) ? prefixes[scratch] : null;
-                    if (cmo == null)
-                    {
-                        cmo = new CountMinOutput<T>();
-                        cmo.Count = 1;
-                        cmo.Output = pair.Output;
-                        prefixes[Int32sRef.DeepCopyOf(scratch)] = cmo;
-                    }
-                    else
-                    {
-                        cmo.Count++;
-                        T output1 = cmo.Output;
-                        if (output1.Equals(Outputs.NoOutput))
-                        {
-                            output1 = Outputs.NoOutput;
-                        }
-                        T output2 = pair.Output;
-                        if (output2.Equals(Outputs.NoOutput))
-                        {
-                            output2 = Outputs.NoOutput;
-                        }
-                        cmo.Output = Outputs.Common(output1, output2);
-                    }
-                    if (idx == pair.Input.Length)
-                    {
-                        cmo.IsFinal = true;
-                        cmo.FinalOutput = cmo.Output;
-                    }
-                }
-            }
-
-            if (LuceneTestCase.VERBOSE)
-            {
-                Console.WriteLine("TEST: now prune");
-            }
-
-
-            // prune 'em
-            // LUCENENET NOTE: Altered this a bit to iterate in reverse by index rather
-            // than use an enumerator, since in .NET you cannot remove entries from a
-            // dictionary while enumerating it.
-            for (int i = prefixes.Count - 1; i >= 0; i--)
-            {
-                KeyValuePair<Int32sRef, CountMinOutput<T>> ent = prefixes.ElementAt(i);
-                Int32sRef prefix = ent.Key;
-                CountMinOutput<T> cmo = ent.Value;
-                if (LuceneTestCase.VERBOSE)
-                {
-                    Console.WriteLine("  term prefix=" + InputToString(inputMode, prefix, false) + " count=" + cmo.Count + " isLeaf=" + cmo.IsLeaf + " output=" + Outputs.OutputToString(cmo.Output) + " isFinal=" + cmo.IsFinal);
-                }
-                bool keep;
-                if (prune1 > 0)
-                {
-                    keep = cmo.Count >= prune1;
-                }
-                else
-                {
-                    Debug.Assert(prune2 > 0);
-                    if (prune2 > 1 && cmo.Count >= prune2)
-                    {
-                        keep = true;
-                    }
-                    else if (prefix.Length > 0)
-                    {
-                        // consult our parent
-                        scratch.Length = prefix.Length - 1;
-                        Array.Copy(prefix.Int32s, prefix.Offset, scratch.Int32s, 0, scratch.Length);
-                        CountMinOutput<T> cmo2 = prefixes.ContainsKey(scratch) ? prefixes[scratch] : null;
-                        //System.out.println("    parent count = " + (cmo2 == null ? -1 : cmo2.count));
-                        keep = cmo2 != null && ((prune2 > 1 && cmo2.Count >= prune2) || (prune2 == 1 && (cmo2.Count >= 2 || prefix.Length <= 1)));
-                    }
-                    else if (cmo.Count >= prune2)
-                    {
-                        keep = true;
-                    }
-                    else
-                    {
-                        keep = false;
-                    }
-                }
-
-                if (!keep)
-                {
-                    prefixes.Remove(prefix);
-                    //System.out.println("    remove");
-                }
-                else
-                {
-                    // clear isLeaf for all ancestors
-                    //System.out.println("    keep");
-                    scratch.CopyInt32s(prefix);
-                    scratch.Length--;
-                    while (scratch.Length >= 0)
-                    {
-                        CountMinOutput<T> cmo2 = prefixes.ContainsKey(scratch) ? prefixes[scratch] : null;
-                        if (cmo2 != null)
-                        {
-                            //System.out.println("    clear isLeaf " + inputToString(inputMode, scratch));
-                            cmo2.IsLeaf = false;
-                        }
-                        scratch.Length--;
-                    }
-                }
-            }
-
-            if (LuceneTestCase.VERBOSE)
-            {
-                Console.WriteLine("TEST: after prune");
-                foreach (KeyValuePair<Int32sRef, CountMinOutput<T>> ent in prefixes)
-                {
-                    Console.WriteLine("  " + InputToString(inputMode, ent.Key, false) + ": isLeaf=" + ent.Value.IsLeaf + " isFinal=" + ent.Value.IsFinal);
-                    if (ent.Value.IsFinal)
-                    {
-                        Console.WriteLine("    finalOutput=" + Outputs.OutputToString(ent.Value.FinalOutput));
-                    }
-                }
-            }
-
-            if (prefixes.Count <= 1)
-            {
-                Assert.IsNull(fst);
-                return;
-            }
-
-            Assert.IsNotNull(fst);
-
-            // make sure FST only enums valid prefixes
-            if (LuceneTestCase.VERBOSE)
-            {
-                Console.WriteLine("TEST: check pruned enum");
-            }
-            Int32sRefFSTEnum<T> fstEnum = new Int32sRefFSTEnum<T>(fst);
-            Int32sRefFSTEnum.InputOutput<T> current;
-            while ((current = fstEnum.Next()) != null)
-            {
-                if (LuceneTestCase.VERBOSE)
-                {
-                    Console.WriteLine("  fstEnum.next prefix=" + InputToString(inputMode, current.Input, false) + " output=" + Outputs.OutputToString(current.Output));
-                }
-                CountMinOutput<T> cmo = prefixes.ContainsKey(current.Input) ? prefixes[current.Input] : null;
-                Assert.IsNotNull(cmo);
-                Assert.IsTrue(cmo.IsLeaf || cmo.IsFinal);
-                //if (cmo.isFinal && !cmo.isLeaf) {
-                if (cmo.IsFinal)
-                {
-                    Assert.AreEqual(cmo.FinalOutput, current.Output);
-                }
-                else
-                {
-                    Assert.AreEqual(cmo.Output, current.Output);
-                }
-            }
-
-            // make sure all non-pruned prefixes are present in the FST
-            if (LuceneTestCase.VERBOSE)
-            {
-                Console.WriteLine("TEST: verify all prefixes");
-            }
-            int[] stopNode = new int[1];
-            foreach (KeyValuePair<Int32sRef, CountMinOutput<T>> ent in prefixes)
-            {
-                if (ent.Key.Length > 0)
-                {
-                    CountMinOutput<T> cmo = ent.Value;
-                    T output = Run(fst, ent.Key, stopNode);
-                    if (LuceneTestCase.VERBOSE)
-                    {
-                        Console.WriteLine("TEST: verify prefix=" + InputToString(inputMode, ent.Key, false) + " output=" + Outputs.OutputToString(cmo.Output));
-                    }
-                    // if (cmo.isFinal && !cmo.isLeaf) {
-                    if (cmo.IsFinal)
-                    {
-                        Assert.AreEqual(cmo.FinalOutput, output);
-                    }
-                    else
-                    {
-                        Assert.AreEqual(cmo.Output, output);
-                    }
-                    Assert.AreEqual(ent.Key.Length, stopNode[0]);
-                }
-            }
-        }
-    }
-}
\ No newline at end of file
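
The LUCENENET NOTE in the pruning loop above works around a .NET constraint:
Dictionary enumerators are invalidated when the collection is modified, so
removing entries inside a foreach over the dictionary throws. A minimal
standalone sketch of the two usual workarounds; this is illustrative only, not
part of the commit, and all names in it are hypothetical:

    using System;
    using System.Collections.Generic;
    using System.Linq;

    static class DictionaryPruneSketch
    {
        static void Main()
        {
            var prefixes = new Dictionary<string, int> { ["a"] = 1, ["ab"] = 3, ["abc"] = 2 };

            // Option 1: snapshot the keys, then remove freely from the dictionary.
            foreach (string key in prefixes.Keys.ToList())
            {
                if (prefixes[key] < 2)
                {
                    prefixes.Remove(key); // safe: we enumerate the snapshot, not 'prefixes'
                }
            }

            // Option 2 (the approach the ported test takes): index from the end, so a
            // removal cannot affect entries we have not visited yet. Note that
            // ElementAt(i) is O(n) on a dictionary, making this variant O(n^2) overall.
            for (int i = prefixes.Count - 1; i >= 0; i--)
            {
                KeyValuePair<string, int> entry = prefixes.ElementAt(i);
                if (entry.Value < 3)
                {
                    prefixes.Remove(entry.Key);
                }
            }

            Console.WriteLine(string.Join(", ", prefixes.Keys)); // prints: ab
        }
    }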


[47/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs b/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs
new file mode 100644
index 0000000..c2bdadf
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs
@@ -0,0 +1,221 @@
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.Codecs.Lucene40
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using AtomicReader = Lucene.Net.Index.AtomicReader;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using DocsEnum = Lucene.Net.Index.DocsEnum;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LineFileDocs = Lucene.Net.Util.LineFileDocs;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MatchNoBits = Lucene.Net.Util.Bits.MatchNoBits;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Terms = Lucene.Net.Index.Terms;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    // TODO: really this should be in BaseTestPF or somewhere else; it's a useful test!
+    [TestFixture]
+    public class TestReuseDocsEnum : LuceneTestCase
+    {
+        /// <summary>
+        /// LUCENENET specific: this method is non-static because
+        /// OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
+        }
+
+        [Test]
+        public virtual void TestReuseDocsEnumNoReuse()
+        {
+            Directory dir = NewDirectory();
+            Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat(OLD_FORMAT_IMPERSONATION_IS_ACTIVE));
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(cp));
+            int numdocs = AtLeast(20);
+            CreateRandomIndex(numdocs, writer, Random());
+            writer.Commit();
+
+            DirectoryReader open = DirectoryReader.Open(dir);
+            foreach (AtomicReaderContext ctx in open.Leaves)
+            {
+                AtomicReader indexReader = (AtomicReader)ctx.Reader;
+                Terms terms = indexReader.Terms("body");
+                TermsEnum iterator = terms.GetIterator(null);
+                IdentityHashMap<DocsEnum, bool?> enums = new IdentityHashMap<DocsEnum, bool?>();
+                MatchNoBits bits = new MatchNoBits(indexReader.MaxDoc);
+                while ((iterator.Next()) != null)
+                {
+                    DocsEnum docs = iterator.Docs(Random().NextBoolean() ? bits : new MatchNoBits(indexReader.MaxDoc), null, Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
+                    enums[docs] = true;
+                }
+
+                Assert.AreEqual(terms.Count, enums.Count);
+            }
+            IOUtils.Close(writer, open, dir);
+        }
+
+        // tests for reuse only if bits are the same either null or the same instance
+        [Test]
+        public virtual void TestReuseDocsEnumSameBitsOrNull()
+        {
+            Directory dir = NewDirectory();
+            Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat(OLD_FORMAT_IMPERSONATION_IS_ACTIVE));
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(cp));
+            int numdocs = AtLeast(20);
+            CreateRandomIndex(numdocs, writer, Random());
+            writer.Commit();
+
+            DirectoryReader open = DirectoryReader.Open(dir);
+            foreach (AtomicReaderContext ctx in open.Leaves)
+            {
+                Terms terms = ((AtomicReader)ctx.Reader).Terms("body");
+                TermsEnum iterator = terms.GetIterator(null);
+                IdentityHashMap<DocsEnum, bool?> enums = new IdentityHashMap<DocsEnum, bool?>();
+                MatchNoBits bits = new MatchNoBits(open.MaxDoc);
+                DocsEnum docs = null;
+                while ((iterator.Next()) != null)
+                {
+                    docs = iterator.Docs(bits, docs, Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
+                    enums[docs] = true;
+                }
+
+                Assert.AreEqual(1, enums.Count);
+                enums.Clear();
+                iterator = terms.GetIterator(null);
+                docs = null;
+                while ((iterator.Next()) != null)
+                {
+                    docs = iterator.Docs(new MatchNoBits(open.MaxDoc), docs, Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
+                    enums[docs] = true;
+                }
+                Assert.AreEqual(terms.Count, enums.Count);
+
+                enums.Clear();
+                iterator = terms.GetIterator(null);
+                docs = null;
+                while ((iterator.Next()) != null)
+                {
+                    docs = iterator.Docs(null, docs, Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
+                    enums[docs] = true;
+                }
+                Assert.AreEqual(1, enums.Count);
+            }
+            IOUtils.Close(writer, open, dir);
+        }
+
+        // make sure we never reuse from another reader even if it is the same field & codec etc
+        [Test]
+        public virtual void TestReuseDocsEnumDifferentReader()
+        {
+            Directory dir = NewDirectory();
+            Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat(OLD_FORMAT_IMPERSONATION_IS_ACTIVE));
+            MockAnalyzer analyzer = new MockAnalyzer(Random());
+            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
+
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetCodec(cp));
+            int numdocs = AtLeast(20);
+            CreateRandomIndex(numdocs, writer, Random());
+            writer.Commit();
+
+            DirectoryReader firstReader = DirectoryReader.Open(dir);
+            DirectoryReader secondReader = DirectoryReader.Open(dir);
+            IList<AtomicReaderContext> leaves = firstReader.Leaves;
+            IList<AtomicReaderContext> leaves2 = secondReader.Leaves;
+
+            foreach (AtomicReaderContext ctx in leaves)
+            {
+                Terms terms = ((AtomicReader)ctx.Reader).Terms("body");
+                TermsEnum iterator = terms.GetIterator(null);
+                IdentityHashMap<DocsEnum, bool?> enums = new IdentityHashMap<DocsEnum, bool?>();
+                MatchNoBits bits = new MatchNoBits(firstReader.MaxDoc);
+                iterator = terms.GetIterator(null);
+                DocsEnum docs = null;
+                BytesRef term = null;
+                while ((term = iterator.Next()) != null)
+                {
+                    docs = iterator.Docs(null, RandomDocsEnum("body", term, leaves2, bits), Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
+                    enums[docs] = true;
+                }
+                Assert.AreEqual(terms.Count, enums.Count);
+
+                iterator = terms.GetIterator(null);
+                enums.Clear();
+                docs = null;
+                while ((term = iterator.Next()) != null)
+                {
+                    docs = iterator.Docs(bits, RandomDocsEnum("body", term, leaves2, bits), Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
+                    enums[docs] = true;
+                }
+                Assert.AreEqual(terms.Count, enums.Count);
+            }
+            IOUtils.Close(writer, firstReader, secondReader, dir);
+        }
+
+        public virtual DocsEnum RandomDocsEnum(string field, BytesRef term, IList<AtomicReaderContext> readers, IBits bits)
+        {
+            if (Random().Next(10) == 0)
+            {
+                return null;
+            }
+            AtomicReader indexReader = (AtomicReader)readers[Random().Next(readers.Count)].Reader;
+            Terms terms = indexReader.Terms(field);
+            if (terms == null)
+            {
+                return null;
+            }
+            TermsEnum iterator = terms.GetIterator(null);
+            if (iterator.SeekExact(term))
+            {
+                return iterator.Docs(bits, null, Random().NextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
+            }
+            return null;
+        }
+
+        /// <summary>
+        /// populates a writer with random stuff. this must be fully reproducible with
+        /// the seed!
+        /// </summary>
+        public static void CreateRandomIndex(int numdocs, RandomIndexWriter writer, Random random)
+        {
+            LineFileDocs lineFileDocs = new LineFileDocs(random);
+
+            for (int i = 0; i < numdocs; i++)
+            {
+                writer.AddDocument(lineFileDocs.NextDoc());
+            }
+
+            lineFileDocs.Dispose();
+        }
+    }
+}
\ No newline at end of file
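
TestReuseDocsEnum counts distinct DocsEnum instances with an IdentityHashMap,
i.e. by reference identity rather than by Equals: when the codec reuses the
enum passed back in, every Docs(...) call returns the same instance and the map
stays at size 1. A hedged sketch of the same bookkeeping using only the BCL
(the type and member names below are hypothetical, not from Lucene.Net.Support):

    using System;
    using System.Collections.Generic;
    using System.Runtime.CompilerServices;

    // Compares objects strictly by reference, ignoring Equals/GetHashCode overrides.
    sealed class IdentityComparer<T> : IEqualityComparer<T> where T : class
    {
        public bool Equals(T x, T y) => ReferenceEquals(x, y);
        public int GetHashCode(T obj) => RuntimeHelpers.GetHashCode(obj);
    }

    static class ReuseCountSketch
    {
        static void Main()
        {
            var seen = new HashSet<object>(new IdentityComparer<object>());
            object reused = new object();

            for (int i = 0; i < 5; i++)
            {
                seen.Add(reused);   // same instance every iteration: count stays at 1
            }
            seen.Add(new object()); // a fresh instance bumps the count

            Console.WriteLine(seen.Count); // prints: 2
        }
    }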

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat.cs
new file mode 100644
index 0000000..fb309a5
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat.cs
@@ -0,0 +1,103 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Codecs.Lucene41
+{
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using BasePostingsFormatTestCase = Lucene.Net.Index.BasePostingsFormatTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// Tests BlockPostingsFormat
+    /// </summary>
+    public class TestBlockPostingsFormat : BasePostingsFormatTestCase
+    {
+        private readonly Codec Codec_Renamed = TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat());
+
+        protected override Codec Codec
+        {
+            get
+            {
+                return Codec_Renamed;
+            }
+        }
+
+
+        #region BasePostingsFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestDocsOnly()
+        {
+            base.TestDocsOnly();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqs()
+        {
+            base.TestDocsAndFreqs();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositions()
+        {
+            base.TestDocsAndFreqsAndPositions();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndPayloads()
+        {
+            base.TestDocsAndFreqsAndPositionsAndPayloads();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndOffsets()
+        {
+            base.TestDocsAndFreqsAndPositionsAndOffsets();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndOffsetsAndPayloads()
+        {
+            base.TestDocsAndFreqsAndPositionsAndOffsetsAndPayloads();
+        }
+
+        [Test]
+        public override void TestRandom()
+        {
+            base.TestRandom();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat2.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat2.cs b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat2.cs
new file mode 100644
index 0000000..f0f471d
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat2.cs
@@ -0,0 +1,166 @@
+using Lucene.Net.Documents;
+using NUnit.Framework;
+using System;
+using System.Text;
+
+namespace Lucene.Net.Codecs.Lucene41
+{
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using FieldType = FieldType;
+    using IIndexableField = Lucene.Net.Index.IIndexableField;
+    using IndexOptions = Lucene.Net.Index.IndexOptions;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using OpenMode = Lucene.Net.Index.OpenMode;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    /// <summary>
+    /// Tests special cases of BlockPostingsFormat
+    /// </summary>
+    [TestFixture]
+    public class TestBlockPostingsFormat2 : LuceneTestCase
+    {
+        internal Directory Dir;
+        internal RandomIndexWriter Iw;
+        internal IndexWriterConfig Iwc;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Dir = NewFSDirectory(CreateTempDir("testDFBlockSize"));
+            Iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            Iwc.SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat()));
+            Iw = new RandomIndexWriter(Random(), Dir, (IndexWriterConfig)Iwc.Clone());
+            Iw.RandomForceMerge = false; // we will force merge ourselves
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Iw.Dispose();
+            TestUtil.CheckIndex(Dir); // for some extra coverage, checkIndex before we forceMerge
+            Iwc.SetOpenMode(OpenMode.APPEND);
+            IndexWriter iw = new IndexWriter(Dir, (IndexWriterConfig)Iwc.Clone());
+            iw.ForceMerge(1);
+            iw.Dispose();
+            Dir.Dispose(); // just force a checkindex for now
+            base.TearDown();
+        }
+
+        private Document NewDocument()
+        {
+            Document doc = new Document();
+            foreach (IndexOptions option in Enum.GetValues(typeof(IndexOptions)))
+            {
+                var ft = new FieldType(TextField.TYPE_NOT_STORED)
+                {
+                    StoreTermVectors = true,
+                    StoreTermVectorOffsets = true,
+                    StoreTermVectorPositions = true,
+                    StoreTermVectorPayloads = true,
+                    IndexOptions = option
+                };
+                // turn on tvs for a cross-check, since we rely upon checkindex in this test (for now)
+                doc.Add(new Field(option.ToString(), "", ft));
+            }
+            return doc;
+        }
+
+        /// <summary>
+        /// tests terms with df = blocksize </summary>
+        [Test]
+        public virtual void TestDFBlockSize()
+        {
+            Document doc = NewDocument();
+            for (int i = 0; i < Lucene41PostingsFormat.BLOCK_SIZE; i++)
+            {
+                foreach (IIndexableField f in doc.Fields)
+                {
+                    ((Field)f).SetStringValue(f.Name + " " + f.Name + "_2");
+                }
+                Iw.AddDocument(doc);
+            }
+        }
+
+        /// <summary>
+        /// tests terms with df % blocksize = 0 </summary>
+        [Test]
+        public virtual void TestDFBlockSizeMultiple()
+        {
+            Document doc = NewDocument();
+            for (int i = 0; i < Lucene41PostingsFormat.BLOCK_SIZE * 16; i++)
+            {
+                foreach (IIndexableField f in doc.Fields)
+                {
+                    ((Field)f).SetStringValue(f.Name + " " + f.Name + "_2");
+                }
+                Iw.AddDocument(doc);
+            }
+        }
+
+        /// <summary>
+        /// tests terms with ttf = blocksize </summary>
+        [Test]
+        public virtual void TestTTFBlockSize()
+        {
+            Document doc = NewDocument();
+            for (int i = 0; i < Lucene41PostingsFormat.BLOCK_SIZE / 2; i++)
+            {
+                foreach (IIndexableField f in doc.Fields)
+                {
+                    ((Field)f).SetStringValue(f.Name + " " + f.Name + " " + f.Name + "_2 " + f.Name + "_2");
+                }
+                Iw.AddDocument(doc);
+            }
+        }
+
+        /// <summary>
+        /// tests terms with ttf % blocksize = 0 </summary>
+        [Test]
+        public virtual void TestTTFBlockSizeMultiple()
+        {
+            Document doc = NewDocument();
+            for (int i = 0; i < Lucene41PostingsFormat.BLOCK_SIZE / 2; i++)
+            {
+                foreach (IIndexableField f in doc.Fields)
+                {
+                    string proto = (f.Name + " " + f.Name + " " + f.Name + " " + f.Name + " " + f.Name + "_2 " + f.Name + "_2 " + f.Name + "_2 " + f.Name + "_2");
+                    StringBuilder val = new StringBuilder();
+                    for (int j = 0; j < 16; j++)
+                    {
+                        val.Append(proto);
+                        val.Append(" ");
+                    }
+                    ((Field)f).SetStringValue(val.ToString());
+                }
+                Iw.AddDocument(doc);
+            }
+        }
+    }
+}
\ No newline at end of file
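
For intuition about the boundary cases TestBlockPostingsFormat2 aims at
(assuming Lucene41PostingsFormat.BLOCK_SIZE = 128, its value in Lucene 4.x):
TestDFBlockSize writes each term once per document across 128 documents, so the
document frequency lands exactly on one full block (df = 128); TestTTFBlockSize
writes each term twice per document across 64 documents, so the total term
frequency hits the block size (ttf = 128) while df = 64 does not; and the
"Multiple" variants scale those counts up to 16 and 32 whole blocks
respectively.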

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
new file mode 100644
index 0000000..3e6b0e4
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
@@ -0,0 +1,571 @@
+using Lucene.Net.Attributes;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.Support;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Lucene41
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using System.IO;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using AtomicReader = Lucene.Net.Index.AtomicReader;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using AutomatonTestUtil = Lucene.Net.Util.Automaton.AutomatonTestUtil;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CompiledAutomaton = Lucene.Net.Util.Automaton.CompiledAutomaton;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum;
+    using DocsEnum = Lucene.Net.Index.DocsEnum;
+    using Document = Documents.Document;
+    using English = Lucene.Net.Util.English;
+    using Field = Field;
+    using FieldType = FieldType;
+    using FixedBitSet = Lucene.Net.Util.FixedBitSet;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockFixedLengthPayloadFilter = Lucene.Net.Analysis.MockFixedLengthPayloadFilter;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using MockVariableLengthPayloadFilter = Lucene.Net.Analysis.MockVariableLengthPayloadFilter;
+    using OpenMode = Lucene.Net.Index.OpenMode;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using RegExp = Lucene.Net.Util.Automaton.RegExp;
+    using SeekStatus = Lucene.Net.Index.TermsEnum.SeekStatus;
+    using Terms = Lucene.Net.Index.Terms;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+    using TokenFilter = Lucene.Net.Analysis.TokenFilter;
+    using Tokenizer = Lucene.Net.Analysis.Tokenizer;
+
+    /// <summary>
+    /// Tests partial enumeration (only pulling a subset of the indexed data)
+    /// </summary>
+    [TestFixture]
+    public class TestBlockPostingsFormat3 : LuceneTestCase
+    {
+        internal static readonly int MAXDOC = Lucene41PostingsFormat.BLOCK_SIZE * 20;
+
+        // creates 8 fields with different options and does "duels" of fields against each other
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout attribute in NUnit for .NET Core.
+        [Timeout(50000)]
+#endif
+        [Test, LongRunningTest, HasTimeout]
+        public virtual void Test()
+        {
+            Directory dir = NewDirectory();
+            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper(this, Analyzer.PER_FIELD_REUSE_STRATEGY);
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            iwc.SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat()));
+            // TODO we could actually add more fields implemented with different PFs
+            // or, just put this test into the usual rotation?
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, (IndexWriterConfig)iwc.Clone());
+            Document doc = new Document();
+            FieldType docsOnlyType = new FieldType(TextField.TYPE_NOT_STORED);
+            // turn this on for a cross-check
+            docsOnlyType.StoreTermVectors = true;
+            docsOnlyType.IndexOptions = IndexOptions.DOCS_ONLY;
+
+            FieldType docsAndFreqsType = new FieldType(TextField.TYPE_NOT_STORED);
+            // turn this on for a cross-check
+            docsAndFreqsType.StoreTermVectors = true;
+            docsAndFreqsType.IndexOptions = IndexOptions.DOCS_AND_FREQS;
+
+            FieldType positionsType = new FieldType(TextField.TYPE_NOT_STORED);
+            // turn these on for a cross-check
+            positionsType.StoreTermVectors = true;
+            positionsType.StoreTermVectorPositions = true;
+            positionsType.StoreTermVectorOffsets = true;
+            positionsType.StoreTermVectorPayloads = true;
+            FieldType offsetsType = new FieldType(positionsType);
+            offsetsType.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
+            Field field1 = new Field("field1docs", "", docsOnlyType);
+            Field field2 = new Field("field2freqs", "", docsAndFreqsType);
+            Field field3 = new Field("field3positions", "", positionsType);
+            Field field4 = new Field("field4offsets", "", offsetsType);
+            Field field5 = new Field("field5payloadsFixed", "", positionsType);
+            Field field6 = new Field("field6payloadsVariable", "", positionsType);
+            Field field7 = new Field("field7payloadsFixedOffsets", "", offsetsType);
+            Field field8 = new Field("field8payloadsVariableOffsets", "", offsetsType);
+            doc.Add(field1);
+            doc.Add(field2);
+            doc.Add(field3);
+            doc.Add(field4);
+            doc.Add(field5);
+            doc.Add(field6);
+            doc.Add(field7);
+            doc.Add(field8);
+            for (int i = 0; i < MAXDOC; i++)
+            {
+                string stringValue = Convert.ToString(i) + " verycommon " + English.IntToEnglish(i).Replace('-', ' ') + " " + TestUtil.RandomSimpleString(Random());
+                field1.SetStringValue(stringValue);
+                field2.SetStringValue(stringValue);
+                field3.SetStringValue(stringValue);
+                field4.SetStringValue(stringValue);
+                field5.SetStringValue(stringValue);
+                field6.SetStringValue(stringValue);
+                field7.SetStringValue(stringValue);
+                field8.SetStringValue(stringValue);
+                iw.AddDocument(doc);
+            }
+            iw.Dispose();
+            Verify(dir);
+            TestUtil.CheckIndex(dir); // for some extra coverage, checkIndex before we forceMerge
+            iwc.SetOpenMode(OpenMode.APPEND);
+            IndexWriter iw2 = new IndexWriter(dir, (IndexWriterConfig)iwc.Clone());
+            iw2.ForceMerge(1);
+            iw2.Dispose();
+            Verify(dir);
+            dir.Dispose();
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private readonly TestBlockPostingsFormat3 OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper(TestBlockPostingsFormat3 outerInstance, Analyzer.ReuseStrategy PER_FIELD_REUSE_STRATEGY)
+                : base(PER_FIELD_REUSE_STRATEGY)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader);
+                if (fieldName.Contains("payloadsFixed"))
+                {
+                    TokenFilter filter = new MockFixedLengthPayloadFilter(new Random(0), tokenizer, 1);
+                    return new TokenStreamComponents(tokenizer, filter);
+                }
+                else if (fieldName.Contains("payloadsVariable"))
+                {
+                    TokenFilter filter = new MockVariableLengthPayloadFilter(new Random(0), tokenizer);
+                    return new TokenStreamComponents(tokenizer, filter);
+                }
+                else
+                {
+                    return new TokenStreamComponents(tokenizer);
+                }
+            }
+        }
+
+        private void Verify(Directory dir)
+        {
+            DirectoryReader ir = DirectoryReader.Open(dir);
+            foreach (AtomicReaderContext leaf in ir.Leaves)
+            {
+                AtomicReader leafReader = (AtomicReader)leaf.Reader;
+                AssertTerms(leafReader.Terms("field1docs"), leafReader.Terms("field2freqs"), true);
+                AssertTerms(leafReader.Terms("field3positions"), leafReader.Terms("field4offsets"), true);
+                AssertTerms(leafReader.Terms("field4offsets"), leafReader.Terms("field5payloadsFixed"), true);
+                AssertTerms(leafReader.Terms("field5payloadsFixed"), leafReader.Terms("field6payloadsVariable"), true);
+                AssertTerms(leafReader.Terms("field6payloadsVariable"), leafReader.Terms("field7payloadsFixedOffsets"), true);
+                AssertTerms(leafReader.Terms("field7payloadsFixedOffsets"), leafReader.Terms("field8payloadsVariableOffsets"), true);
+            }
+            ir.Dispose();
+        }
+
+        // following code is almost an exact dup of code from TestDuelingCodecs: sorry!
+
+        public virtual void AssertTerms(Terms leftTerms, Terms rightTerms, bool deep)
+        {
+            if (leftTerms == null || rightTerms == null)
+            {
+                Assert.IsNull(leftTerms);
+                Assert.IsNull(rightTerms);
+                return;
+            }
+            AssertTermsStatistics(leftTerms, rightTerms);
+
+            // NOTE: we don't assert hasOffsets/hasPositions/hasPayloads because they are allowed to be different
+
+            TermsEnum leftTermsEnum = leftTerms.GetIterator(null);
+            TermsEnum rightTermsEnum = rightTerms.GetIterator(null);
+            AssertTermsEnum(leftTermsEnum, rightTermsEnum, true);
+
+            AssertTermsSeeking(leftTerms, rightTerms);
+
+            if (deep)
+            {
+                int numIntersections = AtLeast(3);
+                for (int i = 0; i < numIntersections; i++)
+                {
+                    string re = AutomatonTestUtil.RandomRegexp(Random());
+                    CompiledAutomaton automaton = new CompiledAutomaton((new RegExp(re, RegExp.NONE)).ToAutomaton());
+                    if (automaton.Type == CompiledAutomaton.AUTOMATON_TYPE.NORMAL)
+                    {
+                        // TODO: test start term too
+                        TermsEnum leftIntersection = leftTerms.Intersect(automaton, null);
+                        TermsEnum rightIntersection = rightTerms.Intersect(automaton, null);
+                        AssertTermsEnum(leftIntersection, rightIntersection, Rarely());
+                    }
+                }
+            }
+        }
+
+        private void AssertTermsSeeking(Terms leftTerms, Terms rightTerms)
+        {
+            TermsEnum leftEnum = null;
+            TermsEnum rightEnum = null;
+
+            // just an upper bound
+            int numTests = AtLeast(20);
+            Random random = Random();
+
+            // collect this number of terms from the left side
+            HashSet<BytesRef> tests = new HashSet<BytesRef>();
+            int numPasses = 0;
+            while (numPasses < 10 && tests.Count < numTests)
+            {
+                leftEnum = leftTerms.GetIterator(leftEnum);
+                BytesRef term = null;
+                while ((term = leftEnum.Next()) != null)
+                {
+                    int code = random.Next(10);
+                    if (code == 0)
+                    {
+                        // the term
+                        tests.Add(BytesRef.DeepCopyOf(term));
+                    }
+                    else if (code == 1)
+                    {
+                        // truncated subsequence of term
+                        term = BytesRef.DeepCopyOf(term);
+                        if (term.Length > 0)
+                        {
+                            // truncate it
+                            term.Length = random.Next(term.Length);
+                        }
+                    }
+                    else if (code == 2)
+                    {
+                        // term, but ensure a non-zero offset
+                        var newbytes = new byte[term.Length + 5];
+                        Array.Copy(term.Bytes, term.Offset, newbytes, 5, term.Length);
+                        tests.Add(new BytesRef(newbytes, 5, term.Length));
+                    }
+                }
+                numPasses++;
+            }
+
+            List<BytesRef> shuffledTests = new List<BytesRef>(tests);
+            Collections.Shuffle(shuffledTests);
+
+            foreach (BytesRef b in shuffledTests)
+            {
+                leftEnum = leftTerms.GetIterator(leftEnum);
+                rightEnum = rightTerms.GetIterator(rightEnum);
+
+                Assert.AreEqual(leftEnum.SeekExact(b), rightEnum.SeekExact(b));
+                Assert.AreEqual(leftEnum.SeekExact(b), rightEnum.SeekExact(b));
+
+                SeekStatus leftStatus;
+                SeekStatus rightStatus;
+
+                leftStatus = leftEnum.SeekCeil(b);
+                rightStatus = rightEnum.SeekCeil(b);
+                Assert.AreEqual(leftStatus, rightStatus);
+                if (leftStatus != SeekStatus.END)
+                {
+                    Assert.AreEqual(leftEnum.Term, rightEnum.Term);
+                }
+
+                leftStatus = leftEnum.SeekCeil(b);
+                rightStatus = rightEnum.SeekCeil(b);
+                Assert.AreEqual(leftStatus, rightStatus);
+                if (leftStatus != SeekStatus.END)
+                {
+                    Assert.AreEqual(leftEnum.Term, rightEnum.Term);
+                }
+            }
+        }
+
+        /// <summary>
+        /// checks collection-level statistics on Terms
+        /// </summary>
+        public virtual void AssertTermsStatistics(Terms leftTerms, Terms rightTerms)
+        {
+            Debug.Assert(leftTerms.Comparer == rightTerms.Comparer);
+            if (leftTerms.DocCount != -1 && rightTerms.DocCount != -1)
+            {
+                Assert.AreEqual(leftTerms.DocCount, rightTerms.DocCount);
+            }
+            if (leftTerms.SumDocFreq != -1 && rightTerms.SumDocFreq != -1)
+            {
+                Assert.AreEqual(leftTerms.SumDocFreq, rightTerms.SumDocFreq);
+            }
+            if (leftTerms.SumTotalTermFreq != -1 && rightTerms.SumTotalTermFreq != -1)
+            {
+                Assert.AreEqual(leftTerms.SumTotalTermFreq, rightTerms.SumTotalTermFreq);
+            }
+            if (leftTerms.Count != -1 && rightTerms.Count != -1)
+            {
+                Assert.AreEqual(leftTerms.Count, rightTerms.Count);
+            }
+        }
+
+        /// <summary>
+        /// checks the terms enum sequentially
+        /// if deep is false, it does a 'shallow' test that doesn't go down to the DocsEnums
+        /// </summary>
+        public virtual void AssertTermsEnum(TermsEnum leftTermsEnum, TermsEnum rightTermsEnum, bool deep)
+        {
+            BytesRef term;
+            IBits randomBits = new RandomBits(MAXDOC, Random().NextDouble(), Random());
+            DocsAndPositionsEnum leftPositions = null;
+            DocsAndPositionsEnum rightPositions = null;
+            DocsEnum leftDocs = null;
+            DocsEnum rightDocs = null;
+
+            while ((term = leftTermsEnum.Next()) != null)
+            {
+                Assert.AreEqual(term, rightTermsEnum.Next());
+                AssertTermStats(leftTermsEnum, rightTermsEnum);
+                if (deep)
+                {
+                    // with payloads + off
+                    AssertDocsAndPositionsEnum(leftPositions = leftTermsEnum.DocsAndPositions(null, leftPositions), rightPositions = rightTermsEnum.DocsAndPositions(null, rightPositions));
+                    AssertDocsAndPositionsEnum(leftPositions = leftTermsEnum.DocsAndPositions(randomBits, leftPositions), rightPositions = rightTermsEnum.DocsAndPositions(randomBits, rightPositions));
+
+                    AssertPositionsSkipping(leftTermsEnum.DocFreq, leftPositions = leftTermsEnum.DocsAndPositions(null, leftPositions), rightPositions = rightTermsEnum.DocsAndPositions(null, rightPositions));
+                    AssertPositionsSkipping(leftTermsEnum.DocFreq, leftPositions = leftTermsEnum.DocsAndPositions(randomBits, leftPositions), rightPositions = rightTermsEnum.DocsAndPositions(randomBits, rightPositions));
+                    // with payloads only
+                    AssertDocsAndPositionsEnum(leftPositions = leftTermsEnum.DocsAndPositions(null, leftPositions, DocsAndPositionsEnum.FLAG_PAYLOADS), rightPositions = rightTermsEnum.DocsAndPositions(null, rightPositions, DocsAndPositionsEnum.FLAG_PAYLOADS));
+                    AssertDocsAndPositionsEnum(leftPositions = leftTermsEnum.DocsAndPositions(randomBits, leftPositions, DocsAndPositionsEnum.FLAG_PAYLOADS), rightPositions = rightTermsEnum.DocsAndPositions(randomBits, rightPositions, DocsAndPositionsEnum.FLAG_PAYLOADS));
+
+                    AssertPositionsSkipping(leftTermsEnum.DocFreq, leftPositions = leftTermsEnum.DocsAndPositions(null, leftPositions, DocsAndPositionsEnum.FLAG_PAYLOADS), rightPositions = rightTermsEnum.DocsAndPositions(null, rightPositions, DocsAndPositionsEnum.FLAG_PAYLOADS));
+                    AssertPositionsSkipping(leftTermsEnum.DocFreq, leftPositions = leftTermsEnum.DocsAndPositions(randomBits, leftPositions, DocsAndPositionsEnum.FLAG_PAYLOADS), rightPositions = rightTermsEnum.DocsAndPositions(randomBits, rightPositions, DocsAndPositionsEnum.FLAG_PAYLOADS));
+
+                    // with offsets only
+                    AssertDocsAndPositionsEnum(leftPositions = leftTermsEnum.DocsAndPositions(null, leftPositions, DocsAndPositionsEnum.FLAG_OFFSETS), rightPositions = rightTermsEnum.DocsAndPositions(null, rightPositions, DocsAndPositionsEnum.FLAG_OFFSETS));
+                    AssertDocsAndPositionsEnum(leftPositions = leftTermsEnum.DocsAndPositions(randomBits, leftPositions, DocsAndPositionsEnum.FLAG_OFFSETS), rightPositions = rightTermsEnum.DocsAndPositions(randomBits, rightPositions, DocsAndPositionsEnum.FLAG_OFFSETS));
+
+                    AssertPositionsSkipping(leftTermsEnum.DocFreq, leftPositions = leftTermsEnum.DocsAndPositions(null, leftPositions, DocsAndPositionsEnum.FLAG_OFFSETS), rightPositions = rightTermsEnum.DocsAndPositions(null, rightPositions, DocsAndPositionsEnum.FLAG_OFFSETS));
+                    AssertPositionsSkipping(leftTermsEnum.DocFreq, leftPositions = leftTermsEnum.DocsAndPositions(randomBits, leftPositions, DocsAndPositionsEnum.FLAG_OFFSETS), rightPositions = rightTermsEnum.DocsAndPositions(randomBits, rightPositions, DocsAndPositionsEnum.FLAG_OFFSETS));
+
+                    // with positions only
+                    AssertDocsAndPositionsEnum(leftPositions = leftTermsEnum.DocsAndPositions(null, leftPositions, DocsEnum.FLAG_NONE), rightPositions = rightTermsEnum.DocsAndPositions(null, rightPositions, DocsEnum.FLAG_NONE));
+                    AssertDocsAndPositionsEnum(leftPositions = leftTermsEnum.DocsAndPositions(randomBits, leftPositions, DocsEnum.FLAG_NONE), rightPositions = rightTermsEnum.DocsAndPositions(randomBits, rightPositions, DocsEnum.FLAG_NONE));
+
+                    AssertPositionsSkipping(leftTermsEnum.DocFreq, leftPositions = leftTermsEnum.DocsAndPositions(null, leftPositions, DocsEnum.FLAG_NONE), rightPositions = rightTermsEnum.DocsAndPositions(null, rightPositions, DocsEnum.FLAG_NONE));
+                    AssertPositionsSkipping(leftTermsEnum.DocFreq, leftPositions = leftTermsEnum.DocsAndPositions(randomBits, leftPositions, DocsEnum.FLAG_NONE), rightPositions = rightTermsEnum.DocsAndPositions(randomBits, rightPositions, DocsEnum.FLAG_NONE));
+
+                    // with freqs:
+                    AssertDocsEnum(leftDocs = leftTermsEnum.Docs(null, leftDocs), rightDocs = rightTermsEnum.Docs(null, rightDocs));
+                    AssertDocsEnum(leftDocs = leftTermsEnum.Docs(randomBits, leftDocs), rightDocs = rightTermsEnum.Docs(randomBits, rightDocs));
+
+                    // w/o freqs:
+                    AssertDocsEnum(leftDocs = leftTermsEnum.Docs(null, leftDocs, DocsEnum.FLAG_NONE), rightDocs = rightTermsEnum.Docs(null, rightDocs, DocsEnum.FLAG_NONE));
+                    AssertDocsEnum(leftDocs = leftTermsEnum.Docs(randomBits, leftDocs, DocsEnum.FLAG_NONE), rightDocs = rightTermsEnum.Docs(randomBits, rightDocs, DocsEnum.FLAG_NONE));
+
+                    // with freqs:
+                    AssertDocsSkipping(leftTermsEnum.DocFreq, leftDocs = leftTermsEnum.Docs(null, leftDocs), rightDocs = rightTermsEnum.Docs(null, rightDocs));
+                    AssertDocsSkipping(leftTermsEnum.DocFreq, leftDocs = leftTermsEnum.Docs(randomBits, leftDocs), rightDocs = rightTermsEnum.Docs(randomBits, rightDocs));
+
+                    // w/o freqs:
+                    AssertDocsSkipping(leftTermsEnum.DocFreq, leftDocs = leftTermsEnum.Docs(null, leftDocs, DocsEnum.FLAG_NONE), rightDocs = rightTermsEnum.Docs(null, rightDocs, DocsEnum.FLAG_NONE));
+                    AssertDocsSkipping(leftTermsEnum.DocFreq, leftDocs = leftTermsEnum.Docs(randomBits, leftDocs, DocsEnum.FLAG_NONE), rightDocs = rightTermsEnum.Docs(randomBits, rightDocs, DocsEnum.FLAG_NONE));
+                }
+            }
+            Assert.IsNull(rightTermsEnum.Next());
+        }
+
+        /// <summary>
+        /// checks term-level statistics
+        /// </summary>
+        public virtual void AssertTermStats(TermsEnum leftTermsEnum, TermsEnum rightTermsEnum)
+        {
+            Assert.AreEqual(leftTermsEnum.DocFreq, rightTermsEnum.DocFreq);
+            if (leftTermsEnum.TotalTermFreq != -1 && rightTermsEnum.TotalTermFreq != -1)
+            {
+                Assert.AreEqual(leftTermsEnum.TotalTermFreq, rightTermsEnum.TotalTermFreq);
+            }
+        }
+
+        /// <summary>
+        /// checks docs + freqs + positions + payloads, sequentially
+        /// </summary>
+        public virtual void AssertDocsAndPositionsEnum(DocsAndPositionsEnum leftDocs, DocsAndPositionsEnum rightDocs)
+        {
+            if (leftDocs == null || rightDocs == null)
+            {
+                Assert.IsNull(leftDocs);
+                Assert.IsNull(rightDocs);
+                return;
+            }
+            Assert.AreEqual(-1, leftDocs.DocID);
+            Assert.AreEqual(-1, rightDocs.DocID);
+            int docid;
+            while ((docid = leftDocs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                Assert.AreEqual(docid, rightDocs.NextDoc());
+                int freq = leftDocs.Freq;
+                Assert.AreEqual(freq, rightDocs.Freq);
+                for (int i = 0; i < freq; i++)
+                {
+                    Assert.AreEqual(leftDocs.NextPosition(), rightDocs.NextPosition());
+                    // we don't assert offsets/payloads, they are allowed to be different
+                }
+            }
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, rightDocs.NextDoc());
+        }
+
+        /// <summary>
+        /// checks docs + freqs, sequentially
+        /// </summary>
+        public virtual void AssertDocsEnum(DocsEnum leftDocs, DocsEnum rightDocs)
+        {
+            if (leftDocs == null)
+            {
+                Assert.IsNull(rightDocs);
+                return;
+            }
+            Assert.AreEqual(-1, leftDocs.DocID);
+            Assert.AreEqual(-1, rightDocs.DocID);
+            int docid;
+            while ((docid = leftDocs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                Assert.AreEqual(docid, rightDocs.NextDoc());
+                // we don't assert freqs, they are allowed to be different
+            }
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, rightDocs.NextDoc());
+        }
+
+        /// <summary>
+        /// checks advancing docs
+        /// </summary>
+        public virtual void AssertDocsSkipping(int docFreq, DocsEnum leftDocs, DocsEnum rightDocs)
+        {
+            if (leftDocs == null)
+            {
+                Assert.IsNull(rightDocs);
+                return;
+            }
+            int docid = -1;
+            int averageGap = MAXDOC / (1 + docFreq);
+            int skipInterval = 16;
+
+            while (true)
+            {
+                if (Random().NextBoolean())
+                {
+                    // nextDoc()
+                    docid = leftDocs.NextDoc();
+                    Assert.AreEqual(docid, rightDocs.NextDoc());
+                }
+                else
+                {
+                    // advance()
+                    int skip = docid + (int)Math.Ceiling(Math.Abs(skipInterval + Random().NextDouble() * averageGap));
+                    docid = leftDocs.Advance(skip);
+                    Assert.AreEqual(docid, rightDocs.Advance(skip));
+                }
+
+                if (docid == DocIdSetIterator.NO_MORE_DOCS)
+                {
+                    return;
+                }
+                // we don't assert freqs, they are allowed to be different
+            }
+        }
+
+        /// <summary>
+        /// checks advancing docs + positions
+        /// </summary>
+        public virtual void AssertPositionsSkipping(int docFreq, DocsAndPositionsEnum leftDocs, DocsAndPositionsEnum rightDocs)
+        {
+            if (leftDocs == null || rightDocs == null)
+            {
+                Assert.IsNull(leftDocs);
+                Assert.IsNull(rightDocs);
+                return;
+            }
+
+            int docid = -1;
+            int averageGap = MAXDOC / (1 + docFreq);
+            int skipInterval = 16;
+
+            while (true)
+            {
+                if (Random().NextBoolean())
+                {
+                    // nextDoc()
+                    docid = leftDocs.NextDoc();
+                    Assert.AreEqual(docid, rightDocs.NextDoc());
+                }
+                else
+                {
+                    // advance()
+                    int skip = docid + (int)Math.Ceiling(Math.Abs(skipInterval + Random().NextDouble() * averageGap));
+                    docid = leftDocs.Advance(skip);
+                    Assert.AreEqual(docid, rightDocs.Advance(skip));
+                }
+
+                if (docid == DocIdSetIterator.NO_MORE_DOCS)
+                {
+                    return;
+                }
+                int freq = leftDocs.Freq;
+                Assert.AreEqual(freq, rightDocs.Freq);
+                for (int i = 0; i < freq; i++)
+                {
+                    Assert.AreEqual(leftDocs.NextPosition(), rightDocs.NextPosition());
+                    // we don't compare the payloads; it's allowed that one is empty, etc.
+                }
+            }
+        }
+
+        new private class RandomBits : IBits
+        {
+            internal FixedBitSet Bits;
+
+            internal RandomBits(int maxDoc, double pctLive, Random random)
+            {
+                Bits = new FixedBitSet(maxDoc);
+                for (int i = 0; i < maxDoc; i++)
+                {
+                    if (random.NextDouble() <= pctLive)
+                    {
+                        Bits.Set(i);
+                    }
+                }
+            }
+
+            public bool Get(int index)
+            {
+                return Bits.Get(index);
+            }
+
+            public int Length
+            {
+                get { return Bits.Length; }
+            }
+        }
+    }
+}
\ No newline at end of file
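
For a feel of the advance() targets chosen in AssertDocsSkipping and
AssertPositionsSkipping above (assuming BLOCK_SIZE = 128, so MAXDOC = 2560):
with docFreq = 100, averageGap = 2560 / (1 + 100) = 25 by integer division, and
each advance() jumps to docid + ceil(16 + U * 25) for U in [0, 1), i.e. between
16 and 41 documents past the current one. Randomly interleaving those jumps
with single nextDoc() steps exercises both the skip structures (skipInterval is
16 here) and ordinary sequential decoding.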

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene41/TestForUtil.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene41/TestForUtil.cs b/src/Lucene.Net.Tests/Codecs/Lucene41/TestForUtil.cs
new file mode 100644
index 0000000..2f6a7bc
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene41/TestForUtil.cs
@@ -0,0 +1,97 @@
+namespace Lucene.Net.Codecs.Lucene41
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Directory = Lucene.Net.Store.Directory;
+    using IndexInput = Lucene.Net.Store.IndexInput;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
+    [TestFixture]
+    public class TestForUtil : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestEncodeDecode()
+        {
+            int iterations = RandomInts.NextIntBetween(Random(), 1, 1000);
+            float AcceptableOverheadRatio = (float)Random().NextDouble();
+            int[] values = new int[(iterations - 1) * Lucene41PostingsFormat.BLOCK_SIZE + ForUtil.MAX_DATA_SIZE];
+            for (int i = 0; i < iterations; ++i)
+            {
+                int bpv = Random().Next(32);
+                if (bpv == 0)
+                {
+                    int value = RandomInts.NextIntBetween(Random(), 0, int.MaxValue);
+                    for (int j = 0; j < Lucene41PostingsFormat.BLOCK_SIZE; ++j)
+                    {
+                        values[i * Lucene41PostingsFormat.BLOCK_SIZE + j] = value;
+                    }
+                }
+                else
+                {
+                    for (int j = 0; j < Lucene41PostingsFormat.BLOCK_SIZE; ++j)
+                    {
+                        values[i * Lucene41PostingsFormat.BLOCK_SIZE + j] = RandomInts.NextIntBetween(Random(), 0, (int)PackedInt32s.MaxValue(bpv));
+                    }
+                }
+            }
+
+            Directory d = new RAMDirectory();
+            long endPointer;
+
+            {
+                // encode
+                IndexOutput @out = d.CreateOutput("test.bin", IOContext.DEFAULT);
+                ForUtil forUtil = new ForUtil(AcceptableOverheadRatio, @out);
+
+                for (int i = 0; i < iterations; ++i)
+                {
+                    forUtil.WriteBlock(Arrays.CopyOfRange(values, i * Lucene41PostingsFormat.BLOCK_SIZE, values.Length), new byte[Lucene41.ForUtil.MAX_ENCODED_SIZE], @out);
+                }
+                endPointer = @out.FilePointer;
+                @out.Dispose();
+            }
+
+            {
+                // decode
+                IndexInput @in = d.OpenInput("test.bin", IOContext.READ_ONCE);
+                ForUtil forUtil = new ForUtil(@in);
+                for (int i = 0; i < iterations; ++i)
+                {
+                    if (Random().NextBoolean())
+                    {
+                        forUtil.SkipBlock(@in);
+                        continue;
+                    }
+                    int[] restored = new int[Lucene41.ForUtil.MAX_DATA_SIZE];
+                    forUtil.ReadBlock(@in, new byte[Lucene41.ForUtil.MAX_ENCODED_SIZE], restored);
+                    Assert.AreEqual(Arrays.CopyOfRange(values, i * Lucene41PostingsFormat.BLOCK_SIZE, (i + 1) * Lucene41PostingsFormat.BLOCK_SIZE), Arrays.CopyOf(restored, Lucene41PostingsFormat.BLOCK_SIZE));
+                }
+                Assert.AreEqual(endPointer, @in.FilePointer);
+                @in.Dispose();
+            }
+        }
+    }
+}
\ No newline at end of file
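
TestEncodeDecode above fills each fixed-size block according to a random
bits-per-value (bpv): bpv == 0 models a block holding one repeated value,
while any other bpv bounds values by the largest bpv-bit integer (the role
PackedInt32s.MaxValue plays). A standalone sketch of that generation logic,
with hypothetical names and without the ForUtil API:

    using System;

    class BlockValuesSketch
    {
        const int BlockSize = 128; // stands in for Lucene41PostingsFormat.BLOCK_SIZE

        // Largest unsigned value that fits in bitsPerValue bits.
        static long MaxValue(int bitsPerValue) =>
            bitsPerValue >= 63 ? long.MaxValue : (1L << bitsPerValue) - 1;

        static int[] RandomBlock(Random random)
        {
            var block = new int[BlockSize];
            int bpv = random.Next(32); // bits per value, 0..31
            if (bpv == 0)
            {
                // A 0-bpv block encodes a single repeated value for the
                // whole block, so generate exactly that shape.
                int value = random.Next();
                for (int j = 0; j < BlockSize; j++)
                    block[j] = value;
            }
            else
            {
                int max = (int)Math.Min(MaxValue(bpv), int.MaxValue);
                for (int j = 0; j < BlockSize; j++)
                    block[j] = random.Next(max == int.MaxValue ? int.MaxValue : max + 1);
            }
            return block;
        }

        static void Main()
        {
            var block = RandomBlock(new Random(42));
            Console.WriteLine($"first value: {block[0]}, length: {block.Length}");
        }
    }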

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene41/TestLucene41StoredFieldsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene41/TestLucene41StoredFieldsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene41/TestLucene41StoredFieldsFormat.cs
new file mode 100644
index 0000000..0cbb28b
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene41/TestLucene41StoredFieldsFormat.cs
@@ -0,0 +1,146 @@
+namespace Lucene.Net.Codecs.Lucene41
+{
+    using Attributes;
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using BaseStoredFieldsFormatTestCase = Lucene.Net.Index.BaseStoredFieldsFormatTestCase;
+
+    public class TestLucene41StoredFieldsFormat : BaseStoredFieldsFormatTestCase
+    {
+        /// <summary>
+        /// LUCENENET specific: this setup method is non-static because
+        /// OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
+        }
+
+        protected override Codec Codec
+        {
+            get
+            {
+                return new Lucene41RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+            }
+        }
+
+
+        #region BaseStoredFieldsFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestRandomStoredFields()
+        {
+            base.TestRandomStoredFields();
+        }
+
+        [Test]
+        // LUCENE-1727: make sure doc fields are stored in order
+        public override void TestStoredFieldsOrder()
+        {
+            base.TestStoredFieldsOrder();
+        }
+
+        [Test]
+        // LUCENE-1219
+        public override void TestBinaryFieldOffsetLength()
+        {
+            base.TestBinaryFieldOffsetLength();
+        }
+
+        [Test]
+        public override void TestNumericField()
+        {
+            base.TestNumericField();
+        }
+
+        [Test]
+        public override void TestIndexedBit()
+        {
+            base.TestIndexedBit();
+        }
+
+        [Test]
+        public override void TestReadSkip()
+        {
+            base.TestReadSkip();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout on NUnit for .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public override void TestEmptyDocs()
+        {
+            base.TestEmptyDocs();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout on NUnit for .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public override void TestConcurrentReads()
+        {
+            base.TestConcurrentReads();
+        }
+
+        [Test]
+        public override void TestWriteReadMerge()
+        {
+            base.TestWriteReadMerge();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout on NUnit for .NET Core.
+        [Timeout(120000)]
+#endif
+        [Test, HasTimeout]
+        public override void TestBigDocuments()
+        {
+            base.TestBigDocuments();
+        }
+
+        [Test]
+        public override void TestBulkMergeWithDeletes()
+        {
+            base.TestBulkMergeWithDeletes();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file
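
The #region blocks above exist because, as the LUCENENET NOTE says, NUnit
tests declared on an abstract base class are not pulled into the correct
context by the Visual Studio runner; each concrete fixture therefore
re-declares the test and delegates to the base implementation. A minimal
sketch of the pattern (hypothetical class names):

    using NUnit.Framework;

    public abstract class BaseFormatTestCase
    {
        // The shared test logic lives in the base class; in the real code
        // these members are marked [Test] there as well.
        protected abstract string FormatName { get; }

        public virtual void TestRoundTrip()
        {
            Assert.IsNotNull(FormatName); // real format assertions would go here
        }
    }

    [TestFixture]
    public class DemoFormatTest : BaseFormatTestCase
    {
        protected override string FormatName { get { return "Demo"; } }

        // Re-declaring the test with [Test] and delegating to base makes
        // the runner discover it on the concrete fixture.
        [Test]
        public override void TestRoundTrip()
        {
            base.TestRoundTrip();
        }
    }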

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene42/TestLucene42DocValuesFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene42/TestLucene42DocValuesFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene42/TestLucene42DocValuesFormat.cs
new file mode 100644
index 0000000..f9c47ce
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene42/TestLucene42DocValuesFormat.cs
@@ -0,0 +1,581 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Codecs.Lucene42
+{
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using BaseCompressingDocValuesFormatTestCase = Lucene.Net.Index.BaseCompressingDocValuesFormatTestCase;
+
+    /// <summary>
+    /// Tests Lucene42DocValuesFormat
+    /// </summary>
+    public class TestLucene42DocValuesFormat : BaseCompressingDocValuesFormatTestCase
+    {
+        private Codec Codec_Renamed;
+
+        /// <summary>
+        /// LUCENENET specific: this setup method is non-static because
+        /// OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
+            Codec_Renamed = new Lucene42RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+        }
+
+        protected override Codec Codec
+        {
+            get
+            {
+                return Codec_Renamed;
+            }
+        }
+
+        protected internal override bool CodecAcceptsHugeBinaryValues(string field)
+        {
+            return false;
+        }
+
+
+        #region BaseCompressingDocValuesFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestUniqueValuesCompression()
+        {
+            base.TestUniqueValuesCompression();
+        }
+
+        [Test]
+        public override void TestDateCompression()
+        {
+            base.TestDateCompression();
+        }
+
+        [Test]
+        public override void TestSingleBigValueCompression()
+        {
+            base.TestSingleBigValueCompression();
+        }
+
+        #endregion
+
+        #region BaseDocValuesFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestOneNumber()
+        {
+            base.TestOneNumber();
+        }
+
+        [Test]
+        public override void TestOneFloat()
+        {
+            base.TestOneFloat();
+        }
+
+        [Test]
+        public override void TestTwoNumbers()
+        {
+            base.TestTwoNumbers();
+        }
+
+        [Test]
+        public override void TestTwoBinaryValues()
+        {
+            base.TestTwoBinaryValues();
+        }
+
+        [Test]
+        public override void TestTwoFieldsMixed()
+        {
+            base.TestTwoFieldsMixed();
+        }
+
+        [Test]
+        public override void TestThreeFieldsMixed()
+        {
+            base.TestThreeFieldsMixed();
+        }
+
+        [Test]
+        public override void TestThreeFieldsMixed2()
+        {
+            base.TestThreeFieldsMixed2();
+        }
+
+        [Test]
+        public override void TestTwoDocumentsNumeric()
+        {
+            base.TestTwoDocumentsNumeric();
+        }
+
+        [Test]
+        public override void TestTwoDocumentsMerged()
+        {
+            base.TestTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestBigNumericRange()
+        {
+            base.TestBigNumericRange();
+        }
+
+        [Test]
+        public override void TestBigNumericRange2()
+        {
+            base.TestBigNumericRange2();
+        }
+
+        [Test]
+        public override void TestBytes()
+        {
+            base.TestBytes();
+        }
+
+        [Test]
+        public override void TestBytesTwoDocumentsMerged()
+        {
+            base.TestBytesTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestSortedBytes()
+        {
+            base.TestSortedBytes();
+        }
+
+        [Test]
+        public override void TestSortedBytesTwoDocuments()
+        {
+            base.TestSortedBytesTwoDocuments();
+        }
+
+        [Test]
+        public override void TestSortedBytesThreeDocuments()
+        {
+            base.TestSortedBytesThreeDocuments();
+        }
+
+        [Test]
+        public override void TestSortedBytesTwoDocumentsMerged()
+        {
+            base.TestSortedBytesTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestSortedMergeAwayAllValues()
+        {
+            base.TestSortedMergeAwayAllValues();
+        }
+
+        [Test]
+        public override void TestBytesWithNewline()
+        {
+            base.TestBytesWithNewline();
+        }
+
+        [Test]
+        public override void TestMissingSortedBytes()
+        {
+            base.TestMissingSortedBytes();
+        }
+
+        [Test]
+        public override void TestSortedTermsEnum()
+        {
+            base.TestSortedTermsEnum();
+        }
+
+        [Test]
+        public override void TestEmptySortedBytes()
+        {
+            base.TestEmptySortedBytes();
+        }
+
+        [Test]
+        public override void TestEmptyBytes()
+        {
+            base.TestEmptyBytes();
+        }
+
+        [Test]
+        public override void TestVeryLargeButLegalBytes()
+        {
+            base.TestVeryLargeButLegalBytes();
+        }
+
+        [Test]
+        public override void TestVeryLargeButLegalSortedBytes()
+        {
+            base.TestVeryLargeButLegalSortedBytes();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnBytes()
+        {
+            base.TestCodecUsesOwnBytes();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnSortedBytes()
+        {
+            base.TestCodecUsesOwnSortedBytes();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnBytesEachTime()
+        {
+            base.TestCodecUsesOwnBytesEachTime();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnSortedBytesEachTime()
+        {
+            base.TestCodecUsesOwnSortedBytesEachTime();
+        }
+
+        /*
+         * Simple test case to show how to use the API
+         */
+        [Test]
+        public override void TestDocValuesSimple()
+        {
+            base.TestDocValuesSimple();
+        }
+
+        [Test]
+        public override void TestRandomSortedBytes()
+        {
+            base.TestRandomSortedBytes();
+        }
+
+        [Test]
+        public override void TestBooleanNumericsVsStoredFields()
+        {
+            base.TestBooleanNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestByteNumericsVsStoredFields()
+        {
+            base.TestByteNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestByteMissingVsFieldCache()
+        {
+            base.TestByteMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestShortNumericsVsStoredFields()
+        {
+            base.TestShortNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestShortMissingVsFieldCache()
+        {
+            base.TestShortMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestIntNumericsVsStoredFields()
+        {
+            base.TestIntNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestIntMissingVsFieldCache()
+        {
+            base.TestIntMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestLongNumericsVsStoredFields()
+        {
+            base.TestLongNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestLongMissingVsFieldCache()
+        {
+            base.TestLongMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestBinaryFixedLengthVsStoredFields()
+        {
+            base.TestBinaryFixedLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestBinaryVariableLengthVsStoredFields()
+        {
+            base.TestBinaryVariableLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedFixedLengthVsStoredFields()
+        {
+            base.TestSortedFixedLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedFixedLengthVsFieldCache()
+        {
+            base.TestSortedFixedLengthVsFieldCache();
+        }
+
+        [Test]
+        public override void TestSortedVariableLengthVsFieldCache()
+        {
+            base.TestSortedVariableLengthVsFieldCache();
+        }
+
+        [Test]
+        public override void TestSortedVariableLengthVsStoredFields()
+        {
+            base.TestSortedVariableLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetOneValue()
+        {
+            base.TestSortedSetOneValue();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoFields()
+        {
+            base.TestSortedSetTwoFields();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsMerged()
+        {
+            base.TestSortedSetTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoValues()
+        {
+            base.TestSortedSetTwoValues();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoValuesUnordered()
+        {
+            base.TestSortedSetTwoValuesUnordered();
+        }
+
+        [Test]
+        public override void TestSortedSetThreeValuesTwoDocs()
+        {
+            base.TestSortedSetThreeValuesTwoDocs();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsLastMissing()
+        {
+            base.TestSortedSetTwoDocumentsLastMissing();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsLastMissingMerge()
+        {
+            base.TestSortedSetTwoDocumentsLastMissingMerge();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsFirstMissing()
+        {
+            base.TestSortedSetTwoDocumentsFirstMissing();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsFirstMissingMerge()
+        {
+            base.TestSortedSetTwoDocumentsFirstMissingMerge();
+        }
+
+        [Test]
+        public override void TestSortedSetMergeAwayAllValues()
+        {
+            base.TestSortedSetMergeAwayAllValues();
+        }
+
+        [Test]
+        public override void TestSortedSetTermsEnum()
+        {
+            base.TestSortedSetTermsEnum();
+        }
+
+        [Test]
+        public override void TestSortedSetFixedLengthVsStoredFields()
+        {
+            base.TestSortedSetFixedLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetVariableLengthVsStoredFields()
+        {
+            base.TestSortedSetVariableLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetFixedLengthSingleValuedVsStoredFields()
+        {
+            base.TestSortedSetFixedLengthSingleValuedVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetVariableLengthSingleValuedVsStoredFields()
+        {
+            base.TestSortedSetVariableLengthSingleValuedVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetFixedLengthVsUninvertedField()
+        {
+            base.TestSortedSetFixedLengthVsUninvertedField();
+        }
+
+        [Test]
+        public override void TestSortedSetVariableLengthVsUninvertedField()
+        {
+            base.TestSortedSetVariableLengthVsUninvertedField();
+        }
+
+        [Test]
+        public override void TestGCDCompression()
+        {
+            base.TestGCDCompression();
+        }
+
+        [Test]
+        public override void TestZeros()
+        {
+            base.TestZeros();
+        }
+
+        [Test]
+        public override void TestZeroOrMin()
+        {
+            base.TestZeroOrMin();
+        }
+
+        [Test]
+        public override void TestTwoNumbersOneMissing()
+        {
+            base.TestTwoNumbersOneMissing();
+        }
+
+        [Test]
+        public override void TestTwoNumbersOneMissingWithMerging()
+        {
+            base.TestTwoNumbersOneMissingWithMerging();
+        }
+
+        [Test]
+        public override void TestThreeNumbersOneMissingWithMerging()
+        {
+            base.TestThreeNumbersOneMissingWithMerging();
+        }
+
+        [Test]
+        public override void TestTwoBytesOneMissing()
+        {
+            base.TestTwoBytesOneMissing();
+        }
+
+        [Test]
+        public override void TestTwoBytesOneMissingWithMerging()
+        {
+            base.TestTwoBytesOneMissingWithMerging();
+        }
+
+        [Test]
+        public override void TestThreeBytesOneMissingWithMerging()
+        {
+            base.TestThreeBytesOneMissingWithMerging();
+        }
+
+        // LUCENE-4853
+        [Test]
+        public override void TestHugeBinaryValues()
+        {
+            base.TestHugeBinaryValues();
+        }
+
+        // TODO: get this out of here and into the deprecated codecs (4.0, 4.2)
+        [Test]
+        public override void TestHugeBinaryValueLimit()
+        {
+            base.TestHugeBinaryValueLimit();
+        }
+
+        /// <summary>
+        /// Tests dv against stored fields with threads (binary/numeric/sorted, no missing)
+        /// </summary>
+        [Test]
+        public override void TestThreads()
+        {
+            base.TestThreads();
+        }
+
+        /// <summary>
+        /// Tests dv against stored fields with threads (all types + missing)
+        /// </summary>
+        [Test]
+        public override void TestThreads2()
+        {
+            base.TestThreads2();
+        }
+
+        // LUCENE-5218
+        [Test]
+        public override void TestEmptyBinaryValueOnPageSizes()
+        {
+            base.TestEmptyBinaryValueOnPageSizes();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file
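
As in the stored-fields fixture, the codec here is built once per fixture
instance inside [OneTimeSetUp] rather than in a static initializer, because
OLD_FORMAT_IMPERSONATION_IS_ACTIVE is per-instance in the .NET port. A
stripped-down sketch of that lifecycle (hypothetical names):

    using NUnit.Framework;

    [TestFixture]
    public class LazyCodecFixture
    {
        private string codecName; // stands in for the Codec_Renamed field

        // Runs once per fixture instance; expensive construction that the
        // Java original did statically happens here instead.
        [OneTimeSetUp]
        public void BeforeClass()
        {
            codecName = "Lucene42RW";
        }

        [Test]
        public void CodecIsAvailable()
        {
            Assert.IsNotNull(codecName);
        }
    }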


[34/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs b/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs
new file mode 100644
index 0000000..b8de01b
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDocumentsWriterDeleteQueue.cs
@@ -0,0 +1,302 @@
+using Lucene.Net.Search;
+using Lucene.Net.Support;
+using NUnit.Framework;
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Reflection;
+using System.Threading;
+
+namespace Lucene.Net.Index
+{
+    using BytesRef = Lucene.Net.Util.BytesRef;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements. See the NOTICE file distributed with this
+         * work for additional information regarding copyright ownership. The ASF
+         * licenses this file to You under the Apache License, Version 2.0 (the
+         * "License"); you may not use this file except in compliance with the License.
+         * You may obtain a copy of the License at
+         *
+         * http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+         * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+         * License for the specific language governing permissions and limitations under
+         * the License.
+         */
+
+    using DeleteSlice = Lucene.Net.Index.DocumentsWriterDeleteQueue.DeleteSlice;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+
+    /// <summary>
+    /// Unit test for <seealso cref="DocumentsWriterDeleteQueue"/>
+    /// </summary>
+    [TestFixture]
+    public class TestDocumentsWriterDeleteQueue : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestUpdateDeleteSlices()
+        {
+            DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
+            int size = 200 + Random().Next(500) * RANDOM_MULTIPLIER;
+            int?[] ids = new int?[size];
+            for (int i = 0; i < ids.Length; i++)
+            {
+                ids[i] = Random().Next();
+            }
+            DeleteSlice slice1 = queue.NewSlice();
+            DeleteSlice slice2 = queue.NewSlice();
+            BufferedUpdates bd1 = new BufferedUpdates();
+            BufferedUpdates bd2 = new BufferedUpdates();
+            int last1 = 0;
+            int last2 = 0;
+            HashSet<Term> uniqueValues = new HashSet<Term>();
+            for (int j = 0; j < ids.Length; j++)
+            {
+                int? i = ids[j];
+                // create an array here since we compare identity below against tailItem
+                Term[] term = new Term[] { new Term("id", i.ToString()) };
+                uniqueValues.Add(term[0]);
+                queue.AddDelete(term);
+                if (Random().Next(20) == 0 || j == ids.Length - 1)
+                {
+                    queue.UpdateSlice(slice1);
+                    Assert.IsTrue(slice1.IsTailItem(term));
+                    slice1.Apply(bd1, j);
+                    AssertAllBetween(last1, j, bd1, ids);
+                    last1 = j + 1;
+                }
+                if (Random().Next(10) == 5 || j == ids.Length - 1)
+                {
+                    queue.UpdateSlice(slice2);
+                    Assert.IsTrue(slice2.IsTailItem(term));
+                    slice2.Apply(bd2, j);
+                    AssertAllBetween(last2, j, bd2, ids);
+                    last2 = j + 1;
+                }
+                Assert.AreEqual(j + 1, queue.NumGlobalTermDeletes);
+            }
+            assertEquals(uniqueValues, new HashSet<Term>(bd1.terms.Keys));
+            assertEquals(uniqueValues, new HashSet<Term>(bd2.terms.Keys));
+            var frozenSet = new HashSet<Term>();
+            foreach (Term t in queue.FreezeGlobalBuffer(null).GetTermsEnumerable())
+            {
+                BytesRef bytesRef = new BytesRef();
+                bytesRef.CopyBytes(t.Bytes);
+                frozenSet.Add(new Term(t.Field, bytesRef));
+            }
+            assertEquals(uniqueValues, frozenSet);
+            Assert.AreEqual(0, queue.NumGlobalTermDeletes, "num deletes must be 0 after freeze");
+        }
+
+        private void AssertAllBetween(int start, int end, BufferedUpdates deletes, int?[] ids)
+        {
+            for (int i = start; i <= end; i++)
+            {
+                Assert.AreEqual(Convert.ToInt32(end), deletes.terms[new Term("id", ids[i].ToString())]);
+            }
+        }
+
+        [Test]
+        public virtual void TestClear()
+        {
+            DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
+            Assert.IsFalse(queue.AnyChanges());
+            queue.Clear();
+            Assert.IsFalse(queue.AnyChanges());
+            int size = 200 + Random().Next(500) * RANDOM_MULTIPLIER;
+            int termsSinceFreeze = 0;
+            int queriesSinceFreeze = 0;
+            for (int i = 0; i < size; i++)
+            {
+                Term term = new Term("id", "" + i);
+                if (Random().Next(10) == 0)
+                {
+                    queue.AddDelete(new TermQuery(term));
+                    queriesSinceFreeze++;
+                }
+                else
+                {
+                    queue.AddDelete(term);
+                    termsSinceFreeze++;
+                }
+                Assert.IsTrue(queue.AnyChanges());
+                if (Random().Next(10) == 0)
+                {
+                    queue.Clear();
+                    queue.TryApplyGlobalSlice();
+                    Assert.IsFalse(queue.AnyChanges());
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestAnyChanges()
+        {
+            DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
+            int size = 200 + Random().Next(500) * RANDOM_MULTIPLIER;
+            int termsSinceFreeze = 0;
+            int queriesSinceFreeze = 0;
+            for (int i = 0; i < size; i++)
+            {
+                Term term = new Term("id", "" + i);
+                if (Random().Next(10) == 0)
+                {
+                    queue.AddDelete(new TermQuery(term));
+                    queriesSinceFreeze++;
+                }
+                else
+                {
+                    queue.AddDelete(term);
+                    termsSinceFreeze++;
+                }
+                Assert.IsTrue(queue.AnyChanges());
+                if (Random().Next(5) == 0)
+                {
+                    FrozenBufferedUpdates freezeGlobalBuffer = queue.FreezeGlobalBuffer(null);
+                    Assert.AreEqual(termsSinceFreeze, freezeGlobalBuffer.termCount);
+                    Assert.AreEqual(queriesSinceFreeze, ((Query[])freezeGlobalBuffer.queries.Clone()).Length);
+                    queriesSinceFreeze = 0;
+                    termsSinceFreeze = 0;
+                    Assert.IsFalse(queue.AnyChanges());
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestPartiallyAppliedGlobalSlice()
+        {
+            DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
+            System.Reflection.FieldInfo field = typeof(DocumentsWriterDeleteQueue).GetField("globalBufferLock", 
+                BindingFlags.NonPublic | BindingFlags.GetField | BindingFlags.Instance);
+            ReentrantLock @lock = (ReentrantLock)field.GetValue(queue);
+            @lock.Lock();
+            ThreadClass t = new ThreadAnonymousInnerClassHelper(this, queue);
+            t.Start();
+            t.Join();
+            @lock.Unlock();
+            Assert.IsTrue(queue.AnyChanges(), "changes in del queue but not in slice yet");
+            queue.TryApplyGlobalSlice();
+            Assert.IsTrue(queue.AnyChanges(), "changes in global buffer");
+            FrozenBufferedUpdates freezeGlobalBuffer = queue.FreezeGlobalBuffer(null);
+            Assert.IsTrue(freezeGlobalBuffer.Any());
+            Assert.AreEqual(1, freezeGlobalBuffer.termCount);
+            Assert.IsFalse(queue.AnyChanges(), "all changes applied");
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestDocumentsWriterDeleteQueue OuterInstance;
+
+            private DocumentsWriterDeleteQueue Queue;
+
+            public ThreadAnonymousInnerClassHelper(TestDocumentsWriterDeleteQueue outerInstance, DocumentsWriterDeleteQueue queue)
+            {
+                this.OuterInstance = outerInstance;
+                this.Queue = queue;
+            }
+
+            public override void Run()
+            {
+                Queue.AddDelete(new Term("foo", "bar"));
+            }
+        }
+
+        [Test]
+        public virtual void TestStressDeleteQueue()
+        {
+            DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
+            HashSet<Term> uniqueValues = new HashSet<Term>();
+            int size = 10000 + Random().Next(500) * RANDOM_MULTIPLIER;
+            int?[] ids = new int?[size];
+            for (int i = 0; i < ids.Length; i++)
+            {
+                ids[i] = Random().Next();
+                uniqueValues.Add(new Term("id", ids[i].ToString()));
+            }
+            CountdownEvent latch = new CountdownEvent(1);
+            AtomicInt32 index = new AtomicInt32(0);
+            int numThreads = 2 + Random().Next(5);
+            UpdateThread[] threads = new UpdateThread[numThreads];
+            for (int i = 0; i < threads.Length; i++)
+            {
+                threads[i] = new UpdateThread(queue, index, ids, latch);
+                threads[i].Start();
+            }
+            latch.Signal();
+            for (int i = 0; i < threads.Length; i++)
+            {
+                threads[i].Join();
+            }
+
+            foreach (UpdateThread updateThread in threads)
+            {
+                DeleteSlice slice = updateThread.Slice;
+                queue.UpdateSlice(slice);
+                BufferedUpdates deletes = updateThread.Deletes;
+                slice.Apply(deletes, BufferedUpdates.MAX_INT32);
+                assertEquals(uniqueValues, new HashSet<Term>(deletes.terms.Keys));
+            }
+            queue.TryApplyGlobalSlice();
+            HashSet<Term> frozenSet = new HashSet<Term>();
+            foreach (Term t in queue.FreezeGlobalBuffer(null).GetTermsEnumerable())
+            {
+                BytesRef bytesRef = new BytesRef();
+                bytesRef.CopyBytes(t.Bytes);
+                frozenSet.Add(new Term(t.Field, bytesRef));
+            }
+            Assert.AreEqual(0, queue.NumGlobalTermDeletes, "num deletes must be 0 after freeze");
+            Assert.AreEqual(uniqueValues.Count, frozenSet.Count);
+            assertEquals(uniqueValues, frozenSet);
+        }
+
+        private class UpdateThread : ThreadClass
+        {
+            internal readonly DocumentsWriterDeleteQueue Queue;
+            internal readonly AtomicInt32 Index;
+            internal readonly int?[] Ids;
+            internal readonly DeleteSlice Slice;
+            internal readonly BufferedUpdates Deletes;
+            internal readonly CountdownEvent Latch;
+
+            protected internal UpdateThread(DocumentsWriterDeleteQueue queue, AtomicInt32 index, int?[] ids, CountdownEvent latch)
+            {
+                this.Queue = queue;
+                this.Index = index;
+                this.Ids = ids;
+                this.Slice = queue.NewSlice();
+                Deletes = new BufferedUpdates();
+                this.Latch = latch;
+            }
+
+            public override void Run()
+            {
+#if !NETSTANDARD
+                try
+                {
+#endif
+                    Latch.Wait();
+#if !NETSTANDARD
+                }
+                catch (ThreadInterruptedException e)
+                {
+                    throw new ThreadInterruptedException("Thread Interrupted Exception", e);
+                }
+#endif
+
+                int i = 0;
+                while ((i = Index.GetAndIncrement()) < Ids.Length)
+                {
+                    Term term = new Term("id", Ids[i].ToString());
+                    Queue.Add(term, Slice);
+                    Assert.IsTrue(Slice.IsTailItem(term));
+                    Slice.Apply(Deletes, BufferedUpdates.MAX_INT32);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
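
TestPartiallyAppliedGlobalSlice above reaches the queue's private
globalBufferLock via reflection so the test can hold the lock while another
thread adds a delete. The same .NET reflection idiom in isolation
(hypothetical Holder type):

    using System;
    using System.Reflection;

    class Holder
    {
        private readonly object secret = new object();
    }

    static class PrivateFieldDemo
    {
        // Reads a non-public instance field by name, the same trick the
        // test uses to grab "globalBufferLock".
        static object ReadPrivateField(object target, string fieldName)
        {
            FieldInfo field = target.GetType()
                .GetField(fieldName, BindingFlags.NonPublic | BindingFlags.Instance);
            if (field == null)
                throw new MissingFieldException(target.GetType().Name, fieldName);
            return field.GetValue(target);
        }

        static void Main()
        {
            var holder = new Holder();
            Console.WriteLine(ReadPrivateField(holder, "secret") != null); // True
        }
    }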

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDocumentsWriterStallControl.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDocumentsWriterStallControl.cs b/src/Lucene.Net.Tests/Index/TestDocumentsWriterStallControl.cs
new file mode 100644
index 0000000..9459fca
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDocumentsWriterStallControl.cs
@@ -0,0 +1,473 @@
+using System;
+using System.Collections.Generic;
+using System.Threading;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    /*
+             * Licensed to the Apache Software Foundation (ASF) under one or more
+             * contributor license agreements. See the NOTICE file distributed with this
+             * work for additional information regarding copyright ownership. The ASF
+             * licenses this file to You under the Apache License, Version 2.0 (the
+             * "License"); you may not use this file except in compliance with the License.
+             * You may obtain a copy of the License at
+             *
+             * http://www.apache.org/licenses/LICENSE-2.0
+             *
+             * Unless required by applicable law or agreed to in writing, software
+             * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+             * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+             * License for the specific language governing permissions and limitations under
+             * the License.
+             */
+
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /// <summary>
+    /// Tests for <seealso cref="DocumentsWriterStallControl"/>
+    /// </summary>
+    [TestFixture]
+    public class TestDocumentsWriterStallControl : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestSimpleStall()
+        {
+            DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl();
+
+            ctrl.UpdateStalled(false);
+            ThreadClass[] waitThreads = WaitThreads(AtLeast(1), ctrl);
+            Start(waitThreads);
+            Assert.IsFalse(ctrl.HasBlocked);
+            Assert.IsFalse(ctrl.AnyStalledThreads());
+            Join(waitThreads);
+
+            // now stall threads and wake them up again
+            ctrl.UpdateStalled(true);
+            waitThreads = WaitThreads(AtLeast(1), ctrl);
+            Start(waitThreads);
+            AwaitState(ThreadState.WaitSleepJoin, waitThreads);
+            Assert.IsTrue(ctrl.HasBlocked);
+            Assert.IsTrue(ctrl.AnyStalledThreads());
+            ctrl.UpdateStalled(false);
+            Assert.IsFalse(ctrl.AnyStalledThreads());
+            Join(waitThreads);
+        }
+
+        [Test]
+        public virtual void TestRandom()
+        {
+            DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl();
+            ctrl.UpdateStalled(false);
+
+            ThreadClass[] stallThreads = new ThreadClass[AtLeast(3)];
+            for (int i = 0; i < stallThreads.Length; i++)
+            {
+                int stallProbability = 1 + Random().Next(10);
+                stallThreads[i] = new ThreadAnonymousInnerClassHelper(ctrl, stallProbability);
+            }
+            Start(stallThreads);
+            long time = Environment.TickCount;
+            /*
+             * Use a 100-second timeout to make sure we do not hang forever;
+             * Join will fail in that case.
+             */
+            while ((Environment.TickCount - time) < 100 * 1000 && !Terminated(stallThreads))
+            {
+                ctrl.UpdateStalled(false);
+                if (Random().NextBoolean())
+                {
+                    Thread.Sleep(0);
+                }
+                else
+                {
+                    Thread.Sleep(1);
+                }
+            }
+            Join(stallThreads);
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private DocumentsWriterStallControl Ctrl;
+            private int StallProbability;
+
+            public ThreadAnonymousInnerClassHelper(DocumentsWriterStallControl ctrl, int stallProbability)
+            {
+                this.Ctrl = ctrl;
+                this.StallProbability = stallProbability;
+            }
+
+            public override void Run()
+            {
+                int iters = AtLeast(1000);
+                for (int j = 0; j < iters; j++)
+                {
+                    Ctrl.UpdateStalled(Random().Next(StallProbability) == 0);
+                    if (Random().Next(5) == 0) // thread 0 only updates
+                    {
+                        Ctrl.WaitIfStalled();
+                    }
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestAcquireReleaseRace()
+        {
+            DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl();
+            ctrl.UpdateStalled(false);
+            AtomicBoolean stop = new AtomicBoolean(false);
+            AtomicBoolean checkPoint = new AtomicBoolean(true);
+
+            int numStallers = AtLeast(1);
+            int numReleasers = AtLeast(1);
+            int numWaiters = AtLeast(1);
+            var sync = new Synchronizer(numStallers + numReleasers, numStallers + numReleasers + numWaiters);
+            var threads = new ThreadClass[numReleasers + numStallers + numWaiters];
+            IList<Exception> exceptions = new SynchronizedList<Exception>();
+            for (int i = 0; i < numReleasers; i++)
+            {
+                threads[i] = new Updater(stop, checkPoint, ctrl, sync, true, exceptions);
+            }
+            for (int i = numReleasers; i < numReleasers + numStallers; i++)
+            {
+                threads[i] = new Updater(stop, checkPoint, ctrl, sync, false, exceptions);
+            }
+            for (int i = numReleasers + numStallers; i < numReleasers + numStallers + numWaiters; i++)
+            {
+                threads[i] = new Waiter(stop, checkPoint, ctrl, sync, exceptions);
+            }
+
+            Start(threads);
+            int iters = AtLeast(10000);
+            float checkPointProbability = TEST_NIGHTLY ? 0.5f : 0.1f;
+            for (int i = 0; i < iters; i++)
+            {
+                if (checkPoint.Get())
+                {
+                    Assert.IsTrue(sync.UpdateJoin.Wait(new TimeSpan(0, 0, 0, 10)), "timed out waiting for update threads - deadlock?");
+                    if (exceptions.Count > 0)
+                    {
+                        foreach (Exception throwable in exceptions)
+                        {
+                            Console.WriteLine(throwable.ToString());
+                            Console.Write(throwable.StackTrace);
+                        }
+                        Assert.Fail("got exceptions in threads");
+                    }
+
+                    if (ctrl.HasBlocked && ctrl.IsHealthy)
+                    {
+                        AssertState(numReleasers, numStallers, numWaiters, threads, ctrl);
+                    }
+
+                    checkPoint.Set(false);
+                    sync.Waiter.Signal();
+                    sync.LeftCheckpoint.Wait();
+                }
+                Assert.IsFalse(checkPoint.Get());
+                Assert.AreEqual(0, sync.Waiter.CurrentCount);
+                if (checkPointProbability >= (float)Random().NextDouble())
+                {
+                    sync.Reset(numStallers + numReleasers, numStallers + numReleasers + numWaiters);
+                    checkPoint.Set(true);
+                }
+            }
+            if (!checkPoint.Get())
+            {
+                sync.Reset(numStallers + numReleasers, numStallers + numReleasers + numWaiters);
+                checkPoint.Set(true);
+            }
+
+            Assert.IsTrue(sync.UpdateJoin.Wait(new TimeSpan(0, 0, 0, 10)));
+            AssertState(numReleasers, numStallers, numWaiters, threads, ctrl);
+            checkPoint.Set(false);
+            stop.Set(true);
+            sync.Waiter.Signal();
+            sync.LeftCheckpoint.Wait();
+
+            for (int i = 0; i < threads.Length; i++)
+            {
+                ctrl.UpdateStalled(false);
+                threads[i].Join(2000);
+                if (threads[i].IsAlive && threads[i] is Waiter)
+                {
+                    if (threads[i].State == ThreadState.WaitSleepJoin)
+                    {
+                        Assert.Fail("waiter is not released - anyThreadsStalled: " + ctrl.AnyStalledThreads());
+                    }
+                }
+            }
+        }
+
+        private void AssertState(int numReleasers, int numStallers, int numWaiters, ThreadClass[] threads, DocumentsWriterStallControl ctrl)
+        {
+            int millisToSleep = 100;
+            while (true)
+            {
+                if (ctrl.HasBlocked && ctrl.IsHealthy)
+                {
+                    for (int n = numReleasers + numStallers; n < numReleasers + numStallers + numWaiters; n++)
+                    {
+                        if (ctrl.IsThreadQueued(threads[n]))
+                        {
+                            if (millisToSleep < 60000)
+                            {
+                                Thread.Sleep(millisToSleep);
+                                millisToSleep *= 2;
+                                break;
+                            }
+                            else
+                            {
+                                Assert.Fail("control claims no stalled threads but waiter seems to be blocked ");
+                            }
+                        }
+                    }
+                    break;
+                }
+                else
+                {
+                    break;
+                }
+            }
+        }
+
+        internal class Waiter : ThreadClass
+        {
+            internal Synchronizer Sync;
+            internal DocumentsWriterStallControl Ctrl;
+            internal AtomicBoolean CheckPoint;
+            internal AtomicBoolean Stop;
+            internal IList<Exception> Exceptions;
+
+            public Waiter(AtomicBoolean stop, AtomicBoolean checkPoint, DocumentsWriterStallControl ctrl, Synchronizer sync, IList<Exception> exceptions)
+                : base("waiter")
+            {
+                this.Stop = stop;
+                this.CheckPoint = checkPoint;
+                this.Ctrl = ctrl;
+                this.Sync = sync;
+                this.Exceptions = exceptions;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    while (!Stop.Get())
+                    {
+                        Ctrl.WaitIfStalled();
+                        if (CheckPoint.Get())
+                        {
+#if !NETSTANDARD
+                            try
+                            {
+#endif
+                                Assert.IsTrue(Sync.await());
+#if !NETSTANDARD
+                            }
+                            catch (ThreadInterruptedException e)
+                            {
+                                Console.WriteLine("[Waiter] got interrupted - wait count: " + Sync.Waiter.CurrentCount);
+                                throw new ThreadInterruptedException("Thread Interrupted Exception", e);
+                            }
+#endif
+                        }
+                    }
+                }
+                catch (Exception e)
+                {
+                    Console.WriteLine(e.ToString());
+                    Console.Write(e.StackTrace);
+                    Exceptions.Add(e);
+                }
+            }
+        }
+
+        internal class Updater : ThreadClass
+        {
+            internal Synchronizer Sync;
+            internal DocumentsWriterStallControl Ctrl;
+            internal AtomicBoolean CheckPoint;
+            internal AtomicBoolean Stop;
+            internal bool Release;
+            internal IList<Exception> Exceptions;
+
+            public Updater(AtomicBoolean stop, AtomicBoolean checkPoint, DocumentsWriterStallControl ctrl, Synchronizer sync, bool release, IList<Exception> exceptions)
+                : base("updater")
+            {
+                this.Stop = stop;
+                this.CheckPoint = checkPoint;
+                this.Ctrl = ctrl;
+                this.Sync = sync;
+                this.Release = release;
+                this.Exceptions = exceptions;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    while (!Stop.Get())
+                    {
+                        int internalIters = Release && Random().NextBoolean() ? AtLeast(5) : 1;
+                        for (int i = 0; i < internalIters; i++)
+                        {
+                            Ctrl.UpdateStalled(Random().NextBoolean());
+                        }
+                        if (CheckPoint.Get())
+                        {
+                            Sync.UpdateJoin.Signal();
+                            try
+                            {
+                                Assert.IsTrue(Sync.await());
+                            }
+#if !NETSTANDARD
+                            catch (ThreadInterruptedException e)
+                            {
+                                Console.WriteLine("[Updater] got interrupted - wait count: " + Sync.Waiter.CurrentCount);
+                                throw new ThreadInterruptedException("Thread Interrupted Exception", e);
+                            }
+#endif
+                            catch (Exception e)
+                            {
+                                Console.Write("signal failed with : " + e);
+                                throw e;
+                            }
+
+                            Sync.LeftCheckpoint.Signal();
+                        }
+                        if (Random().NextBoolean())
+                        {
+                            Thread.Sleep(0);
+                        }
+                    }
+                }
+                catch (Exception e)
+                {
+                    Console.WriteLine(e.ToString());
+                    Console.Write(e.StackTrace);
+                    Exceptions.Add(e);
+                }
+
+                if (!Sync.UpdateJoin.IsSet)
+                {
+                    Sync.UpdateJoin.Signal();
+                }
+            }
+        }
+
+        public static bool Terminated(ThreadClass[] threads)
+        {
+            foreach (ThreadClass thread in threads)
+            {
+                if (ThreadState.Stopped != thread.State)
+                {
+                    return false;
+                }
+            }
+            return true;
+        }
+
+        public static void Start(ThreadClass[] tostart)
+        {
+            foreach (ThreadClass thread in tostart)
+            {
+                thread.Start();
+            }
+            Thread.Sleep(1); // let them start
+        }
+
+        public static void Join(ThreadClass[] toJoin)
+        {
+            foreach (ThreadClass thread in toJoin)
+            {
+                thread.Join();
+            }
+        }
+
+        internal static ThreadClass[] WaitThreads(int num, DocumentsWriterStallControl ctrl)
+        {
+            ThreadClass[] array = new ThreadClass[num];
+            for (int i = 0; i < array.Length; i++)
+            {
+                array[i] = new ThreadAnonymousInnerClassHelper2(ctrl);
+            }
+            return array;
+        }
+
+        private class ThreadAnonymousInnerClassHelper2 : ThreadClass
+        {
+            private DocumentsWriterStallControl Ctrl;
+
+            public ThreadAnonymousInnerClassHelper2(DocumentsWriterStallControl ctrl)
+            {
+                this.Ctrl = ctrl;
+            }
+
+            public override void Run()
+            {
+                Ctrl.WaitIfStalled();
+            }
+        }
+
+        /// <summary>
+        /// Waits for all of the given threads to reach the specified state.
+        /// </summary>
+        public static void AwaitState(ThreadState state, params ThreadClass[] threads)
+        {
+            while (true)
+            {
+                bool done = true;
+                foreach (ThreadClass thread in threads)
+                {
+                    if (thread.State != state)
+                    {
+                        done = false;
+                        break;
+                    }
+                }
+                if (done)
+                {
+                    return;
+                }
+                if (Random().NextBoolean())
+                {
+                    Thread.Sleep(0);
+                }
+                else
+                {
+                    Thread.Sleep(1);
+                }
+            }
+        }
+
+        public sealed class Synchronizer
+        {
+            internal volatile CountdownEvent Waiter;
+            internal volatile CountdownEvent UpdateJoin;
+            internal volatile CountdownEvent LeftCheckpoint;
+
+            public Synchronizer(int numUpdater, int numThreads)
+            {
+                Reset(numUpdater, numThreads);
+            }
+
+            public void Reset(int numUpdaters, int numThreads)
+            {
+                this.Waiter = new CountdownEvent(1);
+                this.UpdateJoin = new CountdownEvent(numUpdaters);
+                this.LeftCheckpoint = new CountdownEvent(numUpdaters);
+            }
+
+            public bool @await()
+            {
+                return Waiter.Wait(new TimeSpan(0, 0, 0, 10));
+            }
+        }
+    }
+}
\ No newline at end of file
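
The Synchronizer above stands in for Java's CountDownLatch with .NET's
CountdownEvent: updater threads Signal() as they pass a checkpoint while the
coordinating thread Wait()s with a timeout (as @await() does). A minimal,
self-contained sketch of that handshake using only the BCL -- the names here
are illustrative, not from the Lucene.NET sources:

    using System;
    using System.Threading;

    static class CountdownHandshake
    {
        static void Main()
        {
            const int numWorkers = 4;
            CountdownEvent reached = new CountdownEvent(numWorkers); // counts workers at the checkpoint
            CountdownEvent go = new CountdownEvent(1);               // released once by the coordinator

            Thread[] workers = new Thread[numWorkers];
            for (int i = 0; i < numWorkers; i++)
            {
                workers[i] = new Thread(() =>
                {
                    reached.Signal(); // "I have arrived at the checkpoint"
                    go.Wait();        // block until the coordinator releases everyone
                });
                workers[i].Start();
            }

            // Coordinator: wait with a timeout, as Synchronizer.@await() does.
            if (!reached.Wait(TimeSpan.FromSeconds(10)))
                throw new TimeoutException("workers never reached the checkpoint");
            go.Signal();

            foreach (Thread w in workers) w.Join();
            reached.Dispose();
            go.Dispose();
        }
    }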

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDuelingCodecs.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDuelingCodecs.cs b/src/Lucene.Net.Tests/Index/TestDuelingCodecs.cs
new file mode 100644
index 0000000..4ac4e65
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDuelingCodecs.cs
@@ -0,0 +1,183 @@
+using Lucene.Net.Documents;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.Index
+{
+    using System.Text.RegularExpressions;
+    using Attributes;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using LineFileDocs = Lucene.Net.Util.LineFileDocs;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using NumericDocValuesField = NumericDocValuesField;
+    using SortedSetDocValuesField = SortedSetDocValuesField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// Compares one codec against another
+    /// </summary>
+    [TestFixture]
+    public class TestDuelingCodecs : LuceneTestCase
+    {
+        private Directory LeftDir;
+        private IndexReader LeftReader;
+        private Codec LeftCodec;
+
+        private Directory RightDir;
+        private IndexReader RightReader;
+        private Codec RightCodec;
+
+        private string Info; // for debugging
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+
+            // for now it's SimpleText vs Lucene46 (random postings format),
+            // as this gives the best overall coverage. when we have more
+            // codecs we should probably pick 2 from Codec.availableCodecs()
+
+            LeftCodec = Codec.ForName("SimpleText");
+            RightCodec = new RandomCodec(Random());
+
+            LeftDir = NewDirectory();
+            RightDir = NewDirectory();
+
+            long seed = Random().Next();
+
+            // must use same seed because of random payloads, etc
+            int maxTermLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
+            MockAnalyzer leftAnalyzer = new MockAnalyzer(new Random((int)seed));
+            leftAnalyzer.MaxTokenLength = maxTermLength;
+            MockAnalyzer rightAnalyzer = new MockAnalyzer(new Random((int)seed));
+            rightAnalyzer.MaxTokenLength = maxTermLength;
+
+            // but these can be different
+            // TODO: this turns this into a really big test of Multi*, is that what we want?
+            IndexWriterConfig leftConfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, leftAnalyzer);
+            leftConfig.SetCodec(LeftCodec);
+            // preserve docids
+            leftConfig.SetMergePolicy(NewLogMergePolicy());
+
+            IndexWriterConfig rightConfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, rightAnalyzer);
+            rightConfig.SetCodec(RightCodec);
+            // preserve docids
+            rightConfig.SetMergePolicy(NewLogMergePolicy());
+
+            // must use same seed because of random docvalues fields, etc
+            RandomIndexWriter leftWriter = new RandomIndexWriter(new Random((int)seed), LeftDir, leftConfig);
+            RandomIndexWriter rightWriter = new RandomIndexWriter(new Random((int)seed), RightDir, rightConfig);
+
+            int numdocs = AtLeast(100);
+            CreateRandomIndex(numdocs, leftWriter, seed);
+            CreateRandomIndex(numdocs, rightWriter, seed);
+
+            LeftReader = MaybeWrapReader(leftWriter.Reader);
+            leftWriter.Dispose();
+            RightReader = MaybeWrapReader(rightWriter.Reader);
+            rightWriter.Dispose();
+
+            // check that our readers are valid
+            TestUtil.CheckReader(LeftReader);
+            TestUtil.CheckReader(RightReader);
+
+            Info = "left: " + LeftCodec.ToString() + " / right: " + RightCodec.ToString();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            if (LeftReader != null)
+            {
+                LeftReader.Dispose();
+            }
+            if (RightReader != null)
+            {
+                RightReader.Dispose();
+            }
+
+            if (LeftDir != null)
+            {
+                LeftDir.Dispose();
+            }
+            if (RightDir != null)
+            {
+                RightDir.Dispose();
+            }
+
+            base.TearDown();
+        }
+
+        /// <summary>
+        /// Populates a writer with random stuff. This must be fully reproducible with the seed!
+        /// </summary>
+        public static void CreateRandomIndex(int numdocs, RandomIndexWriter writer, long seed)
+        {
+            Random random = new Random((int)seed);
+            // the primary source for our data is LineFileDocs; it's realistic.
+            LineFileDocs lineFileDocs = new LineFileDocs(random);
+
+            // LUCENENET: compile a regex so we don't have to do it in each loop (for regex.split())
+            Regex whiteSpace = new Regex("\\s+", RegexOptions.Compiled);
+
+            // TODO: we should add other fields that use things like docs&freqs but omit positions,
+            // because linefiledocs doesn't cover all the possibilities.
+            for (int i = 0; i < numdocs; i++)
+            {
+                Document document = lineFileDocs.NextDoc();
+                // grab the title and add some SortedSet instances for fun
+                string title = document.Get("titleTokenized");
+                string[] split = whiteSpace.Split(title);
+                foreach (string trash in split)
+                {
+                    document.Add(new SortedSetDocValuesField("sortedset", new BytesRef(trash)));
+                }
+                // add a numeric dv field sometimes
+                document.RemoveFields("sparsenumeric");
+                if (random.Next(4) == 2)
+                {
+                    document.Add(new NumericDocValuesField("sparsenumeric", random.Next()));
+                }
+                writer.AddDocument(document);
+            }
+
+            lineFileDocs.Dispose();
+        }
+
+        /// <summary>
+        /// checks the two indexes are equivalent
+        /// </summary>
+#if !NETSTANDARD
+        // LUCENENET: NUnit has no Timeout attribute on .NET Core.
+        [Timeout(120000)]
+#endif
+        [Test, HasTimeout]
+        public virtual void TestEquals()
+        {
+            AssertReaderEquals(Info, LeftReader, RightReader);
+        }
+    }
+}
\ No newline at end of file
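
CreateRandomIndex is deliberately driven by new Random((int)seed) rather than
the test framework's randomness, so calling it once per codec with the same
seed produces identical document streams on both sides. A tiny sketch of the
property this relies on (illustrative only):

    using System;

    static class SeedDemo
    {
        static void Main()
        {
            int seed = 42;
            Random left = new Random(seed);
            Random right = new Random(seed);

            // Same seed => identical sequences, so two index-building passes
            // driven this way make the same random choices in lockstep.
            for (int i = 0; i < 5; i++)
            {
                int l = left.Next(), r = right.Next();
                Console.WriteLine("{0} == {1}: {2}", l, r, l == r); // always True
            }
        }
    }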

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestExceedMaxTermLength.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestExceedMaxTermLength.cs b/src/Lucene.Net.Tests/Index/TestExceedMaxTermLength.cs
new file mode 100644
index 0000000..aa3470d
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestExceedMaxTermLength.cs
@@ -0,0 +1,108 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// Tests that a useful exception is thrown when attempting to index a term that is
+    /// too large
+    /// </summary>
+    /// <seealso cref= IndexWriter#MAX_TERM_LENGTH </seealso>
+    [TestFixture]
+    public class TestExceedMaxTermLength : LuceneTestCase
+    {
+        private static readonly int MinTestTermLength = IndexWriter.MAX_TERM_LENGTH + 1;
+        private static readonly int MaxTestTermLength = IndexWriter.MAX_TERM_LENGTH * 2;
+
+        internal Directory Dir = null;
+
+        [SetUp]
+        public virtual void CreateDir()
+        {
+            Dir = NewDirectory();
+        }
+
+        [TearDown]
+        public virtual void DestroyDir()
+        {
+            Dir.Dispose();
+            Dir = null;
+        }
+
+        [Test]
+        public virtual void Test()
+        {
+            IndexWriter w = new IndexWriter(Dir, NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            try
+            {
+                FieldType ft = new FieldType();
+                ft.IsIndexed = true;
+                ft.IsStored = Random().NextBoolean();
+                ft.Freeze();
+
+                Document doc = new Document();
+                if (Random().NextBoolean())
+                {
+                    // totally ok short field value
+                    doc.Add(new Field(TestUtil.RandomSimpleString(Random(), 1, 10), TestUtil.RandomSimpleString(Random(), 1, 10), ft));
+                }
+                // problematic field
+                string name = TestUtil.RandomSimpleString(Random(), 1, 50);
+                string value = TestUtil.RandomSimpleString(Random(), MinTestTermLength, MaxTestTermLength);
+                Field f = new Field(name, value, ft);
+                if (Random().NextBoolean())
+                {
+                    // totally ok short field value
+                    doc.Add(new Field(TestUtil.RandomSimpleString(Random(), 1, 10), TestUtil.RandomSimpleString(Random(), 1, 10), ft));
+                }
+                doc.Add(f);
+
+                try
+                {
+                    w.AddDocument(doc);
+                    Assert.Fail("Did not get an exception from adding a monster term");
+                }
+                catch (System.ArgumentException e)
+                {
+                    string maxLengthMsg = Convert.ToString(IndexWriter.MAX_TERM_LENGTH);
+                    string msg = e.Message;
+                    Assert.IsTrue(msg.Contains("immense term"), "IllegalArgumentException didn't mention 'immense term': " + msg);
+                    Assert.IsTrue(msg.Contains(maxLengthMsg), "IllegalArgumentException didn't mention max length (" + maxLengthMsg + "): " + msg);
+                    Assert.IsTrue(msg.Contains(name), "IllegalArgumentException didn't mention field name (" + name + "): " + msg);
+                }
+            }
+            finally
+            {
+                w.Dispose();
+            }
+        }
+    }
+}
\ No newline at end of file
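
The try/AddDocument/Assert.Fail/catch shape above is the classic
expected-exception pattern. With NUnit's Assert.Throws the same checks can be
written more compactly; the sketch below uses a toy AddTerm and MaxTermLength
as stand-ins for IndexWriter.AddDocument and IndexWriter.MAX_TERM_LENGTH:

    using System;
    using NUnit.Framework;

    [TestFixture]
    public class MonsterTermPatternTest
    {
        private const int MaxTermLength = 16; // stand-in for IndexWriter.MAX_TERM_LENGTH

        // Stand-in for AddDocument rejecting an immense term.
        private static void AddTerm(string field, string term)
        {
            if (term.Length > MaxTermLength)
                throw new ArgumentException("Document contains at least one immense term in field=\""
                    + field + "\" (max length " + MaxTermLength + ")");
        }

        [Test]
        public void RejectsImmenseTerm()
        {
            var e = Assert.Throws<ArgumentException>(
                () => AddTerm("body", new string('x', MaxTermLength + 1)));
            StringAssert.Contains("immense term", e.Message);
            StringAssert.Contains(MaxTermLength.ToString(), e.Message);
        }
    }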

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestFieldInfos.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestFieldInfos.cs b/src/Lucene.Net.Tests/Index/TestFieldInfos.cs
new file mode 100644
index 0000000..35f8d8a
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestFieldInfos.cs
@@ -0,0 +1,126 @@
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using FieldInfosReader = Lucene.Net.Codecs.FieldInfosReader;
+    using FieldInfosWriter = Lucene.Net.Codecs.FieldInfosWriter;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    //import org.cnlp.utils.properties.ResourceBundleHelper;
+
+    [TestFixture]
+    public class TestFieldInfos : LuceneTestCase
+    {
+        private Document TestDoc = new Document();
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            DocHelper.SetupDoc(TestDoc);
+        }
+
+        public virtual FieldInfos CreateAndWriteFieldInfos(Directory dir, string filename)
+        {
+            //Positive test of FieldInfos
+            Assert.IsTrue(TestDoc != null);
+            FieldInfos.Builder builder = new FieldInfos.Builder();
+            foreach (IIndexableField field in TestDoc)
+            {
+                builder.AddOrUpdate(field.Name, field.FieldType);
+            }
+            FieldInfos fieldInfos = builder.Finish();
+            //Since the complement is stored as well in the fields map
+            Assert.IsTrue(fieldInfos.Count == DocHelper.All.Count); // this is all because we are using the no-arg constructor
+
+            IndexOutput output = dir.CreateOutput(filename, NewIOContext(Random()));
+            Assert.IsTrue(output != null);
+            //Use a RAMOutputStream
+
+            FieldInfosWriter writer = Codec.Default.FieldInfosFormat.FieldInfosWriter;
+            writer.Write(dir, filename, "", fieldInfos, IOContext.DEFAULT);
+            output.Dispose();
+            return fieldInfos;
+        }
+
+        public virtual FieldInfos ReadFieldInfos(Directory dir, string filename)
+        {
+            FieldInfosReader reader = Codec.Default.FieldInfosFormat.FieldInfosReader;
+            return reader.Read(dir, filename, "", IOContext.DEFAULT);
+        }
+
+        [Test]
+        public virtual void Test()
+        {
+            string name = "testFile";
+            Directory dir = NewDirectory();
+            FieldInfos fieldInfos = CreateAndWriteFieldInfos(dir, name);
+
+            FieldInfos readIn = ReadFieldInfos(dir, name);
+            Assert.IsTrue(fieldInfos.Count == readIn.Count);
+            FieldInfo info = readIn.FieldInfo("textField1");
+            Assert.IsTrue(info != null);
+            Assert.IsTrue(info.HasVectors == false);
+            Assert.IsTrue(info.OmitsNorms == false);
+
+            info = readIn.FieldInfo("textField2");
+            Assert.IsTrue(info != null);
+            Assert.IsTrue(info.OmitsNorms == false);
+
+            info = readIn.FieldInfo("textField3");
+            Assert.IsTrue(info != null);
+            Assert.IsTrue(info.HasVectors == false);
+            Assert.IsTrue(info.OmitsNorms == true);
+
+            info = readIn.FieldInfo("omitNorms");
+            Assert.IsTrue(info != null);
+            Assert.IsTrue(info.HasVectors == false);
+            Assert.IsTrue(info.OmitsNorms == true);
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestReadOnly()
+        {
+            string name = "testFile";
+            Directory dir = NewDirectory();
+            FieldInfos fieldInfos = CreateAndWriteFieldInfos(dir, name);
+            FieldInfos readOnly = ReadFieldInfos(dir, name);
+            AssertReadOnly(readOnly, fieldInfos);
+            dir.Dispose();
+        }
+
+        private void AssertReadOnly(FieldInfos readOnly, FieldInfos modifiable)
+        {
+            Assert.AreEqual(modifiable.Count, readOnly.Count);
+            // assert we can iterate
+            foreach (FieldInfo fi in readOnly)
+            {
+                Assert.AreEqual(fi.Name, modifiable.FieldInfo(fi.Number).Name);
+            }
+        }
+    }
+}
\ No newline at end of file
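
CreateAndWriteFieldInfos followed by ReadFieldInfos is a write/read round-trip:
persist through the codec, read back, and compare field by field. The same
round-trip shape in miniature, with a toy line-based serializer standing in
for the FieldInfosWriter/FieldInfosReader pair (all names invented for
illustration):

    using System;
    using System.IO;
    using System.Linq;

    static class RoundTripSketch
    {
        // Toy "writer": one "name,number" line per field.
        static void Write(string path, (string Name, int Number)[] fields)
        {
            File.WriteAllLines(path, fields.Select(f => f.Name + "," + f.Number));
        }

        // Toy "reader": parse the lines back.
        static (string Name, int Number)[] Read(string path)
        {
            return File.ReadAllLines(path)
                .Select(l => l.Split(','))
                .Select(p => (p[0], int.Parse(p[1])))
                .ToArray();
        }

        static void Main()
        {
            (string Name, int Number)[] original = { ("textField1", 0), ("omitNorms", 1) };
            string path = Path.GetTempFileName();
            try
            {
                Write(path, original);
                var readIn = Read(path);

                // Mirrors the assertions above: same count, and each read-in
                // entry matches the original entry with the same number.
                if (readIn.Length != original.Length) throw new Exception("count mismatch");
                foreach (var fi in readIn)
                    if (original[fi.Number].Name != fi.Name) throw new Exception("name mismatch");
                Console.WriteLine("round-trip ok");
            }
            finally { File.Delete(path); }
        }
    }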

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestFieldsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestFieldsReader.cs b/src/Lucene.Net.Tests/Index/TestFieldsReader.cs
new file mode 100644
index 0000000..e539561
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestFieldsReader.cs
@@ -0,0 +1,286 @@
+using System;
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using System.IO;
+    using BaseDirectory = Lucene.Net.Store.BaseDirectory;
+    using BufferedIndexInput = Lucene.Net.Store.BufferedIndexInput;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using DocumentStoredFieldVisitor = DocumentStoredFieldVisitor;
+    using Field = Field;
+    using IndexInput = Lucene.Net.Store.IndexInput;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestFieldsReader : LuceneTestCase
+    {
+        private static Directory Dir;
+        private static Document TestDoc;
+        private static FieldInfos.Builder FieldInfos = null;
+
+        /// <summary>
+        /// LUCENENET specific:
+        /// this method is non-static because NewIndexWriterConfig is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            TestDoc = new Document();
+            FieldInfos = new FieldInfos.Builder();
+            DocHelper.SetupDoc(TestDoc);
+            foreach (IIndexableField field in TestDoc)
+            {
+                FieldInfos.AddOrUpdate(field.Name, field.FieldType);
+            }
+            Dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy());
+            conf.MergePolicy.NoCFSRatio = 0.0;
+            IndexWriter writer = new IndexWriter(Dir, conf);
+            writer.AddDocument(TestDoc);
+            writer.Dispose();
+            FaultyIndexInput.DoFail = false;
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            Dir.Dispose();
+            Dir = null;
+            FieldInfos = null;
+            TestDoc = null;
+        }
+
+        [Test]
+        public virtual void Test()
+        {
+            Assert.IsTrue(Dir != null);
+            Assert.IsTrue(FieldInfos != null);
+            IndexReader reader = DirectoryReader.Open(Dir);
+            Document doc = reader.Document(0);
+            Assert.IsTrue(doc != null);
+            Assert.IsTrue(doc.GetField(DocHelper.TEXT_FIELD_1_KEY) != null);
+
+            Field field = (Field)doc.GetField(DocHelper.TEXT_FIELD_2_KEY);
+            Assert.IsTrue(field != null);
+            Assert.IsTrue(field.FieldType.StoreTermVectors);
+
+            Assert.IsFalse(field.FieldType.OmitNorms);
+            Assert.IsTrue(field.FieldType.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
+
+            field = (Field)doc.GetField(DocHelper.TEXT_FIELD_3_KEY);
+            Assert.IsTrue(field != null);
+            Assert.IsFalse(field.FieldType.StoreTermVectors);
+            Assert.IsTrue(field.FieldType.OmitNorms);
+            Assert.IsTrue(field.FieldType.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
+
+            field = (Field)doc.GetField(DocHelper.NO_TF_KEY);
+            Assert.IsTrue(field != null);
+            Assert.IsFalse(field.FieldType.StoreTermVectors);
+            Assert.IsFalse(field.FieldType.OmitNorms);
+            Assert.IsTrue(field.FieldType.IndexOptions == IndexOptions.DOCS_ONLY);
+
+            DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor(DocHelper.TEXT_FIELD_3_KEY);
+            reader.Document(0, visitor);
+            IList<IIndexableField> fields = visitor.Document.Fields;
+            Assert.AreEqual(1, fields.Count);
+            Assert.AreEqual(DocHelper.TEXT_FIELD_3_KEY, fields[0].Name);
+            reader.Dispose();
+        }
+
+        public class FaultyFSDirectory : BaseDirectory
+        {
+            internal Directory FsDir;
+
+            public FaultyFSDirectory(DirectoryInfo dir)
+            {
+                FsDir = NewFSDirectory(dir);
+                m_lockFactory = FsDir.LockFactory;
+            }
+
+            public override IndexInput OpenInput(string name, IOContext context)
+            {
+                return new FaultyIndexInput(FsDir.OpenInput(name, context));
+            }
+
+            public override string[] ListAll()
+            {
+                return FsDir.ListAll();
+            }
+
+            [Obsolete("this method will be removed in 5.0")]
+            public override bool FileExists(string name)
+            {
+                return FsDir.FileExists(name);
+            }
+
+            public override void DeleteFile(string name)
+            {
+                FsDir.DeleteFile(name);
+            }
+
+            public override long FileLength(string name)
+            {
+                return FsDir.FileLength(name);
+            }
+
+            public override IndexOutput CreateOutput(string name, IOContext context)
+            {
+                return FsDir.CreateOutput(name, context);
+            }
+
+            public override void Sync(ICollection<string> names)
+            {
+                FsDir.Sync(names);
+            }
+
+            public override void Dispose()
+            {
+                FsDir.Dispose();
+            }
+        }
+
+        private class FaultyIndexInput : BufferedIndexInput
+        {
+            internal IndexInput @delegate;
+            internal static bool DoFail;
+            internal int Count;
+
+            internal FaultyIndexInput(IndexInput @delegate)
+                : base("FaultyIndexInput(" + @delegate + ")", BufferedIndexInput.BUFFER_SIZE)
+            {
+                this.@delegate = @delegate;
+            }
+
+            internal virtual void SimOutage()
+            {
+                if (DoFail && Count++ % 2 == 1)
+                {
+                    throw new IOException("Simulated network outage");
+                }
+            }
+
+            protected override void ReadInternal(byte[] b, int offset, int length)
+            {
+                SimOutage();
+                @delegate.Seek(FilePointer);
+                @delegate.ReadBytes(b, offset, length);
+            }
+
+            protected override void SeekInternal(long pos)
+            {
+            }
+
+            public override long Length
+            {
+                get { return @delegate.Length; }
+            }
+
+            public override void Dispose()
+            {
+                @delegate.Dispose();
+            }
+
+            public override object Clone()
+            {
+                FaultyIndexInput i = new FaultyIndexInput((IndexInput)@delegate.Clone());
+                // seek the clone to our current position
+                try
+                {
+                    i.Seek(FilePointer);
+                }
+                catch (IOException e)
+                {
+                    // preserve the original failure instead of swallowing it
+                    throw new Exception("Seek failed while cloning FaultyIndexInput", e);
+                }
+                return i;
+            }
+        }
+
+        // LUCENE-1262
+        [Test]
+        public virtual void TestExceptions()
+        {
+            DirectoryInfo indexDir = CreateTempDir("testfieldswriterexceptions");
+
+            try
+            {
+                Directory dir = new FaultyFSDirectory(indexDir);
+                IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE);
+                IndexWriter writer = new IndexWriter(dir, iwc);
+                for (int i = 0; i < 2; i++)
+                {
+                    writer.AddDocument(TestDoc);
+                }
+                writer.ForceMerge(1);
+                writer.Dispose();
+
+                IndexReader reader = DirectoryReader.Open(dir);
+
+                FaultyIndexInput.DoFail = true;
+
+                bool exc = false;
+
+                for (int i = 0; i < 2; i++)
+                {
+                    try
+                    {
+                        reader.Document(i);
+                    }
+#pragma warning disable 168
+                    catch (IOException ioe)
+#pragma warning restore 168
+                    {
+                        // expected
+                        exc = true;
+                    }
+                    try
+                    {
+                        reader.Document(i);
+                    }
+#pragma warning disable 168
+                    catch (IOException ioe)
+#pragma warning restore 168
+                    {
+                        // expected
+                        exc = true;
+                    }
+                }
+                Assert.IsTrue(exc);
+                reader.Dispose();
+                dir.Dispose();
+            }
+            finally
+            {
+                System.IO.Directory.Delete(indexDir.FullName, true);
+            }
+        }
+    }
+}
\ No newline at end of file
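
FaultyFSDirectory/FaultyIndexInput is a fault-injection decorator: it forwards
everything to the real input, but once DoFail is flipped it throws a simulated
IOException on every other read. The same decorator idea over a plain Stream,
as a sketch (not Lucene.NET API):

    using System;
    using System.IO;

    // Simulates a flaky medium: every other read fails once DoFail is set.
    class FaultyStream : Stream
    {
        private readonly Stream inner;
        private int readCount;
        public bool DoFail { get; set; }

        public FaultyStream(Stream inner) { this.inner = inner; }

        public override int Read(byte[] buffer, int offset, int length)
        {
            if (DoFail && readCount++ % 2 == 1)
                throw new IOException("Simulated network outage");
            return inner.Read(buffer, offset, length);
        }

        // Pass-throughs required by Stream:
        public override bool CanRead { get { return inner.CanRead; } }
        public override bool CanSeek { get { return inner.CanSeek; } }
        public override bool CanWrite { get { return false; } }
        public override long Length { get { return inner.Length; } }
        public override long Position
        {
            get { return inner.Position; }
            set { inner.Position = value; }
        }
        public override void Flush() { inner.Flush(); }
        public override long Seek(long offset, SeekOrigin origin) { return inner.Seek(offset, origin); }
        public override void SetLength(long value) { throw new NotSupportedException(); }
        public override void Write(byte[] buffer, int offset, int count) { throw new NotSupportedException(); }
    }

With DoFail = true, the first Read succeeds and the second throws, matching
SimOutage's count++ % 2 == 1 cadence.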

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestFilterAtomicReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestFilterAtomicReader.cs b/src/Lucene.Net.Tests/Index/TestFilterAtomicReader.cs
new file mode 100644
index 0000000..f22c0ee
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestFilterAtomicReader.cs
@@ -0,0 +1,224 @@
+using System;
+using System.Linq;
+using System.Reflection;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+
+    [TestFixture]
+    public class TestFilterAtomicReader : LuceneTestCase
+    {
+        private class TestReader : FilterAtomicReader
+        {
+            /// <summary>
+            /// Filter that only permits terms containing 'e'. </summary>
+            private class TestFields : FilterFields
+            {
+                internal TestFields(Fields @in)
+                    : base(@in)
+                {
+                }
+
+                public override Terms GetTerms(string field)
+                {
+                    return new TestTerms(base.GetTerms(field));
+                }
+            }
+
+            private class TestTerms : FilterTerms
+            {
+                internal TestTerms(Terms @in)
+                    : base(@in)
+                {
+                }
+
+                public override TermsEnum GetIterator(TermsEnum reuse)
+                {
+                    return new TestTermsEnum(base.GetIterator(reuse));
+                }
+            }
+
+            private class TestTermsEnum : FilterTermsEnum
+            {
+                public TestTermsEnum(TermsEnum @in)
+                    : base(@in)
+                {
+                }
+
+                /// <summary>
+                /// Scan for terms containing the letter 'e'. </summary>
+                public override BytesRef Next()
+                {
+                    BytesRef text;
+                    while ((text = m_input.Next()) != null)
+                    {
+                        if (text.Utf8ToString().IndexOf('e') != -1)
+                        {
+                            return text;
+                        }
+                    }
+                    return null;
+                }
+
+                public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, int flags)
+                {
+                    return new TestPositions(base.DocsAndPositions(liveDocs, reuse == null ? null : ((FilterDocsAndPositionsEnum)reuse).m_input, flags));
+                }
+            }
+
+            /// <summary>
+            /// Filter that only returns odd-numbered documents. </summary>
+            private class TestPositions : FilterDocsAndPositionsEnum
+            {
+                public TestPositions(DocsAndPositionsEnum input)
+                    : base(input)
+                {
+                }
+
+                /// <summary>
+                /// Scan for odd-numbered documents. </summary>
+                public override int NextDoc()
+                {
+                    int doc;
+                    while ((doc = m_input.NextDoc()) != NO_MORE_DOCS)
+                    {
+                        if ((doc % 2) == 1)
+                        {
+                            return doc;
+                        }
+                    }
+                    return NO_MORE_DOCS;
+                }
+            }
+
+            public TestReader(IndexReader reader)
+                : base(SlowCompositeReaderWrapper.Wrap(reader))
+            {
+            }
+
+            public override Fields Fields
+            {
+                get { return new TestFields(base.Fields); }
+            }
+        }
+
+        /// <summary>
+        /// Tests the IndexReader.getFieldNames implementation </summary>
+        /// <exception cref="Exception"> on error </exception>
+        [Test]
+        public virtual void TestFilterIndexReader()
+        {
+            Directory directory = NewDirectory();
+
+            IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            Document d1 = new Document();
+            d1.Add(NewTextField("default", "one two", Field.Store.YES));
+            writer.AddDocument(d1);
+
+            Document d2 = new Document();
+            d2.Add(NewTextField("default", "one three", Field.Store.YES));
+            writer.AddDocument(d2);
+
+            Document d3 = new Document();
+            d3.Add(NewTextField("default", "two four", Field.Store.YES));
+            writer.AddDocument(d3);
+
+            writer.Dispose();
+
+            Directory target = NewDirectory();
+
+            // We mess with the postings so this can fail:
+            ((BaseDirectoryWrapper)target).CrossCheckTermVectorsOnClose = false;
+
+            writer = new IndexWriter(target, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            IndexReader reader = new TestReader(DirectoryReader.Open(directory));
+            writer.AddIndexes(reader);
+            writer.Dispose();
+            reader.Dispose();
+            reader = DirectoryReader.Open(target);
+
+            TermsEnum terms = MultiFields.GetTerms(reader, "default").GetIterator(null);
+            while (terms.Next() != null)
+            {
+                Assert.IsTrue(terms.Term.Utf8ToString().IndexOf('e') != -1);
+            }
+
+            Assert.AreEqual(TermsEnum.SeekStatus.FOUND, terms.SeekCeil(new BytesRef("one")));
+
+            DocsAndPositionsEnum positions = terms.DocsAndPositions(MultiFields.GetLiveDocs(reader), null);
+            while (positions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                Assert.IsTrue((positions.DocID % 2) == 1);
+            }
+
+            reader.Dispose();
+            directory.Dispose();
+            target.Dispose();
+        }
+
+        private static void CheckOverrideMethods(Type clazz)
+        {
+            Type superClazz = clazz.GetTypeInfo().BaseType;
+            foreach (MethodInfo m in superClazz.GetMethods())
+            {
+                // LUCENENET specific - since we changed to using a property for Attributes rather than a method,
+                // we need to reflect that as get_Attributes here.
+                if (m.IsStatic || m.IsAbstract || m.IsFinal || /*m.Synthetic ||*/ m.Name.Equals("get_Attributes"))
+                {
+                    continue;
+                }
+                // The point of these checks is to ensure that methods that have a default
+                // impl through other methods are not overridden. This keeps the number of
+                // methods that must be overridden for a working impl minimal and prevents some
+                // traps: for example, think about having getCoreCacheKey delegate to the
+                // filtered impl by default
+                MethodInfo subM = clazz.GetMethod(m.Name, m.GetParameters().Select(p => p.ParameterType).ToArray());
+                if (subM.DeclaringType == clazz && m.DeclaringType != typeof(object) && m.DeclaringType != subM.DeclaringType)
+                {
+                    Assert.Fail(clazz + " overrides " + m + " although it has a default impl");
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestOverrideMethods()
+        {
+            CheckOverrideMethods(typeof(FilterAtomicReader));
+            CheckOverrideMethods(typeof(FilterAtomicReader.FilterFields));
+            CheckOverrideMethods(typeof(FilterAtomicReader.FilterTerms));
+            CheckOverrideMethods(typeof(FilterAtomicReader.FilterTermsEnum));
+            CheckOverrideMethods(typeof(FilterAtomicReader.FilterDocsEnum));
+            CheckOverrideMethods(typeof(FilterAtomicReader.FilterDocsAndPositionsEnum));
+        }
+    }
+}
\ No newline at end of file
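
CheckOverrideMethods leans on a reflection detail: when a subclass overrides a
method, looking that method up on the subclass yields a MethodInfo whose
DeclaringType is the subclass itself. A self-contained sketch of that
detection (Base/Derived are invented for illustration):

    using System;
    using System.Linq;
    using System.Reflection;

    class Base
    {
        public virtual void Core() { }
        public virtual void Sugar() { Core(); } // default impl in terms of Core()
    }

    class Derived : Base
    {
        public override void Core() { }
        public override void Sugar() { }        // the kind of override the test would flag
    }

    static class OverrideAudit
    {
        static void Main()
        {
            Type clazz = typeof(Derived);
            Type superClazz = clazz.BaseType;
            foreach (MethodInfo m in superClazz.GetMethods())
            {
                if (m.IsStatic || m.DeclaringType == typeof(object))
                    continue;
                MethodInfo subM = clazz.GetMethod(
                    m.Name, m.GetParameters().Select(p => p.ParameterType).ToArray());
                // DeclaringType == clazz means Derived re-declared (overrode) it.
                if (subM.DeclaringType == clazz)
                    Console.WriteLine(clazz.Name + " overrides " + m.Name);
            }
        }
    }

This prints both Core and Sugar for Derived; the real check additionally skips
static, abstract, and final methods so that only overrides of methods with a
default implementation fail the test.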

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestFlex.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestFlex.cs b/src/Lucene.Net.Tests/Index/TestFlex.cs
new file mode 100644
index 0000000..01cd762
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestFlex.cs
@@ -0,0 +1,100 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Analysis;
+    
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Lucene.Net.Store;
+    using Lucene.Net.Util;
+    using NUnit.Framework;
+    using Lucene41PostingsFormat = Lucene.Net.Codecs.Lucene41.Lucene41PostingsFormat;
+
+    [TestFixture]
+    public class TestFlex : LuceneTestCase
+    {
+        // Test non-flex API emulated on flex index
+        [Test]
+        public virtual void TestNonFlex()
+        {
+            Directory d = NewDirectory();
+
+            const int DOC_COUNT = 177;
+
+            IndexWriter w = new IndexWriter(d, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMaxBufferedDocs(7).SetMergePolicy(NewLogMergePolicy()));
+
+            for (int iter = 0; iter < 2; iter++)
+            {
+                if (iter == 0)
+                {
+                    Documents.Document doc = new Documents.Document();
+                    doc.Add(NewTextField("field1", "this is field1", Field.Store.NO));
+                    doc.Add(NewTextField("field2", "this is field2", Field.Store.NO));
+                    doc.Add(NewTextField("field3", "aaa", Field.Store.NO));
+                    doc.Add(NewTextField("field4", "bbb", Field.Store.NO));
+                    for (int i = 0; i < DOC_COUNT; i++)
+                    {
+                        w.AddDocument(doc);
+                    }
+                }
+                else
+                {
+                    w.ForceMerge(1);
+                }
+
+                IndexReader r = w.Reader;
+
+                TermsEnum terms = MultiFields.GetTerms(r, "field3").GetIterator(null);
+                Assert.AreEqual(TermsEnum.SeekStatus.END, terms.SeekCeil(new BytesRef("abc")));
+                r.Dispose();
+            }
+
+            w.Dispose();
+            d.Dispose();
+        }
+
+        [Test]
+        public virtual void TestTermOrd()
+        {
+            Directory d = NewDirectory();
+            IndexWriter w = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat())));
+            Documents.Document doc = new Documents.Document();
+            doc.Add(NewTextField("f", "a b c", Field.Store.NO));
+            w.AddDocument(doc);
+            w.ForceMerge(1);
+            DirectoryReader r = w.Reader;
+            TermsEnum terms = GetOnlySegmentReader(r).Fields.GetTerms("f").GetIterator(null);
+            Assert.IsTrue(terms.Next() != null);
+            try
+            {
+                Assert.AreEqual(0, terms.Ord);
+            }
+#pragma warning disable 168
+            catch (System.NotSupportedException uoe)
+#pragma warning restore 168
+            {
+                // ok -- codec is not required to support this op
+            }
+            r.Dispose();
+            w.Dispose();
+            d.Dispose();
+        }
+    }
+}
\ No newline at end of file
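
TestTermOrd treats TermsEnum.Ord as an optional operation: codecs that do not
track term ordinals throw NotSupportedException, and the test accepts either
outcome. The probe-for-capability shape in isolation (OrdlessEnum is invented
for illustration):

    using System;

    class OrdlessEnum
    {
        // Stand-in for a codec that does not support ordinals.
        public long Ord
        {
            get { throw new NotSupportedException("this codec does not track term ordinals"); }
        }
    }

    static class OptionalOpProbe
    {
        static void Main()
        {
            OrdlessEnum terms = new OrdlessEnum();
            try
            {
                Console.WriteLine("ord = " + terms.Ord);
            }
            catch (NotSupportedException)
            {
                // ok -- the codec is not required to support this op
                Console.WriteLine("ordinals unsupported; falling back");
            }
        }
    }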


[49/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Analysis/TokenAttributes/TestCharTermAttributeImpl.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Analysis/TokenAttributes/TestCharTermAttributeImpl.cs b/src/Lucene.Net.Tests/Analysis/TokenAttributes/TestCharTermAttributeImpl.cs
new file mode 100644
index 0000000..6501d0c
--- /dev/null
+++ b/src/Lucene.Net.Tests/Analysis/TokenAttributes/TestCharTermAttributeImpl.cs
@@ -0,0 +1,485 @@
+using Lucene.Net.Support;
+using NUnit.Framework;
+using System.Collections.Generic;
+using System.Text;
+using System.Text.RegularExpressions;
+
+namespace Lucene.Net.Analysis.TokenAttributes
+{
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestCharTermAttributeImpl : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestResize()
+        {
+            CharTermAttribute t = new CharTermAttribute();
+            char[] content = "hello".ToCharArray();
+            t.CopyBuffer(content, 0, content.Length);
+            for (int i = 0; i < 2000; i++)
+            {
+                t.ResizeBuffer(i);
+                Assert.IsTrue(i <= t.Buffer.Length);
+                Assert.AreEqual("hello", t.ToString());
+            }
+        }
+
+        [Test]
+        public virtual void TestGrow()
+        {
+            CharTermAttribute t = new CharTermAttribute();
+            StringBuilder buf = new StringBuilder("ab");
+            for (int i = 0; i < 20; i++)
+            {
+                char[] content = buf.ToString().ToCharArray();
+                t.CopyBuffer(content, 0, content.Length);
+                Assert.AreEqual(buf.Length, t.Length);
+                Assert.AreEqual(buf.ToString(), t.ToString());
+                buf.Append(buf.ToString());
+            }
+            Assert.AreEqual(1048576, t.Length);
+
+            // now as a StringBuilder, first variant
+            t = new CharTermAttribute();
+            buf = new StringBuilder("ab");
+            for (int i = 0; i < 20; i++)
+            {
+                t.SetEmpty().Append(buf);
+                Assert.AreEqual(buf.Length, t.Length);
+                Assert.AreEqual(buf.ToString(), t.ToString());
+                buf.Append(t);
+            }
+            Assert.AreEqual(1048576, t.Length);
+
+            // Test for slow growth to a long term
+            t = new CharTermAttribute();
+            buf = new StringBuilder("a");
+            for (int i = 0; i < 20000; i++)
+            {
+                t.SetEmpty().Append(buf);
+                Assert.AreEqual(buf.Length, t.Length);
+                Assert.AreEqual(buf.ToString(), t.ToString());
+                buf.Append("a");
+            }
+            Assert.AreEqual(20000, t.Length);
+        }
+
+        [Test]
+        public virtual void TestToString()
+        {
+            char[] b = new char[] { 'a', 'l', 'o', 'h', 'a' };
+            CharTermAttribute t = new CharTermAttribute();
+            t.CopyBuffer(b, 0, 5);
+            Assert.AreEqual("aloha", t.ToString());
+
+            t.SetEmpty().Append("hi there");
+            Assert.AreEqual("hi there", t.ToString());
+        }
+
+        [Test]
+        public virtual void TestClone()
+        {
+            CharTermAttribute t = new CharTermAttribute();
+            char[] content = "hello".ToCharArray();
+            t.CopyBuffer(content, 0, 5);
+            char[] buf = t.Buffer;
+            CharTermAttribute copy = TestToken.AssertCloneIsEqual(t);
+            Assert.AreEqual(t.ToString(), copy.ToString());
+            Assert.AreNotSame(buf, copy.Buffer);
+        }
+
+        [Test]
+        public virtual void TestEquals()
+        {
+            CharTermAttribute t1a = new CharTermAttribute();
+            char[] content1a = "hello".ToCharArray();
+            t1a.CopyBuffer(content1a, 0, 5);
+            CharTermAttribute t1b = new CharTermAttribute();
+            char[] content1b = "hello".ToCharArray();
+            t1b.CopyBuffer(content1b, 0, 5);
+            CharTermAttribute t2 = new CharTermAttribute();
+            char[] content2 = "hello2".ToCharArray();
+            t2.CopyBuffer(content2, 0, 6);
+            Assert.IsTrue(t1a.Equals(t1b));
+            Assert.IsFalse(t1a.Equals(t2));
+            Assert.IsFalse(t2.Equals(t1b));
+        }
+
+        [Test]
+        public virtual void TestCopyTo()
+        {
+            CharTermAttribute t = new CharTermAttribute();
+            CharTermAttribute copy = TestToken.AssertCopyIsEqual(t);
+            Assert.AreEqual("", t.ToString());
+            Assert.AreEqual("", copy.ToString());
+
+            t = new CharTermAttribute();
+            char[] content = "hello".ToCharArray();
+            t.CopyBuffer(content, 0, 5);
+            char[] buf = t.Buffer;
+            copy = TestToken.AssertCopyIsEqual(t);
+            Assert.AreEqual(t.ToString(), copy.ToString());
+            Assert.AreNotSame(buf, copy.Buffer);
+        }
+
+        [Test]
+        public virtual void TestAttributeReflection()
+        {
+            CharTermAttribute t = new CharTermAttribute();
+            t.Append("foobar");
+            TestUtil.AssertAttributeReflection(t, new Dictionary<string, object>()
+            {
+                    { typeof(ICharTermAttribute).Name + "#term", "foobar" },
+                    { typeof(ITermToBytesRefAttribute).Name + "#bytes", new BytesRef("foobar") }
+            });
+        }
+
+        [Test]
+        public virtual void TestCharSequenceInterface()
+        {
+            const string s = "0123456789";
+            CharTermAttribute t = new CharTermAttribute();
+            t.Append(s);
+
+            Assert.AreEqual(s.Length, t.Length);
+            Assert.AreEqual("12", t.SubSequence(1, 3).ToString());
+            Assert.AreEqual(s, t.SubSequence(0, s.Length).ToString());
+
+            Assert.IsTrue(Regex.IsMatch(t.ToString(), "01\\d+"));
+            Assert.IsTrue(Regex.IsMatch(t.SubSequence(3, 5).ToString(), "34"));
+
+            Assert.AreEqual(s.Substring(3, 4), t.SubSequence(3, 7).ToString());
+
+            for (int i = 0; i < s.Length; i++)
+            {
+                Assert.IsTrue(t[i] == s[i]);
+            }
+
+            // LUCENENET specific to test indexer
+            for (int i = 0; i < s.Length; i++)
+            {
+                Assert.IsTrue(t[i] == s[i]);
+            }
+        }
+
+        [Test]
+        public virtual void TestAppendableInterface()
+        {
+            CharTermAttribute t = new CharTermAttribute();
+            //Formatter formatter = new Formatter(t, Locale.ROOT);
+            //formatter.format("%d", 1234);
+            //Assert.AreEqual("1234", t.ToString());
+            //formatter.format("%d", 5678);
+            // LUCENENET: We don't have a formatter in .NET, so continue from here
+            t.Append("12345678"); // LUCENENET specific overload that accepts string
+            Assert.AreEqual("12345678", t.ToString());
+            t.SetEmpty().Append("12345678".ToCharArray()); // LUCENENET specific overload that accepts char[]
+            Assert.AreEqual("12345678", t.ToString());
+            t.Append('9');
+            Assert.AreEqual("123456789", t.ToString());
+            t.Append(new StringCharSequenceWrapper("0"));
+            Assert.AreEqual("1234567890", t.ToString());
+            t.Append(new StringCharSequenceWrapper("0123456789"), 1, 3);
+            Assert.AreEqual("123456789012", t.ToString());
+            //t.Append((ICharSequence) CharBuffer.wrap("0123456789".ToCharArray()), 3, 5);
+            t.Append("0123456789".ToCharArray(), 3, 5); // LUCENENET: no CharBuffer in .NET, so we test char[], start, end overload
+            Assert.AreEqual("12345678901234", t.ToString());
+            t.Append((ICharSequence)t);
+            Assert.AreEqual("1234567890123412345678901234", t.ToString());
+            t.Append(/*(ICharSequence)*/ new StringBuilder("0123456789").ToString(), 5, 7); // LUCENENET: StringBuilder doesn't implement ICharSequence
+            Assert.AreEqual("123456789012341234567890123456", t.ToString());
+            t.Append(/*(ICharSequence)*/ new StringBuilder(t.ToString()));
+            Assert.AreEqual("123456789012341234567890123456123456789012341234567890123456", t.ToString()); // LUCENENET: StringBuilder doesn't implement ICharSequence
+            // very weird, to test if a subSlice is wrapped correctly :)
+            //CharBuffer buf = CharBuffer.wrap("0123456789".ToCharArray(), 3, 5); // LUCENENET: No CharBuffer in .NET
+            StringBuilder buf = new StringBuilder("0123456789", 3, 5, 16);
+            Assert.AreEqual("34567", buf.ToString());
+            t.SetEmpty().Append(/*(ICharSequence)*/ buf, 1, 2); // LUCENENET: StringBuilder doesn't implement ICharSequence
+            Assert.AreEqual("4", t.ToString());
+            ICharTermAttribute t2 = new CharTermAttribute();
+            t2.Append("test");
+            t.Append((ICharSequence)t2);
+            Assert.AreEqual("4test", t.ToString());
+            t.Append((ICharSequence)t2, 1, 2);
+            Assert.AreEqual("4teste", t.ToString());
+
+            try
+            {
+                t.Append((ICharSequence)t2, 1, 5);
+                Assert.Fail("Should throw ArgumentOutOfRangeException");
+            }
+#pragma warning disable 168
+            catch (System.IndexOutOfRangeException iobe)
+#pragma warning restore 168
+            {
+            }
+
+            try
+            {
+                t.Append((ICharSequence)t2, 1, 0);
+                Assert.Fail("Should throw ArgumentOutOfRangeException");
+            }
+#pragma warning disable 168
+            catch (System.IndexOutOfRangeException iobe)
+#pragma warning restore 168
+            {
+            }
+
+            t.Append((ICharSequence)null);
+            Assert.AreEqual("4testenull", t.ToString());
+
+
+            // LUCENENET specific - test string overloads
+            try
+            {
+                t.Append((string)t2.ToString(), 1, 5);
+                Assert.Fail("Should throw IndexOutOfBoundsException");
+            }
+#pragma warning disable 168
+            catch (System.IndexOutOfRangeException iobe)
+#pragma warning restore 168
+            {
+            }
+
+            try
+            {
+                t.Append((string)t2.ToString(), 1, 0);
+                Assert.Fail("Should throw IndexOutOfBoundsException");
+            }
+#pragma warning disable 168
+            catch (System.IndexOutOfRangeException iobe)
+#pragma warning restore 168
+            {
+            }
+
+            t.Append((string)null);
+            Assert.AreEqual("4testenullnull", t.ToString());
+
+
+            // LUCENENET specific - test char[] overloads
+            try
+            {
+                t.Append((char[])t2.ToString().ToCharArray(), 1, 5);
+                Assert.Fail("Should throw IndexOutOfBoundsException");
+            }
+#pragma warning disable 168
+            catch (System.IndexOutOfRangeException iobe)
+#pragma warning restore 168
+            {
+            }
+
+            try
+            {
+                t.Append((char[])t2.ToString().ToCharArray(), 1, 0);
+                Assert.Fail("Should throw IndexOutOfBoundsException");
+            }
+#pragma warning disable 168
+            catch (System.IndexOutOfRangeException iobe)
+#pragma warning restore 168
+            {
+            }
+
+            t.Append((char[])null);
+            Assert.AreEqual("4testenullnullnull", t.ToString());
+        }
+
+        [Test]
+        public virtual void TestAppendableInterfaceWithLongSequences()
+        {
+            CharTermAttribute t = new CharTermAttribute();
+            t.Append("01234567890123456789012345678901234567890123456789"); // LUCENENET specific overload that accepts string
+            assertEquals("01234567890123456789012345678901234567890123456789", t.ToString());
+            t.Append("01234567890123456789012345678901234567890123456789", 3, 50); // LUCENENET specific overload that accepts string, start, end
+            Assert.AreEqual("0123456789012345678901234567890123456789012345678934567890123456789012345678901234567890123456789", t.ToString());
+            t.SetEmpty();
+            t.Append("01234567890123456789012345678901234567890123456789".ToCharArray()); // LUCENENET specific overload that accepts char[]
+            assertEquals("01234567890123456789012345678901234567890123456789", t.ToString());
+            t.Append("01234567890123456789012345678901234567890123456789".ToCharArray(), 3, 50); // LUCENENET specific overload that accepts char[], start, end
+            Assert.AreEqual("0123456789012345678901234567890123456789012345678934567890123456789012345678901234567890123456789", t.ToString());
+            t.SetEmpty();
+            t.Append(new StringCharSequenceWrapper("01234567890123456789012345678901234567890123456789"));
+            //t.Append((ICharSequence) CharBuffer.wrap("01234567890123456789012345678901234567890123456789".ToCharArray()), 3, 50); // LUCENENET: No CharBuffer in .NET
+            t.Append("01234567890123456789012345678901234567890123456789".ToCharArray(), 3, 50); // LUCENENET specific overload that accepts char[], start, end
+            //              "01234567890123456789012345678901234567890123456789"
+            Assert.AreEqual("0123456789012345678901234567890123456789012345678934567890123456789012345678901234567890123456789", t.ToString());
+            t.SetEmpty().Append(/*(ICharSequence)*/ new StringBuilder("01234567890123456789"), 5, 17); // LUCENENET: StringBuilder doesn't implement ICharSequence
+            Assert.AreEqual(new StringCharSequenceWrapper("567890123456"), t.ToString());
+            t.Append(new StringBuilder(t.ToString()));
+            Assert.AreEqual(new StringCharSequenceWrapper("567890123456567890123456"), t.ToString());
+            // very weird, to test if a subSlice is wrapped correctly :)
+            //CharBuffer buf = CharBuffer.wrap("012345678901234567890123456789".ToCharArray(), 3, 15); // LUCENENET: No CharBuffer in .NET
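+            // StringBuilder(value, startIndex, length, capacity): slices chars [3, 18) out of the source string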
+            StringBuilder buf = new StringBuilder("012345678901234567890123456789", 3, 15, 16);
+            Assert.AreEqual("345678901234567", buf.ToString());
+            t.SetEmpty().Append(buf, 1, 14);
+            Assert.AreEqual("4567890123456", t.ToString());
+
+            // finally use a completely custom ICharSequence that is not caught by the is-type checks
+            const string longTestString = "012345678901234567890123456789";
+            t.Append(new CharSequenceAnonymousInnerClassHelper(this, longTestString));
+            Assert.AreEqual("4567890123456" + longTestString, t.ToString());
+        }
+
+        private class CharSequenceAnonymousInnerClassHelper : ICharSequence
+        {
+            private readonly TestCharTermAttributeImpl OuterInstance;
+
+            private string LongTestString;
+
+            public CharSequenceAnonymousInnerClassHelper(TestCharTermAttributeImpl outerInstance, string longTestString)
+            {
+                this.OuterInstance = outerInstance;
+                this.LongTestString = longTestString;
+            }
+
+            public char CharAt(int i)
+            {
+                return LongTestString[i];
+            }
+
+            // LUCENENET specific - Added to .NETify
+            public char this[int i]
+            {
+                get { return LongTestString[i]; }
+            }
+
+            public int Length
+            {
+                get
+                {
+                    return LongTestString.Length;
+                }
+            }
+
+            public ICharSequence SubSequence(int start, int end)
+            {
+                return new StringCharSequenceWrapper(LongTestString.Substring(start, end - start));
+            }
+
+            public override string ToString()
+            {
+                return LongTestString;
+            }
+        }
+
+        [Test]
+        public virtual void TestNonCharSequenceAppend()
+        {
+            CharTermAttribute t = new CharTermAttribute();
+            t.Append("0123456789");
+            t.Append("0123456789");
+            Assert.AreEqual("01234567890123456789", t.ToString());
+            t.Append(new StringBuilder("0123456789"));
+            Assert.AreEqual("012345678901234567890123456789", t.ToString());
+            ICharTermAttribute t2 = new CharTermAttribute();
+            t2.Append("test");
+            t.Append(t2);
+            Assert.AreEqual("012345678901234567890123456789test", t.ToString());
+            t.Append((string)null);
+            t.Append((StringBuilder)null);
+            t.Append((ICharTermAttribute)null);
+            Assert.AreEqual("012345678901234567890123456789testnullnullnull", t.ToString());
+        }
+
+        [Test]
+        public virtual void TestExceptions()
+        {
+            CharTermAttribute t = new CharTermAttribute();
+            t.Append("test");
+            Assert.AreEqual("test", t.ToString());
+
+            try
+            {
+                var _ = t[-1];
+                Assert.Fail("Should throw IndexOutOfBoundsException");
+            }
+            catch (System.IndexOutOfRangeException)
+            {
+            }
+
+            try
+            {
+                var _ = t[4];
+                Assert.Fail("Should throw IndexOutOfBoundsException");
+            }
+            catch (System.IndexOutOfRangeException)
+            {
+            }
+
+            try
+            {
+                t.SubSequence(0, 5);
+                Assert.Fail("Should throw IndexOutOfBoundsException");
+            }
+            catch (System.IndexOutOfRangeException)
+            {
+            }
+
+            try
+            {
+                t.SubSequence(5, 0);
+                Assert.Fail("Should throw IndexOutOfBoundsException");
+            }
+            catch (System.IndexOutOfRangeException)
+            {
+            }
+        }
+
+        /*
+
+        // test speed of the dynamic instanceof checks in append(ICharSequence),
+        // to find the best max length for the generic while (start<end) loop:
+        public void testAppendPerf() {
+          CharTermAttributeImpl t = new CharTermAttributeImpl();
+          final int count = 32;
+          ICharSequence[] csq = new ICharSequence[count * 6];
+          final StringBuilder sb = new StringBuilder();
+          for (int i=0,j=0; i<count; i++) {
+            sb.append(i%10);
+            final String testString = sb.toString();
+            CharTermAttribute cta = new CharTermAttributeImpl();
+            cta.append(testString);
+            csq[j++] = cta;
+            csq[j++] = testString;
+            csq[j++] = new StringBuilder(sb);
+            csq[j++] = new StringBuffer(sb);
+            csq[j++] = CharBuffer.wrap(testString.toCharArray());
+            csq[j++] = new ICharSequence() {
+              public char charAt(int i) { return testString.charAt(i); }
+              public int length() { return testString.length(); }
+              public ICharSequence subSequence(int start, int end) { return testString.subSequence(start, end); }
+              public String toString() { return testString; }
+            };
+          }
+
+          Random rnd = newRandom();
+          long startTime = System.currentTimeMillis();
+          for (int i=0; i<100000000; i++) {
+            t.SetEmpty().append(csq[rnd.nextInt(csq.length)]);
+          }
+          long endTime = System.currentTimeMillis();
+          System.out.println("Time: " + (endTime-startTime)/1000.0 + " s");
+        }
+
+        */
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Analysis/TokenAttributes/TestSimpleAttributeImpl.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Analysis/TokenAttributes/TestSimpleAttributeImpl.cs b/src/Lucene.Net.Tests/Analysis/TokenAttributes/TestSimpleAttributeImpl.cs
new file mode 100644
index 0000000..609e4dc
--- /dev/null
+++ b/src/Lucene.Net.Tests/Analysis/TokenAttributes/TestSimpleAttributeImpl.cs
@@ -0,0 +1,66 @@
+using System.Collections.Generic;
+
+namespace Lucene.Net.Analysis.TokenAttributes
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using Attribute = Lucene.Net.Util.Attribute;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestSimpleAttributeImpl : LuceneTestCase
+    {
+        // this checks, via the reflection API, that the defaults are correct
+        [Test]
+        public virtual void TestAttributes()
+        {
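+            // each attribute is expected to reflect as "IInterfaceName#propertyName" mapped to its default value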
+            TestUtil.AssertAttributeReflection(new PositionIncrementAttribute(), Collections.SingletonMap(typeof(IPositionIncrementAttribute).Name + "#positionIncrement", (object)1));
+            TestUtil.AssertAttributeReflection(new PositionLengthAttribute(), Collections.SingletonMap(typeof(IPositionLengthAttribute).Name + "#positionLength", (object)1));
+            TestUtil.AssertAttributeReflection(new FlagsAttribute(), Collections.SingletonMap(typeof(IFlagsAttribute).Name + "#flags", (object)0));
+            TestUtil.AssertAttributeReflection(new TypeAttribute(), Collections.SingletonMap(typeof(ITypeAttribute).Name + "#type", (object)TypeAttribute_Fields.DEFAULT_TYPE));
+            TestUtil.AssertAttributeReflection(new PayloadAttribute(), Collections.SingletonMap(typeof(IPayloadAttribute).Name + "#payload", (object)null));
+            TestUtil.AssertAttributeReflection(new KeywordAttribute(), Collections.SingletonMap(typeof(IKeywordAttribute).Name + "#keyword", (object)false));
+            TestUtil.AssertAttributeReflection(new OffsetAttribute(), new Dictionary<string, object>()
+            {
+                {typeof(IOffsetAttribute).Name + "#startOffset", 0 },
+                {typeof(IOffsetAttribute).Name + "#endOffset", 0}
+            });
+        }
+
+        public static Attribute AssertCloneIsEqual(Attribute att)
+        {
+            Attribute clone = (Attribute)att.Clone();
+            Assert.AreEqual(att, clone, "Clone must be equal");
+            Assert.AreEqual(att.GetHashCode(), clone.GetHashCode(), "Clone's hashcode must be equal");
+            return clone;
+        }
+
+        public static Attribute AssertCopyIsEqual(Attribute att)
+        {
+            Attribute copy = (Attribute)System.Activator.CreateInstance(att.GetType());
+            att.CopyTo(copy);
+            Assert.AreEqual(att, copy, "Copied instance must be equal");
+            Assert.AreEqual(att.GetHashCode(), copy.GetHashCode(), "Copied instance's hashcode must be equal");
+            return copy;
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Analysis/TrivialLookaheadFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Analysis/TrivialLookaheadFilter.cs b/src/Lucene.Net.Tests/Analysis/TrivialLookaheadFilter.cs
new file mode 100644
index 0000000..b422026
--- /dev/null
+++ b/src/Lucene.Net.Tests/Analysis/TrivialLookaheadFilter.cs
@@ -0,0 +1,109 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using System.Collections.Generic;
+
+namespace Lucene.Net.Analysis
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Simple example of a filter that seems to show some problems with LookaheadTokenFilter.
+    /// </summary>
+    public sealed class TrivialLookaheadFilter : LookaheadTokenFilter<TestPosition>
+    {
+        private readonly ICharTermAttribute TermAtt;
+        private new readonly IPositionIncrementAttribute PosIncAtt;
+        private new readonly IOffsetAttribute OffsetAtt;
+
+        private int InsertUpto;
+
+        internal TrivialLookaheadFilter(TokenStream input)
+            : base(input)
+        {
+            TermAtt = AddAttribute<ICharTermAttribute>();
+            PosIncAtt = AddAttribute<IPositionIncrementAttribute>();
+            OffsetAtt = AddAttribute<IOffsetAttribute>();
+        }
+
+        protected internal override TestPosition NewPosition()
+        {
+            return new TestPosition();
+        }
+
+        public override bool IncrementToken()
+        {
+            // At the outset, positions.MaxPos is -1, so we'll peek. When we reach the end of the sentence and move to the
+            // first token of the next sentence, MaxPos will still be the previous sentence's end token, and we'll peek again.
+            if (positions.MaxPos < OutputPos)
+            {
+                PeekSentence();
+            }
+
+            return NextToken();
+        }
+
+        public override void Reset()
+        {
+            base.Reset();
+            InsertUpto = -1;
+        }
+
+        protected internal override void AfterPosition()
+        {
+            if (InsertUpto < OutputPos)
+            {
+                InsertToken();
+                // replace term with 'improved' term.
+                ClearAttributes();
+                TermAtt.SetEmpty();
+                PosIncAtt.PositionIncrement = 0;
+                TermAtt.Append(((TestPosition)positions.Get(OutputPos)).Fact);
+                OffsetAtt.SetOffset(positions.Get(OutputPos).StartOffset, positions.Get(OutputPos + 1).EndOffset);
+                InsertUpto = OutputPos;
+            }
+        }
+
+        private void PeekSentence()
+        {
+            IList<string> facts = new List<string>();
+            bool haveSentence = false;
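+            // buffer tokens until we see the sentence terminator "." or the stream is exhausted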
+            do
+            {
+                if (PeekToken())
+                {
+                    string term = new string(TermAtt.Buffer, 0, TermAtt.Length);
+                    facts.Add(term + "-huh?");
+                    if (".".Equals(term))
+                    {
+                        haveSentence = true;
+                    }
+                }
+                else
+                {
+                    haveSentence = true;
+                }
+            } while (!haveSentence);
+
+            // attach the (now disambiguated) analyzed tokens to the positions.
+            for (int x = 0; x < facts.Count; x++)
+            {
+                // sentenceTokens is just relative to sentence, positions is absolute.
+                ((TestPosition)positions.Get(OutputPos + x)).Fact = facts[x];
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/App.config
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/App.config b/src/Lucene.Net.Tests/App.config
new file mode 100644
index 0000000..909b15d
--- /dev/null
+++ b/src/Lucene.Net.Tests/App.config
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied.  See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+-->
+<configuration>
+  <appSettings>
+    <add key="tempDir" value="C:\Windows\Temp\Lucene.Net-Tests" />
+    <add key="ClientSettingsProvider.ServiceUri" value="" />
+  </appSettings>
+  <!-- When I add this setting and run tests, I get 0 success, 0 failures, 0 tests not run
+  <appSettings>
+    <add key="Lucene.Net.CompressionLib.class" value="Lucene.Net.Index.Compression.SharpZipLibAdapter"/>
+  </appSettings>
+  -->
+  <system.web>
+    <membership defaultProvider="ClientAuthenticationMembershipProvider">
+      <providers>
+        <add name="ClientAuthenticationMembershipProvider" type="System.Web.ClientServices.Providers.ClientFormsAuthenticationMembershipProvider, System.Web.Extensions, Version=4.0.0.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35" serviceUri="" />
+      </providers>
+    </membership>
+    <roleManager defaultProvider="ClientRoleProvider" enabled="true">
+      <providers>
+        <add name="ClientRoleProvider" type="System.Web.ClientServices.Providers.ClientRoleProvider, System.Web.Extensions, Version=4.0.0.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35" serviceUri="" cacheTimeout="86400" />
+      </providers>
+    </roleManager>
+  </system.web>
+  <system.diagnostics>
+    <assert assertuienabled="false" />
+  </system.diagnostics>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/AssemblyInfo.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/AssemblyInfo.cs b/src/Lucene.Net.Tests/AssemblyInfo.cs
new file mode 100644
index 0000000..46d6fb9
--- /dev/null
+++ b/src/Lucene.Net.Tests/AssemblyInfo.cs
@@ -0,0 +1,87 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+
+using NUnit.Framework;
+using System.Reflection;
+using System.Runtime.CompilerServices;
+
+//
+// General Information about an assembly is controlled through the following 
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+//
+[assembly: AssemblyTitle("Apache Lucene.Net")]
+[assembly: AssemblyDescription("The Apache Software Foundation Lucene.Net, a full-text search engine library")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("The Apache Software Foundation")]
+[assembly: AssemblyProduct("Lucene.Net.Test")]
+[assembly: AssemblyCopyright("Copyright 2006 - 2011 The Apache Software Foundation")]
+[assembly: AssemblyTrademark("Copyright 2006 - 2011 The Apache Software Foundation")]
+[assembly: AssemblyDefaultAlias("Lucene.Net")]
+[assembly: AssemblyCulture("")]
+
+[assembly: AssemblyInformationalVersionAttribute("4.8.0")]
+
+//
+// Version information for an assembly consists of the following four values:
+//
+//      Major Version
+//      Minor Version 
+//      Build Number
+//      Revision
+//
+// You can specify all the values or you can default the Revision and Build Numbers 
+// by using the '*' as shown below:
+
+[assembly: AssemblyVersion("3.0.3")]
+
+//
+// In order to sign your assembly you must specify a key to use. Refer to the 
+// Microsoft .NET Framework documentation for more information on assembly signing.
+//
+// Use the attributes below to control which key is used for signing. 
+//
+// Notes: 
+//   (*) If no key is specified, the assembly is not signed.
+//   (*) KeyName refers to a key that has been installed in the Crypto Service
+//       Provider (CSP) on your machine. KeyFile refers to a file which contains
+//       a key.
+//   (*) If the KeyFile and the KeyName values are both specified, the 
+//       following processing occurs:
+//       (1) If the KeyName can be found in the CSP, that key is used.
+//       (2) If the KeyName does not exist and the KeyFile does exist, the key 
+//           in the KeyFile is installed into the CSP and used.
+//   (*) In order to create a KeyFile, you can use the sn.exe (Strong Name) utility.
+//       When specifying the KeyFile, the location of the KeyFile should be
+//       relative to the project output directory which is
+//       %Project Directory%\obj\<configuration>. For example, if your KeyFile is
+//       located in the project directory, you would specify the AssemblyKeyFile 
+//       attribute as [assembly: AssemblyKeyFile("..\\..\\mykey.snk")]
+//   (*) Delay Signing is an advanced option - see the Microsoft .NET Framework
+//       documentation for more information on this.
+//
+[assembly: AssemblyDelaySign(false)]
+[assembly: AssemblyKeyFile("")]
+[assembly: AssemblyKeyName("")]
+
+#if !NETSTANDARD
+[assembly: Timeout(20000)]
+#endif 
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Compressing/AbstractTestCompressionMode.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Compressing/AbstractTestCompressionMode.cs b/src/Lucene.Net.Tests/Codecs/Compressing/AbstractTestCompressionMode.cs
new file mode 100644
index 0000000..9dcfcde
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Compressing/AbstractTestCompressionMode.cs
@@ -0,0 +1,180 @@
+using System;
+using Lucene.Net.Util;
+
+namespace Lucene.Net.Codecs.Compressing
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using ByteArrayDataInput = Lucene.Net.Store.ByteArrayDataInput;
+    using ByteArrayDataOutput = Lucene.Net.Store.ByteArrayDataOutput;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public abstract class AbstractTestCompressionMode : LuceneTestCase
+    {
+        internal CompressionMode Mode;
+
+        internal static byte[] RandomArray()
+        {
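+            // half the time draw bytes from a tiny range (0-3) so the data repeats and compresses
+            // well; otherwise use the full byte range, which is mostly incompressible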
+            int max = Random().NextBoolean() ? Random().Next(4) : Random().Next(256);
+            int length = Random().NextBoolean() ? Random().Next(20) : Random().Next(192 * 1024);
+            return RandomArray(length, max);
+        }
+
+        internal static byte[] RandomArray(int length, int max)
+        {
+            var arr = new byte[length];
+            for (int i = 0; i < arr.Length; ++i)
+            {
+                arr[i] = (byte)RandomInts.NextIntBetween(Random(), 0, max);
+            }
+            return arr;
+        }
+
+        internal virtual byte[] Compress(byte[] decompressed, int off, int len)
+        {
+            Compressor compressor = Mode.NewCompressor();
+            return Compress(compressor, decompressed, off, len);
+        }
+
+        internal static byte[] Compress(Compressor compressor, byte[] decompressed, int off, int len)
+        {
+            var compressed = new byte[len * 2 + 16]; // should be enough
+            ByteArrayDataOutput @out = new ByteArrayDataOutput(compressed);
+            compressor.Compress(decompressed, off, len, @out);
+            int compressedLen = @out.Position;
+            return Arrays.CopyOf(compressed, compressedLen);
+        }
+
+        internal virtual byte[] Decompress(byte[] compressed, int originalLength)
+        {
+            Decompressor decompressor = Mode.NewDecompressor();
+            return Decompress(decompressor, compressed, originalLength);
+        }
+
+        internal static byte[] Decompress(Decompressor decompressor, byte[] compressed, int originalLength)
+        {
+            BytesRef bytes = new BytesRef();
+            decompressor.Decompress(new ByteArrayDataInput(compressed), originalLength, 0, originalLength, bytes);
+            return Arrays.CopyOfRange(bytes.Bytes, bytes.Offset, bytes.Offset + bytes.Length);
+        }
+
+        internal virtual byte[] Decompress(byte[] compressed, int originalLength, int offset, int length)
+        {
+            Decompressor decompressor = Mode.NewDecompressor();
+            BytesRef bytes = new BytesRef();
+            decompressor.Decompress(new ByteArrayDataInput(compressed), originalLength, offset, length, bytes);
+            return Arrays.CopyOfRange(bytes.Bytes, bytes.Offset, bytes.Offset + bytes.Length);
+        }
+
+        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
+        public virtual void TestDecompress()
+        {
+            int iterations = AtLeast(10);
+            for (int i = 0; i < iterations; ++i)
+            {
+                var decompressed = RandomArray();
+                int off = Random().NextBoolean() ? 0 : TestUtil.NextInt(Random(), 0, decompressed.Length);
+                int len = Random().NextBoolean() ? decompressed.Length - off : TestUtil.NextInt(Random(), 0, decompressed.Length - off);
+                var compressed = Compress(decompressed, off, len);
+                var restored = Decompress(compressed, len);
+                Assert.AreEqual(Arrays.CopyOfRange(decompressed, off, off + len), restored); // was AssertArrayEquals
+            }
+        }
+
+        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
+        public virtual void TestPartialDecompress()
+        {
+            int iterations = AtLeast(10);
+            for (int i = 0; i < iterations; ++i)
+            {
+                var decompressed = RandomArray();
+                var compressed = Compress(decompressed, 0, decompressed.Length);
+                int offset, length;
+                if (decompressed.Length == 0)
+                {
+                    offset = length = 0;
+                }
+                else
+                {
+                    offset = Random().Next(decompressed.Length);
+                    length = Random().Next(decompressed.Length - offset);
+                }
+                var restored = Decompress(compressed, decompressed.Length, offset, length);
+                Assert.AreEqual(Arrays.CopyOfRange(decompressed, offset, offset + length), restored); // was AssertArrayEquals
+            }
+        }
+
+        public virtual byte[] Test(byte[] decompressed)
+        {
+            return Test(decompressed, 0, decompressed.Length);
+        }
+
+        public virtual byte[] Test(byte[] decompressed, int off, int len)
+        {
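+            // round-trip and sanity-check the restored length; subclasses (e.g. the LZ4 tests)
+            // further validate the returned compressed form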
+            var compressed = Compress(decompressed, off, len);
+            var restored = Decompress(compressed, len);
+            Assert.AreEqual(len, restored.Length);
+            return compressed;
+        }
+
+        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
+        public virtual void TestEmptySequence()
+        {
+            Test(new byte[0]);
+        }
+
+        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
+        public virtual void TestShortSequence()
+        {
+            Test(new[] { (byte)Random().Next(256) });
+        }
+
+        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
+        public virtual void TestIncompressible()
+        {
+            var decompressed = new byte[RandomInts.NextIntBetween(Random(), 20, 256)];
+            for (int i = 0; i < decompressed.Length; ++i)
+            {
+                decompressed[i] = (byte)i;
+            }
+            Test(decompressed);
+        }
+
+        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
+        public virtual void TestConstant()
+        {
+            var decompressed = new byte[TestUtil.NextInt(Random(), 1, 10000)];
+            Arrays.Fill(decompressed, (byte)Random().Next());
+            Test(decompressed);
+        }
+
+        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
+        public virtual void TestLUCENE5201()
+        {
+            sbyte[] data = { 14, 72, 14, 85, 3, 72, 14, 85, 3, 72, 14, 72, 14, 72, 14, 85, 3, 72, 14, 72, 14, 72, 14, 72, 14, 72, 14, 72, 14, 85, 3, 72, 14, 85, 3, 72, 14, 85, 3, 72, 14, 85, 3, 72, 14, 85, 3, 72, 14, 85, 3, 72, 14, 50, 64, 0, 46, -1, 0, 0, 0, 29, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 50, 64, 0, 47, -105, 0, 0, 0, 30, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, -97, 6, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68,
+                -113, 0, 120, 64, 0, 48, 4, 0, 0, 0, 31, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 18, 72, 30, 72, 22, 72, 31, 72, 43, 72, 19, 72, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 18, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39, 24, 32, 34, 124, 0, 120, 64, 0, 48, 80, 0, 0, 0, 31, 30, 72, 22, 72, 31, 72, 43, 72, 19, 72, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72,
+                18, 72, 30, 72, 22, 72, 31, 72, 43, 72, 19, 72, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 18, 72, 30, 72, 22, 72, 31, 72, 43, 72, 19, 72, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 18, 72, 30, 72, 22, 72, 31, 72, 43, 72, 19, 72, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 18, 72, 30, 72, 22, 72, 31, 72, 43, 72, 19, 72, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25,
+                72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 18, 72, 30, 72, 22, 72, 31, 72, 43, 72, 19, 72, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 18, 72, 30, 72, 22, 72, 31, 72, 43, 72, 19, 50, 64, 0, 49, 20, 0, 0, 0, 32, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 50, 64, 0, 50, 53, 0, 0, 0, 34, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0,
+                68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 50, 64, 0, 51, 85, 0, 0, 0, 36, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, -97, 5, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 50, -64, 0, 51, -45, 0, 0, 0, 37, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3,
+                -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 120, 64, 0, 52, -88, 0, 0, 0, 39, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 72, 13, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 72, 13, 72, 13, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 72, 13, 72, 13, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 72, 13, 85, 5, 72, 13, 72, 13, 85, 5, 72, 13, 72, 13, 85, 5, 72, 13, -19, -24, -101, -35 };
+            Test(data.ToByteArray(), 9, data.Length - 9);
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Compressing/AbstractTestLZ4CompressionMode.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Compressing/AbstractTestLZ4CompressionMode.cs b/src/Lucene.Net.Tests/Codecs/Compressing/AbstractTestLZ4CompressionMode.cs
new file mode 100644
index 0000000..334ae8d
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Compressing/AbstractTestLZ4CompressionMode.cs
@@ -0,0 +1,129 @@
+using Lucene.Net.Randomized.Generators;
+using Lucene.Net.Support;
+using NUnit.Framework;
+using System;
+using System.Text;
+
+namespace Lucene.Net.Codecs.Compressing
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public abstract class AbstractTestLZ4CompressionMode : AbstractTestCompressionMode
+    {
+        public override byte[] Test(byte[] decompressed)
+        {
+            var compressed = base.Test(decompressed);
+            int off = 0;
+            int decompressedOff = 0;
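+            // walk the compressed block sequence by sequence: in the LZ4 format each sequence
+            // starts with a token byte whose high nibble is the literal length and whose low
+            // nibble is the match length; a nibble of 0x0F means the length continues in the
+            // following bytes, 255 at a time, until a byte < 255 terminates it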
+            for (; ; )
+            {
+                int token = compressed[off++] & 0xFF;
+                int literalLen = (int)((uint)token >> 4);
+                if (literalLen == 0x0F)
+                {
+                    while (compressed[off] == 0xFF)
+                    {
+                        literalLen += 0xFF;
+                        ++off;
+                    }
+                    literalLen += compressed[off++] & 0xFF;
+                }
+                // skip literals
+                off += literalLen;
+                decompressedOff += literalLen;
+
+                // check that the stream ends with literals and that there are at least
+                // 5 of them
+                if (off == compressed.Length)
+                {
+                    Assert.AreEqual(decompressed.Length, decompressedOff);
+                    Assert.IsTrue(literalLen >= LZ4.LAST_LITERALS || literalLen == decompressed.Length, "lastLiterals=" + literalLen + ", bytes=" + decompressed.Length);
+                    break;
+                }
+
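+                // each sequence (except the last) carries a 2-byte little-endian offset pointing
+                // back into the already-decompressed data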
+                int matchDec = (compressed[off++] & 0xFF) | ((compressed[off++] & 0xFF) << 8);
+                // check that match dec is not 0
+                Assert.IsTrue(matchDec > 0 && matchDec <= decompressedOff, matchDec + " " + decompressedOff);
+
+                int matchLen = token & 0x0F;
+                if (matchLen == 0x0F)
+                {
+                    while (compressed[off] == 0xFF)
+                    {
+                        matchLen += 0xFF;
+                        ++off;
+                    }
+                    matchLen += compressed[off++] & 0xFF;
+                }
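+                // match lengths are encoded minus MIN_MATCH (4 bytes), so add it back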
+                matchLen += LZ4.MIN_MATCH;
+
+                // if the match ends prematurely, the next sequence should not start with
+                // literals, or else we are wasting space
+                if (decompressedOff + matchLen < decompressed.Length - LZ4.LAST_LITERALS)
+                {
+                    bool moreCommonBytes = decompressed[decompressedOff + matchLen] == decompressed[decompressedOff - matchDec + matchLen];
+                    bool nextSequenceHasLiterals = ((int)((uint)(compressed[off] & 0xFF) >> 4)) != 0;
+                    Assert.IsTrue(!moreCommonBytes || !nextSequenceHasLiterals);
+                }
+
+                decompressedOff += matchLen;
+            }
+            Assert.AreEqual(decompressed.Length, decompressedOff);
+            return compressed;
+        }
+
+        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
+        public virtual void TestShortLiteralsAndMatchs()
+        {
+            // literal and match lengths <= 15
+            var decompressed = "1234562345673456745678910123".GetBytes(Encoding.UTF8);
+            Test(decompressed);
+        }
+
+        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
+        public virtual void TestLongMatchs()
+        {
+            // match length >= 20
+            var decompressed = new byte[RandomInts.NextIntBetween(Random(), 300, 1024)];
+            for (int i = 0; i < decompressed.Length; ++i)
+            {
+                decompressed[i] = (byte)i;
+            }
+            Test(decompressed);
+        }
+
+        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
+        public virtual void TestLongLiterals()
+        {
+            // long literals (length >= 16) which are not the last literals
+            var decompressed = RandomArray(RandomInts.NextIntBetween(Random(), 400, 1024), 256);
+            int matchRef = Random().Next(30);
+            int matchOff = RandomInts.NextIntBetween(Random(), decompressed.Length - 40, decompressed.Length - 20);
+            int matchLength = RandomInts.NextIntBetween(Random(), 4, 10);
+            Array.Copy(decompressed, matchRef, decompressed, matchOff, matchLength);
+            Test(decompressed);
+        }
+
+        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
+        public virtual void TestMatchRightBeforeLastLiterals()
+        {
+            Test(new byte[] { 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 5 });
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Compressing/TestCompressingStoredFieldsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Compressing/TestCompressingStoredFieldsFormat.cs b/src/Lucene.Net.Tests/Codecs/Compressing/TestCompressingStoredFieldsFormat.cs
new file mode 100644
index 0000000..ef5ac7e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Compressing/TestCompressingStoredFieldsFormat.cs
@@ -0,0 +1,200 @@
+using Lucene.Net.Documents;
+using Field = Lucene.Net.Documents.Field;
+
+namespace Lucene.Net.Codecs.Compressing
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using System;
+    using BaseStoredFieldsFormatTestCase = Lucene.Net.Index.BaseStoredFieldsFormatTestCase;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using Int32Field = Int32Field;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Attributes;
+
+    [TestFixture]
+    public class TestCompressingStoredFieldsFormat : BaseStoredFieldsFormatTestCase
+    {
+        protected override Codec Codec
+        {
+            get
+            {
+                return CompressingCodec.RandomInstance(Random());
+            }
+        }
+
+        [Test]
+        public virtual void TestDeletePartiallyWrittenFilesIfAbort()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30));
+            iwConf.SetCodec(CompressingCodec.RandomInstance(Random()));
+            // disable CFS because this test checks file names
+            iwConf.SetMergePolicy(NewLogMergePolicy(false));
+            iwConf.SetUseCompoundFile(false);
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwConf);
+
+            Document validDoc = new Document();
+            validDoc.Add(new Int32Field("id", 0, Field.Store.YES));
+            iw.AddDocument(validDoc);
+            iw.Commit();
+
+            // make sure that #writeField will fail, so as to trigger an abort
+            Document invalidDoc = new Document();
+            FieldType fieldType = new FieldType();
+            fieldType.IsStored = true;
+            invalidDoc.Add(new FieldAnonymousInnerClassHelper(this, fieldType));
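+            // the anonymous field is marked as stored but carries no value, so writing it
+            // throws ArgumentException, aborting the segment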
+
+            try
+            {
+                Assert.Throws<ArgumentException>(() => {
+                    iw.AddDocument(invalidDoc);
+                    iw.Commit();
+                });
+            }
+            finally
+            {
+                int counter = 0;
+                foreach (string fileName in dir.ListAll())
+                {
+                    if (fileName.EndsWith(".fdt") || fileName.EndsWith(".fdx"))
+                    {
+                        counter++;
+                    }
+                }
+                // exactly one .fdt and one .fdx file must have been found
+                Assert.AreEqual(2, counter);
+                iw.Dispose();
+                dir.Dispose();
+            }
+        }
+
+        private class FieldAnonymousInnerClassHelper : Field
+        {
+            private readonly TestCompressingStoredFieldsFormat OuterInstance;
+
+            public FieldAnonymousInnerClassHelper(TestCompressingStoredFieldsFormat outerInstance, FieldType fieldType)
+                : base("invalid", fieldType)
+            {
+                this.OuterInstance = outerInstance;
+            }
+        }
+
+
+        #region BaseStoredFieldsFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestRandomStoredFields()
+        {
+            base.TestRandomStoredFields();
+        }
+
+        [Test]
+        // LUCENE-1727: make sure doc fields are stored in order
+        public override void TestStoredFieldsOrder()
+        {
+            base.TestStoredFieldsOrder();
+        }
+
+        [Test]
+        // LUCENE-1219
+        public override void TestBinaryFieldOffsetLength()
+        {
+            base.TestBinaryFieldOffsetLength();
+        }
+
+        [Test]
+        public override void TestNumericField()
+        {
+            base.TestNumericField();
+        }
+
+        [Test]
+        public override void TestIndexedBit()
+        {
+            base.TestIndexedBit();
+        }
+
+        [Test]
+        public override void TestReadSkip()
+        {
+            base.TestReadSkip();
+        }
+
+        [Test]
+        public override void TestEmptyDocs()
+        {
+            base.TestEmptyDocs();
+        }
+
+        [Test]
+        public override void TestConcurrentReads()
+        {
+            base.TestConcurrentReads();
+        }
+
+        [Test]
+        public override void TestWriteReadMerge()
+        {
+            base.TestWriteReadMerge();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit has no Timeout attribute on .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public override void TestBigDocuments()
+        {
+            base.TestBigDocuments();
+        }
+
+        [Test]
+        public override void TestBulkMergeWithDeletes()
+        {
+            base.TestBulkMergeWithDeletes();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Compressing/TestCompressingTermVectorsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Compressing/TestCompressingTermVectorsFormat.cs b/src/Lucene.Net.Tests/Codecs/Compressing/TestCompressingTermVectorsFormat.cs
new file mode 100644
index 0000000..1ce3276
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Compressing/TestCompressingTermVectorsFormat.cs
@@ -0,0 +1,164 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Codecs.Compressing
+{
+    using Attributes;
+    using NUnit.Framework;
+    using AtomicReader = Lucene.Net.Index.AtomicReader;
+    using BaseTermVectorsFormatTestCase = Lucene.Net.Index.BaseTermVectorsFormatTestCase;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Terms = Lucene.Net.Index.Terms;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+    using TextField = TextField;
+
+    //using Repeat = com.carrotsearch.randomizedtesting.annotations.Repeat;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class TestCompressingTermVectorsFormat : BaseTermVectorsFormatTestCase
+    {
+        protected override Codec Codec
+        {
+            get
+            {
+                return CompressingCodec.RandomInstance(Random());
+            }
+        }
+
+        // https://issues.apache.org/jira/browse/LUCENE-5156
+        [Test]
+        public virtual void TestNoOrds()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.StoreTermVectors = true;
+            doc.Add(new Field("foo", "this is a test", ft));
+            iw.AddDocument(doc);
+            AtomicReader ir = GetOnlySegmentReader(iw.Reader);
+            Terms terms = ir.GetTermVector(0, "foo");
+            Assert.IsNotNull(terms);
+            TermsEnum termsEnum = terms.GetIterator(null);
+            Assert.AreEqual(TermsEnum.SeekStatus.FOUND, termsEnum.SeekCeil(new BytesRef("this")));
+            try
+            {
+                var _ = termsEnum.Ord;
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.NotSupportedException expected)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+
+            try
+            {
+                termsEnum.SeekExact(0);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.NotSupportedException expected)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+            ir.Dispose();
+            iw.Dispose();
+            dir.Dispose();
+        }
+
+
+        #region BaseTermVectorsFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        // only one doc with vectors
+        public override void TestRareVectors()
+        {
+            base.TestRareVectors();
+        }
+
+        [Test]
+        public override void TestHighFreqs()
+        {
+            base.TestHighFreqs();
+        }
+
+        [Test]
+        public override void TestLotsOfFields()
+        {
+            base.TestLotsOfFields();
+        }
+
+        [Test]
+        // different options for the same field
+        public override void TestMixedOptions()
+        {
+            base.TestMixedOptions();
+        }
+
+        [Test]
+        public override void TestRandom()
+        {
+            base.TestRandom();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit has no Timeout attribute on .NET Core.
+        [Timeout(30000)]
+#endif
+        [Test, HasTimeout]
+        public override void TestMerge()
+        {
+            base.TestMerge();
+        }
+
+        [Test]
+        // run random tests from different threads to make sure the per-thread clones
+        // don't share mutable data
+        public override void TestClone()
+        {
+            base.TestClone();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Compressing/TestFastCompressionMode.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Compressing/TestFastCompressionMode.cs b/src/Lucene.Net.Tests/Codecs/Compressing/TestFastCompressionMode.cs
new file mode 100644
index 0000000..6a74a51
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Compressing/TestFastCompressionMode.cs
@@ -0,0 +1,113 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Codecs.Compressing
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class TestFastCompressionMode : AbstractTestLZ4CompressionMode
+    {
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Mode = CompressionMode.FAST;
+        }
+
+
+        #region AbstractTestLZ4CompressionMode
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestShortLiteralsAndMatchs()
+        {
+            base.TestShortLiteralsAndMatchs();
+        }
+
+        [Test]
+        public override void TestLongMatchs()
+        {
+            base.TestLongMatchs();
+        }
+
+        [Test]
+        public override void TestLongLiterals()
+        {
+            base.TestLongLiterals();
+        }
+
+        [Test]
+        public override void TestMatchRightBeforeLastLiterals()
+        {
+            base.TestMatchRightBeforeLastLiterals();
+        }
+
+        #endregion
+
+        #region AbstractTestCompressionMode
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestDecompress()
+        {
+            base.TestDecompress();
+        }
+
+        [Test]
+        public override void TestPartialDecompress()
+        {
+            base.TestPartialDecompress();
+        }
+
+        [Test]
+        public override void TestEmptySequence()
+        {
+            base.TestEmptySequence();
+        }
+
+        [Test]
+        public override void TestShortSequence()
+        {
+            base.TestShortSequence();
+        }
+
+        [Test]
+        public override void TestIncompressible()
+        {
+            base.TestIncompressible();
+        }
+
+        [Test]
+        public override void TestConstant()
+        {
+            base.TestConstant();
+        }
+
+        [Test]
+        public override void TestLUCENE5201()
+        {
+            base.TestLUCENE5201();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file
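
CompressionMode.FAST is the LZ4 fast path in Lucene 4.8: it favors compression and
decompression speed over ratio. A rough round-trip sketch for context, assuming the
4.8-era Compressor/Decompressor shapes (Compress writes to a DataOutput, Decompress
fills a BytesRef); the exact signatures are an assumption, not taken from this commit:

    byte[] data = System.Text.Encoding.UTF8.GetBytes("hello hello hello");
    var buffer = new byte[1024];
    var output = new Lucene.Net.Store.ByteArrayDataOutput(buffer);
    CompressionMode.FAST.NewCompressor().Compress(data, 0, data.Length, output);

    var restored = new Lucene.Net.Util.BytesRef();
    var input = new Lucene.Net.Store.ByteArrayDataInput(buffer, 0, output.Position);
    CompressionMode.FAST.NewDecompressor().Decompress(input, data.Length, 0, data.Length, restored);
    // restored now holds the original bytes: data.Length bytes starting at restored.Offset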

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Compressing/TestFastDecompressionMode.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Compressing/TestFastDecompressionMode.cs b/src/Lucene.Net.Tests/Codecs/Compressing/TestFastDecompressionMode.cs
new file mode 100644
index 0000000..b90a582
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Compressing/TestFastDecompressionMode.cs
@@ -0,0 +1,123 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Codecs.Compressing
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class TestFastDecompressionMode : AbstractTestLZ4CompressionMode
+    {
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Mode = CompressionMode.FAST_DECOMPRESSION;
+        }
+
+        public override byte[] Test(byte[] decompressed, int off, int len)
+        {
+            var compressed = base.Test(decompressed, off, len);
+            var compressed2 = Compress(CompressionMode.FAST.NewCompressor(), decompressed, off, len);
+            // because of the way this compression mode works, its output is never
+            // larger than the output of CompressionMode.FAST
+            Assert.IsTrue(compressed.Length <= compressed2.Length);
+            return compressed;
+        }
+
+
+        #region AbstractTestLZ4CompressionMode
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestShortLiteralsAndMatchs()
+        {
+            base.TestShortLiteralsAndMatchs();
+        }
+
+        [Test]
+        public override void TestLongMatchs()
+        {
+            base.TestLongMatchs();
+        }
+
+        [Test]
+        public override void TestLongLiterals()
+        {
+            base.TestLongLiterals();
+        }
+
+        [Test]
+        public override void TestMatchRightBeforeLastLiterals()
+        {
+            base.TestMatchRightBeforeLastLiterals();
+        }
+
+        #endregion
+
+        #region AbstractTestCompressionMode
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestDecompress()
+        {
+            base.TestDecompress();
+        }
+
+        [Test]
+        public override void TestPartialDecompress()
+        {
+            base.TestPartialDecompress();
+        }
+
+        [Test]
+        public override void TestEmptySequence()
+        {
+            base.TestEmptySequence();
+        }
+
+        [Test]
+        public override void TestShortSequence()
+        {
+            base.TestShortSequence();
+        }
+
+        [Test]
+        public override void TestIncompressible()
+        {
+            base.TestIncompressible();
+        }
+
+        [Test]
+        public override void TestConstant()
+        {
+            base.TestConstant();
+        }
+
+        [Test]
+        public override void TestLUCENE5201()
+        {
+            base.TestLUCENE5201();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Compressing/TestHighCompressionMode.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Compressing/TestHighCompressionMode.cs b/src/Lucene.Net.Tests/Codecs/Compressing/TestHighCompressionMode.cs
new file mode 100644
index 0000000..679d1f0
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Compressing/TestHighCompressionMode.cs
@@ -0,0 +1,82 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Codecs.Compressing
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class TestHighCompressionMode : AbstractTestCompressionMode
+    {
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Mode = CompressionMode.HIGH_COMPRESSION;
+        }
+
+
+        #region AbstractTestCompressionMode
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestDecompress()
+        {
+            base.TestDecompress();
+        }
+
+        [Test]
+        public override void TestPartialDecompress()
+        {
+            base.TestPartialDecompress();
+        }
+
+        [Test]
+        public override void TestEmptySequence()
+        {
+            base.TestEmptySequence();
+        }
+
+        [Test]
+        public override void TestShortSequence()
+        {
+            base.TestShortSequence();
+        }
+
+        [Test]
+        public override void TestIncompressible()
+        {
+            base.TestIncompressible();
+        }
+
+        [Test]
+        public override void TestConstant()
+        {
+            base.TestConstant();
+        }
+
+        [Test]
+        public override void TestLUCENE5201()
+        {
+            base.TestLUCENE5201();
+        }
+        #endregion
+
+    }
+}
\ No newline at end of file
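
Unlike FAST and FAST_DECOMPRESSION, HIGH_COMPRESSION is Deflate-based rather than
LZ4-based in Lucene 4.8, which is why this fixture extends AbstractTestCompressionMode
directly and does not inherit the LZ4-specific cases (TestLongMatchs,
TestShortLiteralsAndMatchs, and friends).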

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene3x/TestImpersonation.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestImpersonation.cs b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestImpersonation.cs
new file mode 100644
index 0000000..ca9e9b9
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestImpersonation.cs
@@ -0,0 +1,39 @@
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /// <summary>
+    /// Test that the SPI magic returns "PreFlexRWCodec" for Lucene3x
+    ///
+    /// @lucene.experimental
+    /// </summary>
+    [TestFixture]
+    public class TestImpersonation : LuceneTestCase
+    {
+        [Test]
+        public virtual void Test()
+        {
+            Codec codec = Codec.ForName("Lucene3x");
+            Assert.IsTrue(codec is PreFlexRWCodec);
+        }
+    }
+}
\ No newline at end of file
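
Codec.ForName is the SPI entry point this test exercises; with old-format impersonation
active, the test framework registers PreFlexRWCodec under the "Lucene3x" name. For
context, a small sketch of the same lookup in ordinary code (the codec name here is
assumed for illustration):

    Codec codec = Codec.ForName("Lucene46"); // resolve any registered codec by name
    Codec defaultCodec = Codec.Default;      // the codec used when none is configured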

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xPostingsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xPostingsFormat.cs
new file mode 100644
index 0000000..0ab9a7b
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xPostingsFormat.cs
@@ -0,0 +1,109 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using BasePostingsFormatTestCase = Lucene.Net.Index.BasePostingsFormatTestCase;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Tests Lucene3x postings format
+    /// </summary>
+    public class TestLucene3xPostingsFormat : BasePostingsFormatTestCase
+    {
+        private readonly Codec Codec_Renamed;
+
+        public TestLucene3xPostingsFormat() : base()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
+            Codec_Renamed = new PreFlexRWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+        }
+
+        protected override Codec Codec
+        {
+            get
+            {
+                return Codec_Renamed;
+            }
+        }
+
+
+        #region BasePostingsFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestDocsOnly()
+        {
+            base.TestDocsOnly();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqs()
+        {
+            base.TestDocsAndFreqs();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositions()
+        {
+            base.TestDocsAndFreqsAndPositions();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndPayloads()
+        {
+            base.TestDocsAndFreqsAndPositionsAndPayloads();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndOffsets()
+        {
+            base.TestDocsAndFreqsAndPositionsAndOffsets();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndOffsetsAndPayloads()
+        {
+            base.TestDocsAndFreqsAndPositionsAndOffsetsAndPayloads();
+        }
+
+        [Test]
+        public override void TestRandom()
+        {
+            base.TestRandom();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file


[22/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestOmitPositions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestOmitPositions.cs b/src/Lucene.Net.Tests/Index/TestOmitPositions.cs
new file mode 100644
index 0000000..ff8ae7d
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestOmitPositions.cs
@@ -0,0 +1,294 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    ///
+    /// <summary>
+    /// @lucene.experimental
+    /// </summary>
+    [TestFixture]
+    public class TestOmitPositions : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestBasic()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.IndexOptions = IndexOptions.DOCS_AND_FREQS;
+            Field f = NewField("foo", "this is a test test", ft);
+            doc.Add(f);
+            for (int i = 0; i < 100; i++)
+            {
+                w.AddDocument(doc);
+            }
+
+            IndexReader reader = w.Reader;
+            w.Dispose();
+
+            Assert.IsNull(MultiFields.GetTermPositionsEnum(reader, null, "foo", new BytesRef("test")));
+
+            DocsEnum de = TestUtil.Docs(Random(), reader, "foo", new BytesRef("test"), null, null, DocsEnum.FLAG_FREQS);
+            while (de.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                Assert.AreEqual(2, de.Freq);
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        // Tests whether the DocumentWriter correctly enables the
+        // omitTermFreqAndPositions bit in the FieldInfo
+        [Test]
+        public virtual void TestPositions()
+        {
+            Directory ram = NewDirectory();
+            Analyzer analyzer = new MockAnalyzer(Random());
+            IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
+            Document d = new Document();
+
+            // f1,f2,f3: docs only
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.IndexOptions = IndexOptions.DOCS_ONLY;
+
+            Field f1 = NewField("f1", "this field has docs only", ft);
+            d.Add(f1);
+
+            Field f2 = NewField("f2", "this field has docs only", ft);
+            d.Add(f2);
+
+            Field f3 = NewField("f3", "this field has docs only", ft);
+            d.Add(f3);
+
+            FieldType ft2 = new FieldType(TextField.TYPE_NOT_STORED);
+            ft2.IndexOptions = IndexOptions.DOCS_AND_FREQS;
+
+            // f4,f5,f6 docs and freqs
+            Field f4 = NewField("f4", "this field has docs and freqs", ft2);
+            d.Add(f4);
+
+            Field f5 = NewField("f5", "this field has docs and freqs", ft2);
+            d.Add(f5);
+
+            Field f6 = NewField("f6", "this field has docs and freqs", ft2);
+            d.Add(f6);
+
+            FieldType ft3 = new FieldType(TextField.TYPE_NOT_STORED);
+            ft3.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
+
+            // f7,f8,f9 docs/freqs/positions
+            Field f7 = NewField("f7", "this field has docs and freqs and positions", ft3);
+            d.Add(f7);
+
+            Field f8 = NewField("f8", "this field has docs and freqs and positions", ft3);
+            d.Add(f8);
+
+            Field f9 = NewField("f9", "this field has docs and freqs and positions", ft3);
+            d.Add(f9);
+
+            writer.AddDocument(d);
+            writer.ForceMerge(1);
+
+            // now we add another document which has docs-only for f1, f4, f7, docs/freqs for f2, f5, f8,
+            // and docs/freqs/positions for f3, f6, f9
+            d = new Document();
+
+            // f1,f4,f7: docs only
+            f1 = NewField("f1", "this field has docs only", ft);
+            d.Add(f1);
+
+            f4 = NewField("f4", "this field has docs only", ft);
+            d.Add(f4);
+
+            f7 = NewField("f7", "this field has docs only", ft);
+            d.Add(f7);
+
+            // f2, f5, f8: docs and freqs
+            f2 = NewField("f2", "this field has docs and freqs", ft2);
+            d.Add(f2);
+
+            f5 = NewField("f5", "this field has docs and freqs", ft2);
+            d.Add(f5);
+
+            f8 = NewField("f8", "this field has docs and freqs", ft2);
+            d.Add(f8);
+
+            // f3, f6, f9: docs and freqs and positions
+            f3 = NewField("f3", "this field has docs and freqs and positions", ft3);
+            d.Add(f3);
+
+            f6 = NewField("f6", "this field has docs and freqs and positions", ft3);
+            d.Add(f6);
+
+            f9 = NewField("f9", "this field has docs and freqs and positions", ft3);
+            d.Add(f9);
+
+            writer.AddDocument(d);
+
+            // force merge
+            writer.ForceMerge(1);
+            // flush
+            writer.Dispose();
+
+            SegmentReader reader = GetOnlySegmentReader(DirectoryReader.Open(ram));
+            FieldInfos fi = reader.FieldInfos;
+            // docs + docs = docs
+            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f1").IndexOptions);
+            // docs + docs/freqs = docs
+            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f2").IndexOptions);
+            // docs + docs/freqs/pos = docs
+            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f3").IndexOptions);
+            // docs/freqs + docs = docs
+            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f4").IndexOptions);
+            // docs/freqs + docs/freqs = docs/freqs
+            Assert.AreEqual(IndexOptions.DOCS_AND_FREQS, fi.FieldInfo("f5").IndexOptions);
+            // docs/freqs + docs/freqs/pos = docs/freqs
+            Assert.AreEqual(IndexOptions.DOCS_AND_FREQS, fi.FieldInfo("f6").IndexOptions);
+            // docs/freqs/pos + docs = docs
+            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f7").IndexOptions);
+            // docs/freqs/pos + docs/freqs = docs/freqs
+            Assert.AreEqual(IndexOptions.DOCS_AND_FREQS, fi.FieldInfo("f8").IndexOptions);
+            // docs/freqs/pos + docs/freqs/pos = docs/freqs/pos
+            Assert.AreEqual(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, fi.FieldInfo("f9").IndexOptions);
+
+            reader.Dispose();
+            ram.Dispose();
+        }
+
+        private void AssertNoPrx(Directory dir)
+        {
+            string[] files = dir.ListAll();
+            for (int i = 0; i < files.Length; i++)
+            {
+                Assert.IsFalse(files[i].EndsWith(".prx"));
+                Assert.IsFalse(files[i].EndsWith(".pos"));
+            }
+        }
+
+        // Verifies no *.prx exists when all fields omit term positions:
+        [Test]
+        public virtual void TestNoPrxFile()
+        {
+            Directory ram = NewDirectory();
+            Analyzer analyzer = new MockAnalyzer(Random());
+            IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(3).SetMergePolicy(NewLogMergePolicy()));
+            LogMergePolicy lmp = (LogMergePolicy)writer.Config.MergePolicy;
+            lmp.MergeFactor = 2;
+            lmp.NoCFSRatio = 0.0;
+            Document d = new Document();
+
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.IndexOptions = IndexOptions.DOCS_AND_FREQS;
+            Field f1 = NewField("f1", "this field has term freqs", ft);
+            d.Add(f1);
+
+            for (int i = 0; i < 30; i++)
+            {
+                writer.AddDocument(d);
+            }
+
+            writer.Commit();
+
+            AssertNoPrx(ram);
+
+            // now add some documents with positions, and check there is no prox after full merge
+            d = new Document();
+            f1 = NewTextField("f1", "this field has positions", Field.Store.NO);
+            d.Add(f1);
+
+            for (int i = 0; i < 30; i++)
+            {
+                writer.AddDocument(d);
+            }
+
+            // force merge
+            writer.ForceMerge(1);
+            // flush
+            writer.Dispose();
+
+            AssertNoPrx(ram);
+            ram.Dispose();
+        }
+
+        /// <summary>
+        /// make sure we downgrade positions and payloads correctly </summary>
+        [Test]
+        public virtual void TestMixing()
+        {
+            // no positions
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.IndexOptions = IndexOptions.DOCS_AND_FREQS;
+
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+
+            for (int i = 0; i < 20; i++)
+            {
+                Document doc = new Document();
+                if (i < 19 && Random().NextBoolean())
+                {
+                    for (int j = 0; j < 50; j++)
+                    {
+                        doc.Add(new TextField("foo", "i have positions", Field.Store.NO));
+                    }
+                }
+                else
+                {
+                    for (int j = 0; j < 50; j++)
+                    {
+                        doc.Add(new Field("foo", "i have no positions", ft));
+                    }
+                }
+                iw.AddDocument(doc);
+                iw.Commit();
+            }
+
+            if (Random().NextBoolean())
+            {
+                iw.ForceMerge(1);
+            }
+
+            DirectoryReader ir = iw.Reader;
+            FieldInfos fis = MultiFields.GetMergedFieldInfos(ir);
+            Assert.AreEqual(IndexOptions.DOCS_AND_FREQS, fis.FieldInfo("foo").IndexOptions);
+            Assert.IsFalse(fis.FieldInfo("foo").HasPayloads);
+            iw.Dispose();
+            ir.Dispose();
+            dir.Dispose(); // checkindex
+        }
+    }
+}
\ No newline at end of file
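
The assertion block in TestPositions pins down the merge rule: when two segments
disagree about a field, the merged field keeps the weaker of the two IndexOptions. A
minimal illustration of that rule (not the merger's actual code path; it assumes the
enum members are declared weakest to strongest):

    static IndexOptions MergedOptions(IndexOptions a, IndexOptions b)
    {
        // DOCS_ONLY < DOCS_AND_FREQS < DOCS_AND_FREQS_AND_POSITIONS < ..._AND_OFFSETS
        return (int)a < (int)b ? a : b;
    }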

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestOmitTf.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestOmitTf.cs b/src/Lucene.Net.Tests/Index/TestOmitTf.cs
new file mode 100644
index 0000000..3286d4b
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestOmitTf.cs
@@ -0,0 +1,588 @@
+using System;
+using System.Text;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using BooleanQuery = Lucene.Net.Search.BooleanQuery;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CollectionStatistics = Lucene.Net.Search.CollectionStatistics;
+    using ICollector = Lucene.Net.Search.ICollector;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Explanation = Lucene.Net.Search.Explanation;
+    using Field = Field;
+    using FieldType = FieldType;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using Occur = Lucene.Net.Search.Occur;
+    using PhraseQuery = Lucene.Net.Search.PhraseQuery;
+    using Scorer = Lucene.Net.Search.Scorer;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TermStatistics = Lucene.Net.Search.TermStatistics;
+    using TextField = TextField;
+    using TFIDFSimilarity = Lucene.Net.Search.Similarities.TFIDFSimilarity;
+
+    [TestFixture]
+    public class TestOmitTf : LuceneTestCase
+    {
+        public class SimpleSimilarity : TFIDFSimilarity
+        {
+            public override float DecodeNormValue(long norm)
+            {
+                return norm;
+            }
+
+            public override long EncodeNormValue(float f)
+            {
+                return (long)f;
+            }
+
+            public override float QueryNorm(float sumOfSquaredWeights)
+            {
+                return 1.0f;
+            }
+
+            public override float Coord(int overlap, int maxOverlap)
+            {
+                return 1.0f;
+            }
+
+            public override float LengthNorm(FieldInvertState state)
+            {
+                return state.Boost;
+            }
+
+            public override float Tf(float freq)
+            {
+                return freq;
+            }
+
+            public override float SloppyFreq(int distance)
+            {
+                return 2.0f;
+            }
+
+            public override float Idf(long docFreq, long numDocs)
+            {
+                return 1.0f;
+            }
+
+            public override Explanation IdfExplain(CollectionStatistics collectionStats, TermStatistics[] termStats)
+            {
+                return new Explanation(1.0f, "Inexplicable");
+            }
+
+            public override float ScorePayload(int doc, int start, int end, BytesRef payload)
+            {
+                return 1.0f;
+            }
+        }
+
+        private static readonly FieldType OmitType = new FieldType(TextField.TYPE_NOT_STORED);
+        private static readonly FieldType NormalType = new FieldType(TextField.TYPE_NOT_STORED);
+
+        static TestOmitTf()
+        {
+            OmitType.IndexOptions = IndexOptions.DOCS_ONLY;
+        }
+
+        // Tests whether the DocumentWriter correctly enables the
+        // omitTermFreqAndPositions bit in the FieldInfo
+        [Test]
+        public virtual void TestOmitTermFreqAndPositions()
+        {
+            Directory ram = NewDirectory();
+            Analyzer analyzer = new MockAnalyzer(Random());
+            IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
+            Document d = new Document();
+
+            // this field will have Tf
+            Field f1 = NewField("f1", "this field has term freqs", NormalType);
+            d.Add(f1);
+
+            // this field will NOT have Tf
+            Field f2 = NewField("f2", "this field has NO Tf in all docs", OmitType);
+            d.Add(f2);
+
+            writer.AddDocument(d);
+            writer.ForceMerge(1);
+            // now we add another document which has term freqs for field f2 and not for f1, and verify that the SegmentMerger
+            // keeps things consistent
+            d = new Document();
+
+            // Reverse
+            f1 = NewField("f1", "this field has term freqs", OmitType);
+            d.Add(f1);
+
+            f2 = NewField("f2", "this field has NO Tf in all docs", NormalType);
+            d.Add(f2);
+
+            writer.AddDocument(d);
+
+            // force merge
+            writer.ForceMerge(1);
+            // flush
+            writer.Dispose();
+
+            SegmentReader reader = GetOnlySegmentReader(DirectoryReader.Open(ram));
+            FieldInfos fi = reader.FieldInfos;
+            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f1").IndexOptions, "OmitTermFreqAndPositions field bit should be set.");
+            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f2").IndexOptions, "OmitTermFreqAndPositions field bit should be set.");
+
+            reader.Dispose();
+            ram.Dispose();
+        }
+
+        // Tests whether merging of docs that have different
+        // omitTermFreqAndPositions for the same field works
+        [Test]
+        public virtual void TestMixedMerge()
+        {
+            Directory ram = NewDirectory();
+            Analyzer analyzer = new MockAnalyzer(Random());
+            IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(3).SetMergePolicy(NewLogMergePolicy(2)));
+            Document d = new Document();
+
+            // this field will have Tf
+            Field f1 = NewField("f1", "this field has term freqs", NormalType);
+            d.Add(f1);
+
+            // this field will NOT have Tf
+            Field f2 = NewField("f2", "this field has NO Tf in all docs", OmitType);
+            d.Add(f2);
+
+            for (int i = 0; i < 30; i++)
+            {
+                writer.AddDocument(d);
+            }
+
+            // now we add another document which has term freqs for field f2 and not for f1, and verify that the SegmentMerger
+            // keeps things consistent
+            d = new Document();
+
+            // Reverse
+            f1 = NewField("f1", "this field has term freqs", OmitType);
+            d.Add(f1);
+
+            f2 = NewField("f2", "this field has NO Tf in all docs", NormalType);
+            d.Add(f2);
+
+            for (int i = 0; i < 30; i++)
+            {
+                writer.AddDocument(d);
+            }
+
+            // force merge
+            writer.ForceMerge(1);
+            // flush
+            writer.Dispose();
+
+            SegmentReader reader = GetOnlySegmentReader(DirectoryReader.Open(ram));
+            FieldInfos fi = reader.FieldInfos;
+            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f1").IndexOptions, "OmitTermFreqAndPositions field bit should be set.");
+            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f2").IndexOptions, "OmitTermFreqAndPositions field bit should be set.");
+
+            reader.Dispose();
+            ram.Dispose();
+        }
+
+        // Make sure that first adding docs that do not omitTermFreqAndPositions for
+        // field X, then adding docs that do omitTermFreqAndPositions for that same
+        // field, works correctly.
+        [Test]
+        public virtual void TestMixedRAM()
+        {
+            Directory ram = NewDirectory();
+            Analyzer analyzer = new MockAnalyzer(Random());
+            IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(2)));
+            Document d = new Document();
+
+            // this field will have Tf
+            Field f1 = NewField("f1", "this field has term freqs", NormalType);
+            d.Add(f1);
+
+            // this field will NOT have Tf
+            Field f2 = NewField("f2", "this field has NO Tf in all docs", OmitType);
+            d.Add(f2);
+
+            for (int i = 0; i < 5; i++)
+            {
+                writer.AddDocument(d);
+            }
+
+            for (int i = 0; i < 20; i++)
+            {
+                writer.AddDocument(d);
+            }
+
+            // force merge
+            writer.ForceMerge(1);
+
+            // flush
+            writer.Dispose();
+
+            SegmentReader reader = GetOnlySegmentReader(DirectoryReader.Open(ram));
+            FieldInfos fi = reader.FieldInfos;
+            Assert.AreEqual(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, fi.FieldInfo("f1").IndexOptions, "OmitTermFreqAndPositions field bit should not be set.");
+            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f2").IndexOptions, "OmitTermFreqAndPositions field bit should be set.");
+
+            reader.Dispose();
+            ram.Dispose();
+        }
+
+        private void AssertNoPrx(Directory dir)
+        {
+            string[] files = dir.ListAll();
+            for (int i = 0; i < files.Length; i++)
+            {
+                Assert.IsFalse(files[i].EndsWith(".prx"));
+                Assert.IsFalse(files[i].EndsWith(".pos"));
+            }
+        }
+
+        // Verifies no *.prx exists when all fields omit term freq:
+        [Test]
+        public virtual void TestNoPrxFile()
+        {
+            Directory ram = NewDirectory();
+            Analyzer analyzer = new MockAnalyzer(Random());
+            IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(3).SetMergePolicy(NewLogMergePolicy()));
+            LogMergePolicy lmp = (LogMergePolicy)writer.Config.MergePolicy;
+            lmp.MergeFactor = 2;
+            lmp.NoCFSRatio = 0.0;
+            Document d = new Document();
+
+            Field f1 = NewField("f1", "this field has term freqs", OmitType);
+            d.Add(f1);
+
+            for (int i = 0; i < 30; i++)
+            {
+                writer.AddDocument(d);
+            }
+
+            writer.Commit();
+
+            AssertNoPrx(ram);
+
+            // now add some documents with positions, and check
+            // there is no prox after full merge
+            d = new Document();
+            f1 = NewTextField("f1", "this field has positions", Field.Store.NO);
+            d.Add(f1);
+
+            for (int i = 0; i < 30; i++)
+            {
+                writer.AddDocument(d);
+            }
+
+            // force merge
+            writer.ForceMerge(1);
+            // flush
+            writer.Dispose();
+
+            AssertNoPrx(ram);
+            ram.Dispose();
+        }
+
+        // Test scores with one field with Term Freqs and one without, otherwise with equal content
+        [Test]
+        public virtual void TestBasic()
+        {
+            Directory dir = NewDirectory();
+            Analyzer analyzer = new MockAnalyzer(Random());
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(2).SetSimilarity(new SimpleSimilarity()).SetMergePolicy(NewLogMergePolicy(2)));
+
+            StringBuilder sb = new StringBuilder(265);
+            string term = "term";
+            for (int i = 0; i < 30; i++)
+            {
+                Document doc = new Document();
+                sb.Append(term).Append(" ");
+                string content = sb.ToString();
+                Field noTf = NewField("noTf", content + (i % 2 == 0 ? "" : " notf"), OmitType);
+                doc.Add(noTf);
+
+                Field tf = NewField("tf", content + (i % 2 == 0 ? " tf" : ""), NormalType);
+                doc.Add(tf);
+
+                writer.AddDocument(doc);
+                //System.out.println(d);
+            }
+
+            writer.ForceMerge(1);
+            // flush
+            writer.Dispose();
+
+            /*
+             * Verify the index
+             */
+            IndexReader reader = DirectoryReader.Open(dir);
+            IndexSearcher searcher = NewSearcher(reader);
+            searcher.Similarity = new SimpleSimilarity();
+
+            Term a = new Term("noTf", term);
+            Term b = new Term("tf", term);
+            Term c = new Term("noTf", "notf");
+            Term d = new Term("tf", "tf");
+            TermQuery q1 = new TermQuery(a);
+            TermQuery q2 = new TermQuery(b);
+            TermQuery q3 = new TermQuery(c);
+            TermQuery q4 = new TermQuery(d);
+
+            PhraseQuery pq = new PhraseQuery();
+            pq.Add(a);
+            pq.Add(c);
+            try
+            {
+                searcher.Search(pq, 10);
+                Assert.Fail("did not hit expected exception");
+            }
+            catch (Exception e)
+            {
+                Exception cause = e;
+                // If the searcher uses an executor service, the expected exception is wrapped inside other exceptions
+                while (cause.InnerException != null)
+                {
+                    cause = cause.InnerException;
+                }
+                if (!(cause is InvalidOperationException))
+                {
+                    throw new InvalidOperationException("Expected an InvalidOperationException", e);
+                } // else OK because positions are not indexed
+            }
+
+            searcher.Search(q1, new CountingHitCollectorAnonymousInnerClassHelper(this));
+            //System.out.println(CountingHitCollector.getCount());
+
+            searcher.Search(q2, new CountingHitCollectorAnonymousInnerClassHelper2(this));
+            //System.out.println(CountingHitCollector.getCount());
+
+            searcher.Search(q3, new CountingHitCollectorAnonymousInnerClassHelper3(this));
+            //System.out.println(CountingHitCollector.getCount());
+
+            searcher.Search(q4, new CountingHitCollectorAnonymousInnerClassHelper4(this));
+            //System.out.println(CountingHitCollector.getCount());
+
+            BooleanQuery bq = new BooleanQuery();
+            bq.Add(q1, Occur.MUST);
+            bq.Add(q4, Occur.MUST);
+
+            searcher.Search(bq, new CountingHitCollectorAnonymousInnerClassHelper5(this));
+            Assert.AreEqual(15, CountingHitCollector.Count);
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        private class CountingHitCollectorAnonymousInnerClassHelper : CountingHitCollector
+        {
+            private readonly TestOmitTf OuterInstance;
+
+            public CountingHitCollectorAnonymousInnerClassHelper(TestOmitTf outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            private Scorer scorer;
+
+            public override sealed void SetScorer(Scorer scorer)
+            {
+                this.scorer = scorer;
+            }
+
+            public override sealed void Collect(int doc)
+            {
+                //System.out.println("Q1: Doc=" + doc + " score=" + score);
+                float score = scorer.GetScore();
+                Assert.IsTrue(score == 1.0f, "got score=" + score);
+                base.Collect(doc);
+            }
+        }
+
+        private class CountingHitCollectorAnonymousInnerClassHelper2 : CountingHitCollector
+        {
+            private readonly TestOmitTf OuterInstance;
+
+            public CountingHitCollectorAnonymousInnerClassHelper2(TestOmitTf outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            private Scorer scorer;
+
+            public override sealed void SetScorer(Scorer scorer)
+            {
+                this.scorer = scorer;
+            }
+
+            public override sealed void Collect(int doc)
+            {
+                //System.out.println("Q2: Doc=" + doc + " score=" + score);
+                float score = scorer.GetScore();
+                Assert.AreEqual(1.0f + doc, score, 0.00001f);
+                base.Collect(doc);
+            }
+        }
+
+        private class CountingHitCollectorAnonymousInnerClassHelper3 : CountingHitCollector
+        {
+            private readonly TestOmitTf OuterInstance;
+
+            public CountingHitCollectorAnonymousInnerClassHelper3(TestOmitTf outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            private Scorer scorer;
+
+            public override sealed void SetScorer(Scorer scorer)
+            {
+                this.scorer = scorer;
+            }
+
+            public override sealed void Collect(int doc)
+            {
+                //System.out.println("Q1: Doc=" + doc + " score=" + score);
+                float score = scorer.GetScore();
+                Assert.IsTrue(score == 1.0f);
+                Assert.IsFalse(doc % 2 == 0);
+                base.Collect(doc);
+            }
+        }
+
+        private class CountingHitCollectorAnonymousInnerClassHelper4 : CountingHitCollector
+        {
+            private readonly TestOmitTf OuterInstance;
+
+            public CountingHitCollectorAnonymousInnerClassHelper4(TestOmitTf outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            private Scorer scorer;
+
+            public override sealed void SetScorer(Scorer scorer)
+            {
+                this.scorer = scorer;
+            }
+
+            public override sealed void Collect(int doc)
+            {
+                float score = scorer.GetScore();
+                //System.out.println("Q1: Doc=" + doc + " score=" + score);
+                Assert.IsTrue(score == 1.0f);
+                Assert.IsTrue(doc % 2 == 0);
+                base.Collect(doc);
+            }
+        }
+
+        private class CountingHitCollectorAnonymousInnerClassHelper5 : CountingHitCollector
+        {
+            private readonly TestOmitTf OuterInstance;
+
+            public CountingHitCollectorAnonymousInnerClassHelper5(TestOmitTf outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override sealed void Collect(int doc)
+            {
+                //System.out.println("BQ: Doc=" + doc + " score=" + score);
+                base.Collect(doc);
+            }
+        }
+
+        public class CountingHitCollector : ICollector
+        {
+            internal static int Count_Renamed = 0;
+            internal static int Sum_Renamed = 0;
+            internal int DocBase = -1;
+
+            internal CountingHitCollector()
+            {
+                Count_Renamed = 0;
+                Sum_Renamed = 0;
+            }
+
+            public virtual void SetScorer(Scorer scorer)
+            {
+            }
+
+            public virtual void Collect(int doc)
+            {
+                Count_Renamed++;
+                Sum_Renamed += doc + DocBase; // use it to avoid any possibility of being merged away
+            }
+
+            public static int Count
+            {
+                get
+                {
+                    return Count_Renamed;
+                }
+            }
+
+            public static int Sum
+            {
+                get
+                {
+                    return Sum_Renamed;
+                }
+            }
+
+            public virtual void SetNextReader(AtomicReaderContext context)
+            {
+                DocBase = context.DocBase;
+            }
+
+            public virtual bool AcceptsDocsOutOfOrder
+            {
+                get { return true; }
+            }
+        }
+
+        /// <summary>
+        /// test that when freqs are omitted, totalTermFreq and sumTotalTermFreq are -1 </summary>
+        [Test]
+        public virtual void TestStats()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.IndexOptions = IndexOptions.DOCS_ONLY;
+            ft.Freeze();
+            Field f = NewField("foo", "bar", ft);
+            doc.Add(f);
+            iw.AddDocument(doc);
+            IndexReader ir = iw.Reader;
+            iw.Dispose();
+            Assert.AreEqual(-1, ir.TotalTermFreq(new Term("foo", new BytesRef("bar"))));
+            Assert.AreEqual(-1, ir.GetSumTotalTermFreq("foo"));
+            ir.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file
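
TestBasic above also documents a user-facing contract: running a PhraseQuery against a
field indexed without positions fails with a wrapped InvalidOperationException. Callers
can guard against that with the FieldInfos APIs these tests already use; a hedged
sketch (the enum comparison assumes IndexOptions values are ordered weakest to
strongest):

    FieldInfos fis = MultiFields.GetMergedFieldInfos(reader);
    FieldInfo fi = fis.FieldInfo("noTf");
    bool hasPositions = fi != null
        && fi.IndexOptions >= IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
    // position-dependent queries (PhraseQuery, spans) are only safe when hasPositions is true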

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestParallelAtomicReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestParallelAtomicReader.cs b/src/Lucene.Net.Tests/Index/TestParallelAtomicReader.cs
new file mode 100644
index 0000000..c6e896a
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestParallelAtomicReader.cs
@@ -0,0 +1,357 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Search;
+    using NUnit.Framework;
+    using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using Occur = Lucene.Net.Search.Occur;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestParallelAtomicReader : LuceneTestCase
+    {
+        private IndexSearcher Parallel_Renamed, Single_Renamed;
+        private Directory Dir, Dir1, Dir2;
+
+        [Test]
+        public virtual void TestQueries()
+        {
+            Single_Renamed = Single(Random());
+            Parallel_Renamed = Parallel(Random());
+
+            QueryTest(new TermQuery(new Term("f1", "v1")));
+            QueryTest(new TermQuery(new Term("f1", "v2")));
+            QueryTest(new TermQuery(new Term("f2", "v1")));
+            QueryTest(new TermQuery(new Term("f2", "v2")));
+            QueryTest(new TermQuery(new Term("f3", "v1")));
+            QueryTest(new TermQuery(new Term("f3", "v2")));
+            QueryTest(new TermQuery(new Term("f4", "v1")));
+            QueryTest(new TermQuery(new Term("f4", "v2")));
+
+            BooleanQuery bq1 = new BooleanQuery();
+            bq1.Add(new TermQuery(new Term("f1", "v1")), Occur.MUST);
+            bq1.Add(new TermQuery(new Term("f4", "v1")), Occur.MUST);
+            QueryTest(bq1);
+
+            Single_Renamed.IndexReader.Dispose();
+            Single_Renamed = null;
+            Parallel_Renamed.IndexReader.Dispose();
+            Parallel_Renamed = null;
+            Dir.Dispose();
+            Dir = null;
+            Dir1.Dispose();
+            Dir1 = null;
+            Dir2.Dispose();
+            Dir2 = null;
+        }
+
+        [Test]
+        public virtual void TestFieldNames()
+        {
+            Directory dir1 = GetDir1(Random());
+            Directory dir2 = GetDir2(Random());
+            ParallelAtomicReader pr = new ParallelAtomicReader(SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)), SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2)));
+            FieldInfos fieldInfos = pr.FieldInfos;
+            Assert.AreEqual(4, fieldInfos.Count);
+            Assert.IsNotNull(fieldInfos.FieldInfo("f1"));
+            Assert.IsNotNull(fieldInfos.FieldInfo("f2"));
+            Assert.IsNotNull(fieldInfos.FieldInfo("f3"));
+            Assert.IsNotNull(fieldInfos.FieldInfo("f4"));
+            pr.Dispose();
+            dir1.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestRefCounts1()
+        {
+            Directory dir1 = GetDir1(Random());
+            Directory dir2 = GetDir2(Random());
+            AtomicReader ir1, ir2;
+            // close subreaders: ParallelAtomicReader will not change their refCounts, but will close them when it is itself closed
+            ParallelAtomicReader pr = new ParallelAtomicReader(ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)), ir2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2)));
+
+            // check RefCounts
+            Assert.AreEqual(1, ir1.RefCount);
+            Assert.AreEqual(1, ir2.RefCount);
+            pr.Dispose();
+            Assert.AreEqual(0, ir1.RefCount);
+            Assert.AreEqual(0, ir2.RefCount);
+            dir1.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestRefCounts2()
+        {
+            Directory dir1 = GetDir1(Random());
+            Directory dir2 = GetDir2(Random());
+            AtomicReader ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));
+            AtomicReader ir2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2));
+            // don't close subreaders, so ParallelReader will increment refcounts
+            ParallelAtomicReader pr = new ParallelAtomicReader(false, ir1, ir2);
+            // check RefCounts
+            Assert.AreEqual(2, ir1.RefCount);
+            Assert.AreEqual(2, ir2.RefCount);
+            pr.Dispose();
+            Assert.AreEqual(1, ir1.RefCount);
+            Assert.AreEqual(1, ir2.RefCount);
+            ir1.Dispose();
+            ir2.Dispose();
+            Assert.AreEqual(0, ir1.RefCount);
+            Assert.AreEqual(0, ir2.RefCount);
+            dir1.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestCloseInnerReader()
+        {
+            Directory dir1 = GetDir1(Random());
+            AtomicReader ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));
+
+            // with overlapping
+            ParallelAtomicReader pr = new ParallelAtomicReader(true, new AtomicReader[] { ir1 }, new AtomicReader[] { ir1 });
+
+            ir1.Dispose();
+
+            try
+            {
+                pr.Document(0);
+                Assert.Fail("ParallelAtomicReader should be already closed because inner reader was closed!");
+            }
+#pragma warning disable 168
+            catch (AlreadyClosedException e)
+#pragma warning restore 168
+            {
+                // pass
+            }
+
+            // noop:
+            pr.Dispose();
+            dir1.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIncompatibleIndexes()
+        {
+            // two documents:
+            Directory dir1 = GetDir1(Random());
+
+            // one document only:
+            Directory dir2 = NewDirectory();
+            IndexWriter w2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document d3 = new Document();
+
+            d3.Add(NewTextField("f3", "v1", Field.Store.YES));
+            w2.AddDocument(d3);
+            w2.Dispose();
+
+            AtomicReader ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));
+            AtomicReader ir2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2));
+
+            try
+            {
+                new ParallelAtomicReader(ir1, ir2);
+                Assert.Fail("didn't get exptected exception: indexes don't have same number of documents");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+
+            try
+            {
+                new ParallelAtomicReader(Random().NextBoolean(), new AtomicReader[] { ir1, ir2 }, new AtomicReader[] { ir1, ir2 });
+                Assert.Fail("didn't get expected exception: indexes don't have same number of documents");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+            // check RefCounts
+            Assert.AreEqual(1, ir1.RefCount);
+            Assert.AreEqual(1, ir2.RefCount);
+            ir1.Dispose();
+            ir2.Dispose();
+            dir1.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIgnoreStoredFields()
+        {
+            Directory dir1 = GetDir1(Random());
+            Directory dir2 = GetDir2(Random());
+            AtomicReader ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));
+            AtomicReader ir2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2));
+
+            // with overlapping
+            ParallelAtomicReader pr = new ParallelAtomicReader(false, new AtomicReader[] { ir1, ir2 }, new AtomicReader[] { ir1 });
+            Assert.AreEqual("v1", pr.Document(0).Get("f1"));
+            Assert.AreEqual("v1", pr.Document(0).Get("f2"));
+            Assert.IsNull(pr.Document(0).Get("f3"));
+            Assert.IsNull(pr.Document(0).Get("f4"));
+            // check that fields are there
+            Assert.IsNotNull(pr.Terms("f1"));
+            Assert.IsNotNull(pr.Terms("f2"));
+            Assert.IsNotNull(pr.Terms("f3"));
+            Assert.IsNotNull(pr.Terms("f4"));
+            pr.Dispose();
+
+            // no stored fields at all
+            pr = new ParallelAtomicReader(false, new AtomicReader[] { ir2 }, new AtomicReader[0]);
+            Assert.IsNull(pr.Document(0).Get("f1"));
+            Assert.IsNull(pr.Document(0).Get("f2"));
+            Assert.IsNull(pr.Document(0).Get("f3"));
+            Assert.IsNull(pr.Document(0).Get("f4"));
+            // check which fields are present
+            Assert.IsNull(pr.Terms("f1"));
+            Assert.IsNull(pr.Terms("f2"));
+            Assert.IsNotNull(pr.Terms("f3"));
+            Assert.IsNotNull(pr.Terms("f4"));
+            pr.Dispose();
+
+            // without overlapping
+            pr = new ParallelAtomicReader(true, new AtomicReader[] { ir2 }, new AtomicReader[] { ir1 });
+            Assert.AreEqual("v1", pr.Document(0).Get("f1"));
+            Assert.AreEqual("v1", pr.Document(0).Get("f2"));
+            Assert.IsNull(pr.Document(0).Get("f3"));
+            Assert.IsNull(pr.Document(0).Get("f4"));
+            // check which fields are present
+            Assert.IsNull(pr.Terms("f1"));
+            Assert.IsNull(pr.Terms("f2"));
+            Assert.IsNotNull(pr.Terms("f3"));
+            Assert.IsNotNull(pr.Terms("f4"));
+            pr.Dispose();
+
+            // no main readers
+            try
+            {
+                new ParallelAtomicReader(true, new AtomicReader[0], new AtomicReader[] { ir1 });
+                Assert.Fail("didn't get expected exception: need a non-empty main-reader array");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // pass
+            }
+
+            dir1.Dispose();
+            dir2.Dispose();
+        }
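+
+        // A note on the three-argument constructor exercised above (a reading of
+        // these tests rather than official API docs): the first reader array
+        // defines the merged postings/terms view, while the second selects which
+        // readers contribute stored fields to Document(n); an empty second array
+        // yields a view with no stored fields at all.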
+
+        private void QueryTest(Query query)
+        {
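+            // The parallel view and the single combined index hold identical
+            // content, so every query must produce the same ranked hits, with
+            // scores equal to within the 0.001 tolerance used below.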
+            ScoreDoc[] parallelHits = Parallel_Renamed.Search(query, null, 1000).ScoreDocs;
+            ScoreDoc[] singleHits = Single_Renamed.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(parallelHits.Length, singleHits.Length);
+            for (int i = 0; i < parallelHits.Length; i++)
+            {
+                Assert.AreEqual(parallelHits[i].Score, singleHits[i].Score, 0.001f);
+                Document docParallel = Parallel_Renamed.Doc(parallelHits[i].Doc);
+                Document docSingle = Single_Renamed.Doc(singleHits[i].Doc);
+                Assert.AreEqual(docParallel.Get("f1"), docSingle.Get("f1"));
+                Assert.AreEqual(docParallel.Get("f2"), docSingle.Get("f2"));
+                Assert.AreEqual(docParallel.Get("f3"), docSingle.Get("f3"));
+                Assert.AreEqual(docParallel.Get("f4"), docSingle.Get("f4"));
+            }
+        }
+
+        // Fields 1-4 indexed together:
+        private IndexSearcher Single(Random random)
+        {
+            Dir = NewDirectory();
+            IndexWriter w = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+            Document d1 = new Document();
+            d1.Add(NewTextField("f1", "v1", Field.Store.YES));
+            d1.Add(NewTextField("f2", "v1", Field.Store.YES));
+            d1.Add(NewTextField("f3", "v1", Field.Store.YES));
+            d1.Add(NewTextField("f4", "v1", Field.Store.YES));
+            w.AddDocument(d1);
+            Document d2 = new Document();
+            d2.Add(NewTextField("f1", "v2", Field.Store.YES));
+            d2.Add(NewTextField("f2", "v2", Field.Store.YES));
+            d2.Add(NewTextField("f3", "v2", Field.Store.YES));
+            d2.Add(NewTextField("f4", "v2", Field.Store.YES));
+            w.AddDocument(d2);
+            w.Dispose();
+
+            DirectoryReader ir = DirectoryReader.Open(Dir);
+            return NewSearcher(ir);
+        }
+
+        // Fields 1 & 2 in one index, 3 & 4 in other, with ParallelReader:
+        private IndexSearcher Parallel(Random random)
+        {
+            Dir1 = GetDir1(random);
+            Dir2 = GetDir2(random);
+            ParallelAtomicReader pr = new ParallelAtomicReader(SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(Dir1)), SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(Dir2)));
+            TestUtil.CheckReader(pr);
+            return NewSearcher(pr);
+        }
+
+        private Directory GetDir1(Random random)
+        {
+            Directory dir1 = NewDirectory();
+            IndexWriter w1 = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+            Document d1 = new Document();
+            d1.Add(NewTextField("f1", "v1", Field.Store.YES));
+            d1.Add(NewTextField("f2", "v1", Field.Store.YES));
+            w1.AddDocument(d1);
+            Document d2 = new Document();
+            d2.Add(NewTextField("f1", "v2", Field.Store.YES));
+            d2.Add(NewTextField("f2", "v2", Field.Store.YES));
+            w1.AddDocument(d2);
+            w1.Dispose();
+            return dir1;
+        }
+
+        private Directory GetDir2(Random random)
+        {
+            Directory dir2 = NewDirectory();
+            IndexWriter w2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+            Document d3 = new Document();
+            d3.Add(NewTextField("f3", "v1", Field.Store.YES));
+            d3.Add(NewTextField("f4", "v1", Field.Store.YES));
+            w2.AddDocument(d3);
+            Document d4 = new Document();
+            d4.Add(NewTextField("f3", "v2", Field.Store.YES));
+            d4.Add(NewTextField("f4", "v2", Field.Store.YES));
+            w2.AddDocument(d4);
+            w2.Dispose();
+            return dir2;
+        }
+    }
+}
\ No newline at end of file

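For orientation, a minimal sketch of the pattern the tests above exercise. The
directory handles (dirA, dirB) and the field name are illustrative placeholders,
and both indexes are assumed to contain the same documents added in the same
order:

    // Combine two indexes that hold different fields for the same documents.
    AtomicReader partA = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dirA));
    AtomicReader partB = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dirB));

    // The default constructor takes ownership: disposing pr closes both parts.
    ParallelAtomicReader pr = new ParallelAtomicReader(partA, partB);
    IndexSearcher searcher = new IndexSearcher(pr);
    TopDocs hits = searcher.Search(new TermQuery(new Term("f1", "v1")), 10);
    // ... consume hits ...
    pr.Dispose();
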
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestParallelCompositeReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestParallelCompositeReader.cs b/src/Lucene.Net.Tests/Index/TestParallelCompositeReader.cs
new file mode 100644
index 0000000..8b7da0e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestParallelCompositeReader.cs
@@ -0,0 +1,666 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Search;
+    using NUnit.Framework;
+    using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using Occur = Lucene.Net.Search.Occur;
+    using IReaderClosedListener = Lucene.Net.Index.IndexReader.IReaderClosedListener;
+
+    [TestFixture]
+    public class TestParallelCompositeReader : LuceneTestCase
+    {
+        private IndexSearcher Parallel_Renamed, Single_Renamed;
+        private Directory Dir, Dir1, Dir2;
+
+        [Test]
+        public virtual void TestQueries()
+        {
+            Single_Renamed = Single(Random(), false);
+            Parallel_Renamed = Parallel(Random(), false);
+
+            Queries();
+
+            Single_Renamed.IndexReader.Dispose();
+            Single_Renamed = null;
+            Parallel_Renamed.IndexReader.Dispose();
+            Parallel_Renamed = null;
+            Dir.Dispose();
+            Dir = null;
+            Dir1.Dispose();
+            Dir1 = null;
+            Dir2.Dispose();
+            Dir2 = null;
+        }
+
+        [Test]
+        public virtual void TestQueriesCompositeComposite()
+        {
+            Single_Renamed = Single(Random(), true);
+            Parallel_Renamed = Parallel(Random(), true);
+
+            Queries();
+
+            Single_Renamed.IndexReader.Dispose();
+            Single_Renamed = null;
+            Parallel_Renamed.IndexReader.Dispose();
+            Parallel_Renamed = null;
+            Dir.Dispose();
+            Dir = null;
+            Dir1.Dispose();
+            Dir1 = null;
+            Dir2.Dispose();
+            Dir2 = null;
+        }
+
+        private void Queries()
+        {
+            QueryTest(new TermQuery(new Term("f1", "v1")));
+            QueryTest(new TermQuery(new Term("f1", "v2")));
+            QueryTest(new TermQuery(new Term("f2", "v1")));
+            QueryTest(new TermQuery(new Term("f2", "v2")));
+            QueryTest(new TermQuery(new Term("f3", "v1")));
+            QueryTest(new TermQuery(new Term("f3", "v2")));
+            QueryTest(new TermQuery(new Term("f4", "v1")));
+            QueryTest(new TermQuery(new Term("f4", "v2")));
+
+            BooleanQuery bq1 = new BooleanQuery();
+            bq1.Add(new TermQuery(new Term("f1", "v1")), Occur.MUST);
+            bq1.Add(new TermQuery(new Term("f4", "v1")), Occur.MUST);
+            QueryTest(bq1);
+        }
+
+        [Test]
+        public virtual void TestRefCounts1()
+        {
+            Directory dir1 = GetDir1(Random());
+            Directory dir2 = GetDir2(Random());
+            DirectoryReader ir1, ir2;
+            // close subreaders: the ParallelReader leaves refCounts unchanged but closes them when it is itself closed
+            ParallelCompositeReader pr = new ParallelCompositeReader(ir1 = DirectoryReader.Open(dir1), ir2 = DirectoryReader.Open(dir2));
+            IndexReader psub1 = pr.GetSequentialSubReaders()[0];
+            // check RefCounts
+            Assert.AreEqual(1, ir1.RefCount);
+            Assert.AreEqual(1, ir2.RefCount);
+            Assert.AreEqual(1, psub1.RefCount);
+            pr.Dispose();
+            Assert.AreEqual(0, ir1.RefCount);
+            Assert.AreEqual(0, ir2.RefCount);
+            Assert.AreEqual(0, psub1.RefCount);
+            dir1.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestRefCounts2()
+        {
+            Directory dir1 = GetDir1(Random());
+            Directory dir2 = GetDir2(Random());
+            DirectoryReader ir1 = DirectoryReader.Open(dir1);
+            DirectoryReader ir2 = DirectoryReader.Open(dir2);
+
+            // don't close subreaders, so ParallelReader will increment refcounts
+            ParallelCompositeReader pr = new ParallelCompositeReader(false, ir1, ir2);
+            IndexReader psub1 = pr.GetSequentialSubReaders()[0];
+            // check RefCounts
+            Assert.AreEqual(2, ir1.RefCount);
+            Assert.AreEqual(2, ir2.RefCount);
+            Assert.AreEqual(1, psub1.RefCount, "refCount must be 1, as the synthetic reader was created by ParallelCompositeReader");
+            pr.Dispose();
+            Assert.AreEqual(1, ir1.RefCount);
+            Assert.AreEqual(1, ir2.RefCount);
+            Assert.AreEqual(0, psub1.RefCount, "refcount must be 0 because parent was closed");
+            ir1.Dispose();
+            ir2.Dispose();
+            Assert.AreEqual(0, ir1.RefCount);
+            Assert.AreEqual(0, ir2.RefCount);
+            Assert.AreEqual(0, psub1.RefCount, "refcount should not change anymore");
+            dir1.Dispose();
+            dir2.Dispose();
+        }
+
+        // closeSubreaders=false
+        [Test]
+        public virtual void TestReaderClosedListener1()
+        {
+            Directory dir1 = GetDir1(Random());
+            CompositeReader ir1 = DirectoryReader.Open(dir1);
+
+            // with overlapping
+            ParallelCompositeReader pr = new ParallelCompositeReader(false, new CompositeReader[] { ir1 }, new CompositeReader[] { ir1 });
+
+            int[] listenerClosedCount = new int[1];
+
+            Assert.AreEqual(3, pr.Leaves.Count);
+
+            foreach (AtomicReaderContext cxt in pr.Leaves)
+            {
+                cxt.Reader.AddReaderClosedListener(new ReaderClosedListenerAnonymousInnerClassHelper(this, listenerClosedCount));
+            }
+            pr.Dispose();
+            ir1.Dispose();
+            Assert.AreEqual(3, listenerClosedCount[0]);
+            dir1.Dispose();
+        }
+
+        private class ReaderClosedListenerAnonymousInnerClassHelper : IReaderClosedListener
+        {
+            private readonly TestParallelCompositeReader OuterInstance;
+
+            private int[] ListenerClosedCount;
+
+            public ReaderClosedListenerAnonymousInnerClassHelper(TestParallelCompositeReader outerInstance, int[] listenerClosedCount)
+            {
+                this.OuterInstance = outerInstance;
+                this.ListenerClosedCount = listenerClosedCount;
+            }
+
+            public void OnClose(IndexReader reader)
+            {
+                ListenerClosedCount[0]++;
+            }
+        }
+
+        // closeSubreaders=true
+        [Test]
+        public virtual void TestReaderClosedListener2()
+        {
+            Directory dir1 = GetDir1(Random());
+            CompositeReader ir1 = DirectoryReader.Open(dir1);
+
+            // with overlapping
+            ParallelCompositeReader pr = new ParallelCompositeReader(true, new CompositeReader[] { ir1 }, new CompositeReader[] { ir1 });
+
+            int[] listenerClosedCount = new int[1];
+
+            Assert.AreEqual(3, pr.Leaves.Count);
+
+            foreach (AtomicReaderContext cxt in pr.Leaves)
+            {
+                cxt.Reader.AddReaderClosedListener(new ReaderClosedListenerAnonymousInnerClassHelper2(this, listenerClosedCount));
+            }
+            pr.Dispose();
+            Assert.AreEqual(3, listenerClosedCount[0]);
+            dir1.Dispose();
+        }
+
+        private class ReaderClosedListenerAnonymousInnerClassHelper2 : IReaderClosedListener
+        {
+            private readonly TestParallelCompositeReader OuterInstance;
+
+            private int[] ListenerClosedCount;
+
+            public ReaderClosedListenerAnonymousInnerClassHelper2(TestParallelCompositeReader outerInstance, int[] listenerClosedCount)
+            {
+                this.OuterInstance = outerInstance;
+                this.ListenerClosedCount = listenerClosedCount;
+            }
+
+            public void OnClose(IndexReader reader)
+            {
+                ListenerClosedCount[0]++;
+            }
+        }
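+
+        // The expected count of 3 in both listener tests comes from GetDir1's
+        // (1,2,1) segment structure: the parallel reader exposes one leaf per
+        // aligned segment, and each leaf fires its listener once when it is
+        // finally closed.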
+
+        [Test]
+        public virtual void TestCloseInnerReader()
+        {
+            Directory dir1 = GetDir1(Random());
+            CompositeReader ir1 = DirectoryReader.Open(dir1);
+            Assert.AreEqual(1, ir1.GetSequentialSubReaders()[0].RefCount);
+
+            // with overlapping
+            ParallelCompositeReader pr = new ParallelCompositeReader(true, new CompositeReader[] { ir1 }, new CompositeReader[] { ir1 });
+
+            IndexReader psub = pr.GetSequentialSubReaders()[0];
+            Assert.AreEqual(1, psub.RefCount);
+
+            ir1.Dispose();
+
+            Assert.AreEqual(1, psub.RefCount, "refCount of synthetic subreader should be unchanged");
+            try
+            {
+                psub.Document(0);
+                Assert.Fail("Subreader should be already closed because inner reader was closed!");
+            }
+#pragma warning disable 168
+            catch (AlreadyClosedException e)
+#pragma warning restore 168
+            {
+                // pass
+            }
+
+            try
+            {
+                pr.Document(0);
+                Assert.Fail("ParallelCompositeReader should be already closed because inner reader was closed!");
+            }
+#pragma warning disable 168
+            catch (AlreadyClosedException e)
+#pragma warning restore 168
+            {
+                // pass
+            }
+
+            // noop:
+            pr.Dispose();
+            Assert.AreEqual(0, psub.RefCount);
+            dir1.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIncompatibleIndexes1()
+        {
+            // two documents:
+            Directory dir1 = GetDir1(Random());
+
+            // one document only:
+            Directory dir2 = NewDirectory();
+            IndexWriter w2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document d3 = new Document();
+
+            d3.Add(NewTextField("f3", "v1", Field.Store.YES));
+            w2.AddDocument(d3);
+            w2.Dispose();
+
+            DirectoryReader ir1 = DirectoryReader.Open(dir1), ir2 = DirectoryReader.Open(dir2);
+            try
+            {
+                new ParallelCompositeReader(ir1, ir2);
+                Assert.Fail("didn't get expected exception: indexes don't have same number of documents");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+            try
+            {
+                new ParallelCompositeReader(Random().NextBoolean(), ir1, ir2);
+                Assert.Fail("didn't get expected exception: indexes don't have same number of documents");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+            Assert.AreEqual(1, ir1.RefCount);
+            Assert.AreEqual(1, ir2.RefCount);
+            ir1.Dispose();
+            ir2.Dispose();
+            Assert.AreEqual(0, ir1.RefCount);
+            Assert.AreEqual(0, ir2.RefCount);
+            dir1.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIncompatibleIndexes2()
+        {
+            Directory dir1 = GetDir1(Random());
+            Directory dir2 = GetInvalidStructuredDir2(Random());
+
+            DirectoryReader ir1 = DirectoryReader.Open(dir1), ir2 = DirectoryReader.Open(dir2);
+            CompositeReader[] readers = new CompositeReader[] { ir1, ir2 };
+            try
+            {
+                new ParallelCompositeReader(readers);
+                Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+            try
+            {
+                new ParallelCompositeReader(Random().NextBoolean(), readers, readers);
+                Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+            Assert.AreEqual(1, ir1.RefCount);
+            Assert.AreEqual(1, ir2.RefCount);
+            ir1.Dispose();
+            ir2.Dispose();
+            Assert.AreEqual(0, ir1.RefCount);
+            Assert.AreEqual(0, ir2.RefCount);
+            dir1.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIncompatibleIndexes3()
+        {
+            Directory dir1 = GetDir1(Random());
+            Directory dir2 = GetDir2(Random());
+
+            CompositeReader ir1 = new MultiReader(DirectoryReader.Open(dir1), SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)));
+            CompositeReader ir2 = new MultiReader(DirectoryReader.Open(dir2), DirectoryReader.Open(dir2));
+            CompositeReader[] readers = new CompositeReader[] { ir1, ir2 };
+            try
+            {
+                new ParallelCompositeReader(readers);
+                Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+            try
+            {
+                new ParallelCompositeReader(Random().NextBoolean(), readers, readers);
+                Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+            Assert.AreEqual(1, ir1.RefCount);
+            Assert.AreEqual(1, ir2.RefCount);
+            ir1.Dispose();
+            ir2.Dispose();
+            Assert.AreEqual(0, ir1.RefCount);
+            Assert.AreEqual(0, ir2.RefCount);
+            dir1.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIgnoreStoredFields()
+        {
+            Directory dir1 = GetDir1(Random());
+            Directory dir2 = GetDir2(Random());
+            CompositeReader ir1 = DirectoryReader.Open(dir1);
+            CompositeReader ir2 = DirectoryReader.Open(dir2);
+
+            // with overlapping
+            ParallelCompositeReader pr = new ParallelCompositeReader(false, new CompositeReader[] { ir1, ir2 }, new CompositeReader[] { ir1 });
+            Assert.AreEqual("v1", pr.Document(0).Get("f1"));
+            Assert.AreEqual("v1", pr.Document(0).Get("f2"));
+            Assert.IsNull(pr.Document(0).Get("f3"));
+            Assert.IsNull(pr.Document(0).Get("f4"));
+            // check that fields are there
+            AtomicReader slow = SlowCompositeReaderWrapper.Wrap(pr);
+            Assert.IsNotNull(slow.Terms("f1"));
+            Assert.IsNotNull(slow.Terms("f2"));
+            Assert.IsNotNull(slow.Terms("f3"));
+            Assert.IsNotNull(slow.Terms("f4"));
+            pr.Dispose();
+
+            // no stored fields at all
+            pr = new ParallelCompositeReader(false, new CompositeReader[] { ir2 }, new CompositeReader[0]);
+            Assert.IsNull(pr.Document(0).Get("f1"));
+            Assert.IsNull(pr.Document(0).Get("f2"));
+            Assert.IsNull(pr.Document(0).Get("f3"));
+            Assert.IsNull(pr.Document(0).Get("f4"));
+            // check which fields are present
+            slow = SlowCompositeReaderWrapper.Wrap(pr);
+            Assert.IsNull(slow.Terms("f1"));
+            Assert.IsNull(slow.Terms("f2"));
+            Assert.IsNotNull(slow.Terms("f3"));
+            Assert.IsNotNull(slow.Terms("f4"));
+            pr.Dispose();
+
+            // without overlapping
+            pr = new ParallelCompositeReader(true, new CompositeReader[] { ir2 }, new CompositeReader[] { ir1 });
+            Assert.AreEqual("v1", pr.Document(0).Get("f1"));
+            Assert.AreEqual("v1", pr.Document(0).Get("f2"));
+            Assert.IsNull(pr.Document(0).Get("f3"));
+            Assert.IsNull(pr.Document(0).Get("f4"));
+            // check which fields are present
+            slow = SlowCompositeReaderWrapper.Wrap(pr);
+            Assert.IsNull(slow.Terms("f1"));
+            Assert.IsNull(slow.Terms("f2"));
+            Assert.IsNotNull(slow.Terms("f3"));
+            Assert.IsNotNull(slow.Terms("f4"));
+            pr.Dispose();
+
+            // no main readers
+            try
+            {
+                new ParallelCompositeReader(true, new CompositeReader[0], new CompositeReader[] { ir1 });
+                Assert.Fail("didn't get expected exception: need a non-empty main-reader array");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // pass
+            }
+
+            dir1.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestToString()
+        {
+            Directory dir1 = GetDir1(Random());
+            CompositeReader ir1 = DirectoryReader.Open(dir1);
+            ParallelCompositeReader pr = new ParallelCompositeReader(new CompositeReader[] { ir1 });
+
+            string s = pr.ToString();
+            Assert.IsTrue(s.StartsWith("ParallelCompositeReader(ParallelAtomicReader("), "toString incorrect: " + s);
+
+            pr.Dispose();
+            dir1.Dispose();
+        }
+
+        [Test]
+        public virtual void TestToStringCompositeComposite()
+        {
+            Directory dir1 = GetDir1(Random());
+            CompositeReader ir1 = DirectoryReader.Open(dir1);
+            ParallelCompositeReader pr = new ParallelCompositeReader(new CompositeReader[] { new MultiReader(ir1) });
+
+            string s = pr.ToString();
+
+            Assert.IsTrue(s.StartsWith("ParallelCompositeReader(ParallelCompositeReaderAnonymousInnerClassHelper(ParallelAtomicReader("), "toString incorrect: " + s);
+
+            pr.Dispose();
+            dir1.Dispose();
+        }
+
+        private void QueryTest(Query query)
+        {
+            ScoreDoc[] parallelHits = Parallel_Renamed.Search(query, null, 1000).ScoreDocs;
+            ScoreDoc[] singleHits = Single_Renamed.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(parallelHits.Length, singleHits.Length);
+            for (int i = 0; i < parallelHits.Length; i++)
+            {
+                Assert.AreEqual(parallelHits[i].Score, singleHits[i].Score, 0.001f);
+                Document docParallel = Parallel_Renamed.Doc(parallelHits[i].Doc);
+                Document docSingle = Single_Renamed.Doc(singleHits[i].Doc);
+                Assert.AreEqual(docParallel.Get("f1"), docSingle.Get("f1"));
+                Assert.AreEqual(docParallel.Get("f2"), docSingle.Get("f2"));
+                Assert.AreEqual(docParallel.Get("f3"), docSingle.Get("f3"));
+                Assert.AreEqual(docParallel.Get("f4"), docSingle.Get("f4"));
+            }
+        }
+
+        // Fields 1-4 indexed together:
+        private IndexSearcher Single(Random random, bool compositeComposite)
+        {
+            Dir = NewDirectory();
+            IndexWriter w = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+            Document d1 = new Document();
+            d1.Add(NewTextField("f1", "v1", Field.Store.YES));
+            d1.Add(NewTextField("f2", "v1", Field.Store.YES));
+            d1.Add(NewTextField("f3", "v1", Field.Store.YES));
+            d1.Add(NewTextField("f4", "v1", Field.Store.YES));
+            w.AddDocument(d1);
+            Document d2 = new Document();
+            d2.Add(NewTextField("f1", "v2", Field.Store.YES));
+            d2.Add(NewTextField("f2", "v2", Field.Store.YES));
+            d2.Add(NewTextField("f3", "v2", Field.Store.YES));
+            d2.Add(NewTextField("f4", "v2", Field.Store.YES));
+            w.AddDocument(d2);
+            Document d3 = new Document();
+            d3.Add(NewTextField("f1", "v3", Field.Store.YES));
+            d3.Add(NewTextField("f2", "v3", Field.Store.YES));
+            d3.Add(NewTextField("f3", "v3", Field.Store.YES));
+            d3.Add(NewTextField("f4", "v3", Field.Store.YES));
+            w.AddDocument(d3);
+            Document d4 = new Document();
+            d4.Add(NewTextField("f1", "v4", Field.Store.YES));
+            d4.Add(NewTextField("f2", "v4", Field.Store.YES));
+            d4.Add(NewTextField("f3", "v4", Field.Store.YES));
+            d4.Add(NewTextField("f4", "v4", Field.Store.YES));
+            w.AddDocument(d4);
+            w.Dispose();
+
+            CompositeReader ir;
+            if (compositeComposite)
+            {
+                ir = new MultiReader(DirectoryReader.Open(Dir), DirectoryReader.Open(Dir));
+            }
+            else
+            {
+                ir = DirectoryReader.Open(Dir);
+            }
+            return NewSearcher(ir);
+        }
+
+        // Fields 1 & 2 in one index, 3 & 4 in other, with ParallelReader:
+        private IndexSearcher Parallel(Random random, bool compositeComposite)
+        {
+            Dir1 = GetDir1(random);
+            Dir2 = GetDir2(random);
+            CompositeReader rd1, rd2;
+            if (compositeComposite)
+            {
+                rd1 = new MultiReader(DirectoryReader.Open(Dir1), DirectoryReader.Open(Dir1));
+                rd2 = new MultiReader(DirectoryReader.Open(Dir2), DirectoryReader.Open(Dir2));
+                Assert.AreEqual(2, rd1.Context.Children.Count);
+                Assert.AreEqual(2, rd2.Context.Children.Count);
+            }
+            else
+            {
+                rd1 = DirectoryReader.Open(Dir1);
+                rd2 = DirectoryReader.Open(Dir2);
+                Assert.AreEqual(3, rd1.Context.Children.Count);
+                Assert.AreEqual(3, rd2.Context.Children.Count);
+            }
+            ParallelCompositeReader pr = new ParallelCompositeReader(rd1, rd2);
+            return NewSearcher(pr);
+        }
+
+        // subreader structure: (1,2,1)
+        private Directory GetDir1(Random random)
+        {
+            Directory dir1 = NewDirectory();
+            IndexWriter w1 = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
+            Document d1 = new Document();
+            d1.Add(NewTextField("f1", "v1", Field.Store.YES));
+            d1.Add(NewTextField("f2", "v1", Field.Store.YES));
+            w1.AddDocument(d1);
+            w1.Commit();
+            Document d2 = new Document();
+            d2.Add(NewTextField("f1", "v2", Field.Store.YES));
+            d2.Add(NewTextField("f2", "v2", Field.Store.YES));
+            w1.AddDocument(d2);
+            Document d3 = new Document();
+            d3.Add(NewTextField("f1", "v3", Field.Store.YES));
+            d3.Add(NewTextField("f2", "v3", Field.Store.YES));
+            w1.AddDocument(d3);
+            w1.Commit();
+            Document d4 = new Document();
+            d4.Add(NewTextField("f1", "v4", Field.Store.YES));
+            d4.Add(NewTextField("f2", "v4", Field.Store.YES));
+            w1.AddDocument(d4);
+            w1.Dispose();
+            return dir1;
+        }
+
+        // subreader structure: (1,2,1)
+        private Directory GetDir2(Random random)
+        {
+            Directory dir2 = NewDirectory();
+            IndexWriter w2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
+            Document d1 = new Document();
+            d1.Add(NewTextField("f3", "v1", Field.Store.YES));
+            d1.Add(NewTextField("f4", "v1", Field.Store.YES));
+            w2.AddDocument(d1);
+            w2.Commit();
+            Document d2 = new Document();
+            d2.Add(NewTextField("f3", "v2", Field.Store.YES));
+            d2.Add(NewTextField("f4", "v2", Field.Store.YES));
+            w2.AddDocument(d2);
+            Document d3 = new Document();
+            d3.Add(NewTextField("f3", "v3", Field.Store.YES));
+            d3.Add(NewTextField("f4", "v3", Field.Store.YES));
+            w2.AddDocument(d3);
+            w2.Commit();
+            Document d4 = new Document();
+            d4.Add(NewTextField("f3", "v4", Field.Store.YES));
+            d4.Add(NewTextField("f4", "v4", Field.Store.YES));
+            w2.AddDocument(d4);
+            w2.Dispose();
+            return dir2;
+        }
+
+        // This dir has a different subreader structure: (1,1,2)
+        private Directory GetInvalidStructuredDir2(Random random)
+        {
+            Directory dir2 = NewDirectory();
+            IndexWriter w2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
+            Document d1 = new Document();
+            d1.Add(NewTextField("f3", "v1", Field.Store.YES));
+            d1.Add(NewTextField("f4", "v1", Field.Store.YES));
+            w2.AddDocument(d1);
+            w2.Commit();
+            Document d2 = new Document();
+            d2.Add(NewTextField("f3", "v2", Field.Store.YES));
+            d2.Add(NewTextField("f4", "v2", Field.Store.YES));
+            w2.AddDocument(d2);
+            w2.Commit();
+            Document d3 = new Document();
+            d3.Add(NewTextField("f3", "v3", Field.Store.YES));
+            d3.Add(NewTextField("f4", "v3", Field.Store.YES));
+            w2.AddDocument(d3);
+            Document d4 = new Document();
+            d4.Add(NewTextField("f3", "v4", Field.Store.YES));
+            d4.Add(NewTextField("f4", "v4", Field.Store.YES));
+            w2.AddDocument(d4);
+            w2.Dispose();
+            return dir2;
+        }
+    }
+}
\ No newline at end of file

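The incompatible-index tests above pin down the central constraint of
ParallelCompositeReader: the wrapped readers must have the same number of
documents and an identical subreader structure, or the constructor throws
ArgumentException. When two directories were not committed in lockstep, a
sketch of the usual fallback (dir1 and dir2 are illustrative handles) is to
flatten each side to an atomic view first:

    // ParallelCompositeReader would reject these readers because their segment
    // structures differ; flattening removes the structural requirement at the
    // cost of the slow composite wrapper.
    AtomicReader left = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));
    AtomicReader right = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2));
    ParallelAtomicReader pr = new ParallelAtomicReader(left, right);
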
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestParallelReaderEmptyIndex.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestParallelReaderEmptyIndex.cs b/src/Lucene.Net.Tests/Index/TestParallelReaderEmptyIndex.cs
new file mode 100644
index 0000000..f51128e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestParallelReaderEmptyIndex.cs
@@ -0,0 +1,162 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TextField = TextField;
+
+    /// <summary>
+    /// Some tests for <seealso cref="ParallelAtomicReader"/>s with empty indexes
+    /// </summary>
+    [TestFixture]
+    public class TestParallelReaderEmptyIndex : LuceneTestCase
+    {
+        /// <summary>
+        /// Creates two empty indexes and wraps a ParallelReader around them. Adding
+        /// this reader to a new index should not throw any exception.
+        /// </summary>
+        [Test]
+        public virtual void TestEmptyIndex()
+        {
+            Directory rd1 = NewDirectory();
+            IndexWriter iw = new IndexWriter(rd1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            iw.Dispose();
+            // create a copy:
+            Directory rd2 = NewDirectory(rd1);
+
+            Directory rdOut = NewDirectory();
+
+            IndexWriter iwOut = new IndexWriter(rdOut, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            ParallelAtomicReader apr = new ParallelAtomicReader(SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(rd1)), SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(rd2)));
+
+            // When unpatched, Lucene crashes here with a NoSuchElementException (caused by ParallelTermEnum)
+            iwOut.AddIndexes(apr);
+            iwOut.ForceMerge(1);
+
+            // 2nd try with a readerless parallel reader
+            iwOut.AddIndexes(new ParallelAtomicReader());
+            iwOut.ForceMerge(1);
+
+            ParallelCompositeReader cpr = new ParallelCompositeReader(DirectoryReader.Open(rd1), DirectoryReader.Open(rd2));
+
+            // When unpatched, Lucene crashes here with a NoSuchElementException (caused by ParallelTermEnum)
+            iwOut.AddIndexes(cpr);
+            iwOut.ForceMerge(1);
+
+            // 2nd try with a readerless parallel reader
+            iwOut.AddIndexes(new ParallelCompositeReader());
+            iwOut.ForceMerge(1);
+
+            iwOut.Dispose();
+            rdOut.Dispose();
+            rd1.Dispose();
+            rd2.Dispose();
+        }
+
+        /// <summary>
+        /// This method creates an empty index (numFields=0, numDocs=0) that is
+        /// nevertheless marked as having TermVectors. Adding this index to
+        /// another index should not throw any exception.
+        /// </summary>
+        [Test]
+        public virtual void TestEmptyIndexWithVectors()
+        {
+            Directory rd1 = NewDirectory();
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: make 1st writer");
+                }
+                IndexWriter iw = new IndexWriter(rd1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                Document doc = new Document();
+                Field idField = NewTextField("id", "", Field.Store.NO);
+                doc.Add(idField);
+                FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+                customType.StoreTermVectors = true;
+                doc.Add(NewField("test", "", customType));
+                idField.SetStringValue("1");
+                iw.AddDocument(doc);
+                doc.Add(NewTextField("test", "", Field.Store.NO));
+                idField.SetStringValue("2");
+                iw.AddDocument(doc);
+                iw.Dispose();
+
+                IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: make 2nd writer");
+                }
+                IndexWriter writer = new IndexWriter(rd1, dontMergeConfig);
+
+                writer.DeleteDocuments(new Term("id", "1"));
+                writer.Dispose();
+                IndexReader ir = DirectoryReader.Open(rd1);
+                Assert.AreEqual(2, ir.MaxDoc);
+                Assert.AreEqual(1, ir.NumDocs);
+                ir.Dispose();
+
+                iw = new IndexWriter(rd1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
+                iw.ForceMerge(1);
+                iw.Dispose();
+            }
+
+            Directory rd2 = NewDirectory();
+            {
+                IndexWriter iw = new IndexWriter(rd2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                Document doc = new Document();
+                iw.AddDocument(doc);
+                iw.Dispose();
+            }
+
+            Directory rdOut = NewDirectory();
+
+            IndexWriter iwOut = new IndexWriter(rdOut, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            DirectoryReader reader1, reader2;
+            ParallelAtomicReader pr = new ParallelAtomicReader(SlowCompositeReaderWrapper.Wrap(reader1 = DirectoryReader.Open(rd1)), SlowCompositeReaderWrapper.Wrap(reader2 = DirectoryReader.Open(rd2)));
+
+            // When unpatched, Lucene crashes here with an ArrayIndexOutOfBoundsException (caused by TermVectorsWriter)
+            iwOut.AddIndexes(pr);
+
+            // ParallelReader closes any IndexReader you added to it:
+            pr.Dispose();
+
+            // assert subreaders were closed
+            Assert.AreEqual(0, reader1.RefCount);
+            Assert.AreEqual(0, reader2.RefCount);
+
+            rd1.Dispose();
+            rd2.Dispose();
+
+            iwOut.ForceMerge(1);
+            iwOut.Dispose();
+
+            rdOut.Dispose();
+        }
+    }
+}
\ No newline at end of file

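The tests above also exercise the other common use of a parallel reader:
passing it to IndexWriter.AddIndexes to materialize the combined fields into a
single physical index. A sketch of that pattern, assuming rdA, rdB, rdOut, and
analyzer are set up elsewhere, with TEST_VERSION_CURRENT standing in for a
concrete LuceneVersion:

    // Bake the parallel view of rdA/rdB into one self-contained index at rdOut.
    IndexWriter writer = new IndexWriter(rdOut,
        new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
    ParallelAtomicReader pr = new ParallelAtomicReader(
        SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(rdA)),
        SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(rdB)));
    writer.AddIndexes(pr);  // copies the merged fields document by document
    writer.ForceMerge(1);
    writer.Dispose();
    pr.Dispose();           // the parallel reader still owns its subreaders
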
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestParallelTermEnum.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestParallelTermEnum.cs b/src/Lucene.Net.Tests/Index/TestParallelTermEnum.cs
new file mode 100644
index 0000000..9b6ac85
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestParallelTermEnum.cs
@@ -0,0 +1,127 @@
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestParallelTermEnum : LuceneTestCase
+    {
+        private AtomicReader Ir1;
+        private AtomicReader Ir2;
+        private Directory Rd1;
+        private Directory Rd2;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Document doc;
+            Rd1 = NewDirectory();
+            IndexWriter iw1 = new IndexWriter(Rd1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            doc = new Document();
+            doc.Add(NewTextField("field1", "the quick brown fox jumps", Field.Store.YES));
+            doc.Add(NewTextField("field2", "the quick brown fox jumps", Field.Store.YES));
+            iw1.AddDocument(doc);
+
+            iw1.Dispose();
+            Rd2 = NewDirectory();
+            IndexWriter iw2 = new IndexWriter(Rd2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            doc = new Document();
+            doc.Add(NewTextField("field1", "the fox jumps over the lazy dog", Field.Store.YES));
+            doc.Add(NewTextField("field3", "the fox jumps over the lazy dog", Field.Store.YES));
+            iw2.AddDocument(doc);
+
+            iw2.Dispose();
+
+            this.Ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(Rd1));
+            this.Ir2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(Rd2));
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Ir1.Dispose();
+            Ir2.Dispose();
+            Rd1.Dispose();
+            Rd2.Dispose();
+            base.TearDown();
+        }
+
+        private void CheckTerms(Terms terms, IBits liveDocs, params string[] termsList)
+        {
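+            // Each expected term must appear in order, match exactly one posting
+            // (docID 0 of the single-document index), and the enum must then be
+            // exhausted.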
+            Assert.IsNotNull(terms);
+            TermsEnum te = terms.GetIterator(null);
+
+            foreach (string t in termsList)
+            {
+                BytesRef b = te.Next();
+                Assert.IsNotNull(b);
+                Assert.AreEqual(t, b.Utf8ToString());
+                DocsEnum td = TestUtil.Docs(Random(), te, liveDocs, null, DocsEnum.FLAG_NONE);
+                Assert.IsTrue(td.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                Assert.AreEqual(0, td.DocID);
+                Assert.AreEqual(td.NextDoc(), DocIdSetIterator.NO_MORE_DOCS);
+            }
+            Assert.IsNull(te.Next());
+        }
+
+        [Test]
+        public virtual void Test1()
+        {
+            ParallelAtomicReader pr = new ParallelAtomicReader(Ir1, Ir2);
+
+            IBits liveDocs = pr.LiveDocs;
+
+            Fields fields = pr.Fields;
+            IEnumerator<string> fe = fields.GetEnumerator();
+
+            fe.MoveNext();
+            string f = fe.Current;
+            Assert.AreEqual("field1", f);
+            CheckTerms(fields.GetTerms(f), liveDocs, "brown", "fox", "jumps", "quick", "the");
+
+            fe.MoveNext();
+            f = fe.Current;
+            Assert.AreEqual("field2", f);
+            CheckTerms(fields.GetTerms(f), liveDocs, "brown", "fox", "jumps", "quick", "the");
+
+            fe.MoveNext();
+            f = fe.Current;
+            Assert.AreEqual("field3", f);
+            CheckTerms(fields.GetTerms(f), liveDocs, "dog", "fox", "jumps", "lazy", "over", "the");
+
+            Assert.IsFalse(fe.MoveNext());
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Spans/TestFieldMaskingSpanQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Spans/TestFieldMaskingSpanQuery.cs b/src/Lucene.Net.Tests/Search/Spans/TestFieldMaskingSpanQuery.cs
new file mode 100644
index 0000000..7e78239
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Spans/TestFieldMaskingSpanQuery.cs
@@ -0,0 +1,326 @@
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search.Spans
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+    using TFIDFSimilarity = Lucene.Net.Search.Similarities.TFIDFSimilarity;
+
+    [TestFixture]
+    public class TestFieldMaskingSpanQuery : LuceneTestCase
+    {
+        protected internal static Document Doc(Field[] fields)
+        {
+            Document doc = new Document();
+            for (int i = 0; i < fields.Length; i++)
+            {
+                doc.Add(fields[i]);
+            }
+            return doc;
+        }
+
+        protected internal Field GetField(string name, string value)
+        {
+            return NewTextField(name, value, Field.Store.NO);
+        }
+
+        protected internal static IndexSearcher Searcher;
+        protected internal static Directory Directory;
+        protected internal static IndexReader Reader;
+
+        /// <summary>
+        /// LUCENENET specific
+        /// This setup method is non-static because NewIndexWriterConfig is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+
+            writer.AddDocument(Doc(new Field[] { GetField("id", "0"), GetField("gender", "male"), GetField("first", "james"), GetField("last", "jones") }));
+
+            writer.AddDocument(Doc(new Field[] { GetField("id", "1"), GetField("gender", "male"), GetField("first", "james"), GetField("last", "smith"), GetField("gender", "female"), GetField("first", "sally"), GetField("last", "jones") }));
+
+            writer.AddDocument(Doc(new Field[] { GetField("id", "2"), GetField("gender", "female"), GetField("first", "greta"), GetField("last", "jones"), GetField("gender", "female"), GetField("first", "sally"), GetField("last", "smith"), GetField("gender", "male"), GetField("first", "james"), GetField("last", "jones") }));
+
+            writer.AddDocument(Doc(new Field[] { GetField("id", "3"), GetField("gender", "female"), GetField("first", "lisa"), GetField("last", "jones"), GetField("gender", "male"), GetField("first", "bob"), GetField("last", "costas") }));
+
+            writer.AddDocument(Doc(new Field[] { GetField("id", "4"), GetField("gender", "female"), GetField("first", "sally"), GetField("last", "smith"), GetField("gender", "female"), GetField("first", "linda"), GetField("last", "dixit"), GetField("gender", "male"), GetField("first", "bubba"), GetField("last", "jones") }));
+            Reader = writer.Reader;
+            writer.Dispose();
+            Searcher = NewSearcher(Reader);
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            Searcher = null;
+            Reader.Dispose();
+            Reader = null;
+            Directory.Dispose();
+            Directory = null;
+        }
+
+        protected internal virtual void Check(SpanQuery q, int[] docs)
+        {
+            CheckHits.CheckHitCollector(Random(), q, null, Searcher, docs, Similarity);
+        }
+
+        [Test]
+        public virtual void TestRewrite0()
+        {
+            SpanQuery q = new FieldMaskingSpanQuery(new SpanTermQuery(new Term("last", "sally")), "first");
+            q.Boost = 8.7654321f;
+            SpanQuery qr = (SpanQuery)Searcher.Rewrite(q);
+
+            QueryUtils.CheckEqual(q, qr);
+
+            HashSet<Term> terms = new HashSet<Term>();
+            qr.ExtractTerms(terms);
+            Assert.AreEqual(1, terms.Count);
+        }
+
+        [Test]
+        public virtual void TestRewrite1()
+        {
+            // mask an anon SpanQuery class that rewrites to something else.
+            SpanQuery q = new FieldMaskingSpanQuery(new SpanTermQueryAnonymousInnerClassHelper(this, new Term("last", "sally")), "first");
+
+            SpanQuery qr = (SpanQuery)Searcher.Rewrite(q);
+
+            QueryUtils.CheckUnequal(q, qr);
+
+            HashSet<Term> terms = new HashSet<Term>();
+            qr.ExtractTerms(terms);
+            Assert.AreEqual(2, terms.Count);
+        }
+
+        private class SpanTermQueryAnonymousInnerClassHelper : SpanTermQuery
+        {
+            private readonly TestFieldMaskingSpanQuery OuterInstance;
+
+            public SpanTermQueryAnonymousInnerClassHelper(TestFieldMaskingSpanQuery outerInstance, Term term)
+                : base(term)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override Query Rewrite(IndexReader reader)
+            {
+                return new SpanOrQuery(new SpanTermQuery(new Term("first", "sally")), new SpanTermQuery(new Term("first", "james")));
+            }
+        }
+
+        [Test]
+        public virtual void TestRewrite2()
+        {
+            SpanQuery q1 = new SpanTermQuery(new Term("last", "smith"));
+            SpanQuery q2 = new SpanTermQuery(new Term("last", "jones"));
+            SpanQuery q = new SpanNearQuery(new SpanQuery[] { q1, new FieldMaskingSpanQuery(q2, "last") }, 1, true);
+            Query qr = Searcher.Rewrite(q);
+
+            QueryUtils.CheckEqual(q, qr);
+
+            HashSet<Term> set = new HashSet<Term>();
+            qr.ExtractTerms(set);
+            Assert.AreEqual(2, set.Count);
+        }
+
+        [Test]
+        public virtual void TestEquality1()
+        {
+            SpanQuery q1 = new FieldMaskingSpanQuery(new SpanTermQuery(new Term("last", "sally")), "first");
+            SpanQuery q2 = new FieldMaskingSpanQuery(new SpanTermQuery(new Term("last", "sally")), "first");
+            SpanQuery q3 = new FieldMaskingSpanQuery(new SpanTermQuery(new Term("last", "sally")), "XXXXX");
+            SpanQuery q4 = new FieldMaskingSpanQuery(new SpanTermQuery(new Term("last", "XXXXX")), "first");
+            SpanQuery q5 = new FieldMaskingSpanQuery(new SpanTermQuery(new Term("xXXX", "sally")), "first");
+            QueryUtils.CheckEqual(q1, q2);
+            QueryUtils.CheckUnequal(q1, q3);
+            QueryUtils.CheckUnequal(q1, q4);
+            QueryUtils.CheckUnequal(q1, q5);
+
+            SpanQuery qA = new FieldMaskingSpanQuery(new SpanTermQuery(new Term("last", "sally")), "first");
+            qA.Boost = 9f;
+            SpanQuery qB = new FieldMaskingSpanQuery(new SpanTermQuery(new Term("last", "sally")), "first");
+            QueryUtils.CheckUnequal(qA, qB);
+            qB.Boost = 9f;
+            QueryUtils.CheckEqual(qA, qB);
+        }
+
+        [Test]
+        public virtual void TestNoop0()
+        {
+            SpanQuery q1 = new SpanTermQuery(new Term("last", "sally"));
+            SpanQuery q = new FieldMaskingSpanQuery(q1, "first");
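+            // "sally" never occurs in the "last" field of the test data, so even masked the query matches nothing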
+            Check(q, new int[] { }); // :EMPTY:
+        }
+
+        [Test]
+        public virtual void TestNoop1()
+        {
+            SpanQuery q1 = new SpanTermQuery(new Term("last", "smith"));
+            SpanQuery q2 = new SpanTermQuery(new Term("last", "jones"));
+            SpanQuery q = new SpanNearQuery(new SpanQuery[] { q1, new FieldMaskingSpanQuery(q2, "last") }, 0, true);
+            Check(q, new int[] { 1, 2 });
+            q = new SpanNearQuery(new SpanQuery[] { new FieldMaskingSpanQuery(q1, "last"), new FieldMaskingSpanQuery(q2, "last") }, 0, true);
+            Check(q, new int[] { 1, 2 });
+        }
+
+        [Test]
+        public virtual void TestSimple1()
+        {
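+            // Masking "last" as "first" lets the two parallel fields be treated as one,
+            // so a first name can be matched "near" a last name at the adjacent position.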
+            SpanQuery q1 = new SpanTermQuery(new Term("first", "james"));
+            SpanQuery q2 = new SpanTermQuery(new Term("last", "jones"));
+            SpanQuery q = new SpanNearQuery(new SpanQuery[] { q1, new FieldMaskingSpanQuery(q2, "first") }, -1, false);
+            Check(q, new int[] { 0, 2 });
+            q = new SpanNearQuery(new SpanQuery[] { new FieldMaskingSpanQuery(q2, "first"), q1 }, -1, false);
+            Check(q, new int[] { 0, 2 });
+            q = new SpanNearQuery(new SpanQuery[] { q2, new FieldMaskingSpanQuery(q1, "last") }, -1, false);
+            Check(q, new int[] { 0, 2 });
+            q = new SpanNearQuery(new SpanQuery[] { new FieldMaskingSpanQuery(q1, "last"), q2 }, -1, false);
+            Check(q, new int[] { 0, 2 });
+        }
+
+        [Test]
+        public virtual void TestSimple2()
+        {
+            AssumeTrue("Broken scoring: LUCENE-3723", Searcher.Similarity is TFIDFSimilarity);
+            SpanQuery q1 = new SpanTermQuery(new Term("gender", "female"));
+            SpanQuery q2 = new SpanTermQuery(new Term("last", "smith"));
+            SpanQuery q = new SpanNearQuery(new SpanQuery[] { q1, new FieldMaskingSpanQuery(q2, "gender") }, -1, false);
+            Check(q, new int[] { 2, 4 });
+            q = new SpanNearQuery(new SpanQuery[] { new FieldMaskingSpanQuery(q1, "id"), new FieldMaskingSpanQuery(q2, "id") }, -1, false);
+            Check(q, new int[] { 2, 4 });
+        }
+
+        [Test]
+        public virtual void TestSpans0()
+        {
+            SpanQuery q1 = new SpanTermQuery(new Term("gender", "female"));
+            SpanQuery q2 = new SpanTermQuery(new Term("first", "james"));
+            SpanQuery q = new SpanOrQuery(q1, new FieldMaskingSpanQuery(q2, "gender"));
+            Check(q, new int[] { 0, 1, 2, 3, 4 });
+
+            Spans span = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, q);
+
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(0, 0, 1), s(span));
+
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(1, 0, 1), s(span));
+
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(1, 1, 2), s(span));
+
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(2, 0, 1), s(span));
+
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(2, 1, 2), s(span));
+
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(2, 2, 3), s(span));
+
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(3, 0, 1), s(span));
+
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(4, 0, 1), s(span));
+
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(4, 1, 2), s(span));
+
+            Assert.AreEqual(false, span.Next());
+        }
+
+        [Test]
+        public virtual void TestSpans1()
+        {
+            SpanQuery q1 = new SpanTermQuery(new Term("first", "sally"));
+            SpanQuery q2 = new SpanTermQuery(new Term("first", "james"));
+            SpanQuery qA = new SpanOrQuery(q1, q2);
+            SpanQuery qB = new FieldMaskingSpanQuery(qA, "id");
+
+            Check(qA, new int[] { 0, 1, 2, 4 });
+            Check(qB, new int[] { 0, 1, 2, 4 });
+
+            Spans spanA = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, qA);
+            Spans spanB = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, qB);
+
+            while (spanA.Next())
+            {
+                Assert.IsTrue(spanB.Next(), "spanB exhausted before spanA");
+                Assert.AreEqual(s(spanA), s(spanB), "spanA not equal spanB");
+            }
+            Assert.IsTrue(!(spanB.Next()), "spanB still going even though spanA is done");
+        }
+
+        [Test]
+        public virtual void TestSpans2()
+        {
+            AssumeTrue("Broken scoring: LUCENE-3723", Searcher.Similarity is TFIDFSimilarity);
+            SpanQuery qA1 = new SpanTermQuery(new Term("gender", "female"));
+            SpanQuery qA2 = new SpanTermQuery(new Term("first", "james"));
+            SpanQuery qA = new SpanOrQuery(qA1, new FieldMaskingSpanQuery(qA2, "gender"));
+            SpanQuery qB = new SpanTermQuery(new Term("last", "jones"));
+            SpanQuery q = new SpanNearQuery(new SpanQuery[] { new FieldMaskingSpanQuery(qA, "id"), new FieldMaskingSpanQuery(qB, "id") }, -1, false);
+            Check(q, new int[] { 0, 1, 2, 3 });
+
+            Spans span = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, q);
+
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(0, 0, 1), s(span));
+
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(1, 1, 2), s(span));
+
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(2, 0, 1), s(span));
+
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(2, 2, 3), s(span));
+
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(3, 0, 1), s(span));
+
+            Assert.AreEqual(false, span.Next());
+        }
+
+        public virtual string s(Spans span)
+        {
+            return s(span.Doc, span.Start, span.End);
+        }
+
+        public virtual string s(int doc, int start, int end)
+        {
+            return "s(" + doc + "," + start + "," + end + ")";
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Spans/TestNearSpansOrdered.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Spans/TestNearSpansOrdered.cs b/src/Lucene.Net.Tests/Search/Spans/TestNearSpansOrdered.cs
new file mode 100644
index 0000000..0bbcafb
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Spans/TestNearSpansOrdered.cs
@@ -0,0 +1,203 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search.Spans
+{
+    using Lucene.Net.Index;
+    using NUnit.Framework;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexReaderContext = Lucene.Net.Index.IndexReaderContext;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+
+    [TestFixture]
+    public class TestNearSpansOrdered : LuceneTestCase
+    {
+        protected internal IndexSearcher Searcher;
+        protected internal Directory Directory;
+        protected internal IndexReader Reader;
+
+        public const string FIELD = "field";
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Directory.Dispose();
+            base.TearDown();
+        }
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            for (int i = 0; i < DocFields.Length; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField(FIELD, DocFields[i], Field.Store.NO));
+                writer.AddDocument(doc);
+            }
+            Reader = writer.Reader;
+            writer.Dispose();
+            Searcher = NewSearcher(Reader);
+        }
+
+        protected internal string[] DocFields = new string[] { "w1 w2 w3 w4 w5", "w1 w3 w2 w3 zz", "w1 xx w2 yy w3", "w1 w3 xx w2 yy w3 zz" };
+
+        protected internal virtual SpanNearQuery MakeQuery(string s1, string s2, string s3, int slop, bool inOrder)
+        {
+            return new SpanNearQuery(new SpanQuery[] { new SpanTermQuery(new Term(FIELD, s1)), new SpanTermQuery(new Term(FIELD, s2)), new SpanTermQuery(new Term(FIELD, s3)) }, slop, inOrder);
+        }
+
+        protected internal virtual SpanNearQuery MakeQuery()
+        {
+            return MakeQuery("w1", "w2", "w3", 1, true);
+        }
+
+        [Test]
+        public virtual void TestSpanNearQuery()
+        {
+            SpanNearQuery q = MakeQuery();
+            CheckHits.DoCheckHits(Random(), q, FIELD, Searcher, new int[] { 0, 1 }, Similarity);
+        }
+
+        public virtual string s(Spans span)
+        {
+            return s(span.Doc, span.Start, span.End);
+        }
+
+        public virtual string s(int doc, int start, int end)
+        {
+            return "s(" + doc + "," + start + "," + end + ")";
+        }
+
+        [Test]
+        public virtual void TestNearSpansNext()
+        {
+            SpanNearQuery q = MakeQuery();
+            Spans span = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, q);
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(0, 0, 3), s(span));
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(1, 0, 4), s(span));
+            Assert.AreEqual(false, span.Next());
+        }
+
+        /// <summary>
+        /// this test does not imply that skipTo(doc+1) should work exactly the
+        /// same as next -- that only holds here because we know each doc
+        /// contains at most one span
+        /// </summary>
+        [Test]
+        public virtual void TestNearSpansSkipToLikeNext()
+        {
+            SpanNearQuery q = MakeQuery();
+            Spans span = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, q);
+            Assert.AreEqual(true, span.SkipTo(0));
+            Assert.AreEqual(s(0, 0, 3), s(span));
+            Assert.AreEqual(true, span.SkipTo(1));
+            Assert.AreEqual(s(1, 0, 4), s(span));
+            Assert.AreEqual(false, span.SkipTo(2));
+        }
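+
+        // A rough sketch of the Spans contract the tests above rely on (names as used in this file):
+        //
+        //   Spans span = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, MakeQuery());
+        //   while (span.Next())            // positions on the next match, false when exhausted
+        //   {
+        //       int doc = span.Doc;        // document of the current match
+        //       int start = span.Start;    // first matched position
+        //       int end = span.End;        // one past the last matched position
+        //   }
+        //   span.SkipTo(target);           // advances to the first match in a doc >= target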
+
+        [Test]
+        public virtual void TestNearSpansNextThenSkipTo()
+        {
+            SpanNearQuery q = MakeQuery();
+            Spans span = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, q);
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(0, 0, 3), s(span));
+            Assert.AreEqual(true, span.SkipTo(1));
+            Assert.AreEqual(s(1, 0, 4), s(span));
+            Assert.AreEqual(false, span.Next());
+        }
+
+        [Test]
+        public virtual void TestNearSpansNextThenSkipPast()
+        {
+            SpanNearQuery q = MakeQuery();
+            Spans span = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, q);
+            Assert.AreEqual(true, span.Next());
+            Assert.AreEqual(s(0, 0, 3), s(span));
+            Assert.AreEqual(false, span.SkipTo(2));
+        }
+
+        [Test]
+        public virtual void TestNearSpansSkipPast()
+        {
+            SpanNearQuery q = MakeQuery();
+            Spans span = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, q);
+            Assert.AreEqual(false, span.SkipTo(2));
+        }
+
+        [Test]
+        public virtual void TestNearSpansSkipTo0()
+        {
+            SpanNearQuery q = MakeQuery();
+            Spans span = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, q);
+            Assert.AreEqual(true, span.SkipTo(0));
+            Assert.AreEqual(s(0, 0, 3), s(span));
+        }
+
+        [Test]
+        public virtual void TestNearSpansSkipTo1()
+        {
+            SpanNearQuery q = MakeQuery();
+            Spans span = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, q);
+            Assert.AreEqual(true, span.SkipTo(1));
+            Assert.AreEqual(s(1, 0, 4), s(span));
+        }
+
+        /// <summary>
+        /// not a direct test of NearSpans, but a demonstration of how and when
+        /// its skipping behavior can cause problems
+        /// </summary>
+        [Test]
+        public virtual void TestSpanNearScorerSkipTo1()
+        {
+            SpanNearQuery q = MakeQuery();
+            Weight w = Searcher.CreateNormalizedWeight(q);
+            IndexReaderContext topReaderContext = Searcher.TopReaderContext;
+            AtomicReaderContext leaf = topReaderContext.Leaves[0];
+            Scorer s = w.GetScorer(leaf, ((AtomicReader)leaf.Reader).LiveDocs);
+            Assert.AreEqual(1, s.Advance(1));
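+            // advancing the fresh scorer straight to doc 1 must land on doc 1, the second (and last) match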
+        }
+
+        /// <summary>
+        /// not a direct test of NearSpans, but a demonstration of how and when
+        /// its score explanations can cause problems
+        /// </summary>
+        [Test]
+        public virtual void TestSpanNearScorerExplain()
+        {
+            SpanNearQuery q = MakeQuery();
+            Explanation e = Searcher.Explain(q, 1);
+            Assert.IsTrue(0.0f < e.Value, "Scorer explanation value for doc#1 isn't positive: " + e.ToString());
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Spans/TestPayloadSpans.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Spans/TestPayloadSpans.cs b/src/Lucene.Net.Tests/Search/Spans/TestPayloadSpans.cs
new file mode 100644
index 0000000..a9393b4
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Spans/TestPayloadSpans.cs
@@ -0,0 +1,589 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search.Spans
+{
+    using NUnit.Framework;
+    using System.IO;
+
+    /*
+     * Copyright 2004 The Apache Software Foundation
+     *
+     * Licensed under the Apache License, Version 2.0 (the "License");
+     * you may not use this file except in compliance with the License.
+     * You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using PayloadHelper = Lucene.Net.Search.Payloads.PayloadHelper;
+    using PayloadSpanUtil = Lucene.Net.Search.Payloads.PayloadSpanUtil;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Similarity = Lucene.Net.Search.Similarities.Similarity;
+    using Term = Lucene.Net.Index.Term;
+    using TextField = TextField;
+    using TokenFilter = Lucene.Net.Analysis.TokenFilter;
+    using Tokenizer = Lucene.Net.Analysis.Tokenizer;
+    using TokenStream = Lucene.Net.Analysis.TokenStream;
+
+    [TestFixture]
+    public class TestPayloadSpans : LuceneTestCase
+    {
+        private IndexSearcher Searcher_Renamed;
+        private Similarity similarity = new DefaultSimilarity();
+        protected internal IndexReader IndexReader;
+        private IndexReader CloseIndexReader;
+        private Directory Directory;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            PayloadHelper helper = new PayloadHelper();
+            Searcher_Renamed = helper.SetUp(Random(), similarity, 1000);
+            IndexReader = Searcher_Renamed.IndexReader;
+        }
+
+        [Test]
+        public virtual void TestSpanTermQuery()
+        {
+            SpanTermQuery stq;
+            Spans spans;
+            stq = new SpanTermQuery(new Term(PayloadHelper.FIELD, "seventy"));
+            spans = MultiSpansWrapper.Wrap(IndexReader.Context, stq);
+            Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
+            CheckSpans(spans, 100, 1, 1, 1);
+
+            stq = new SpanTermQuery(new Term(PayloadHelper.NO_PAYLOAD_FIELD, "seventy"));
+            spans = MultiSpansWrapper.Wrap(IndexReader.Context, stq);
+            Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
+            CheckSpans(spans, 100, 0, 0, 0);
+        }
+
+        [Test]
+        public virtual void TestSpanFirst()
+        {
+            SpanQuery match;
+            SpanFirstQuery sfq;
+            match = new SpanTermQuery(new Term(PayloadHelper.FIELD, "one"));
+            sfq = new SpanFirstQuery(match, 2);
+            Spans spans = MultiSpansWrapper.Wrap(IndexReader.Context, sfq);
+            CheckSpans(spans, 109, 1, 1, 1);
+            //Test more complicated subclause
+            SpanQuery[] clauses = new SpanQuery[2];
+            clauses[0] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "one"));
+            clauses[1] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "hundred"));
+            match = new SpanNearQuery(clauses, 0, true);
+            sfq = new SpanFirstQuery(match, 2);
+            CheckSpans(MultiSpansWrapper.Wrap(IndexReader.Context, sfq), 100, 2, 1, 1);
+
+            match = new SpanNearQuery(clauses, 0, false);
+            sfq = new SpanFirstQuery(match, 2);
+            CheckSpans(MultiSpansWrapper.Wrap(IndexReader.Context, sfq), 100, 2, 1, 1);
+        }
+
+        [Test]
+        public virtual void TestSpanNot()
+        {
+            SpanQuery[] clauses = new SpanQuery[2];
+            clauses[0] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "one"));
+            clauses[1] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "three"));
+            SpanQuery spq = new SpanNearQuery(clauses, 5, true);
+            SpanNotQuery snq = new SpanNotQuery(spq, new SpanTermQuery(new Term(PayloadHelper.FIELD, "two")));
+
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new PayloadAnalyzer(this)).SetSimilarity(similarity));
+
+            Document doc = new Document();
+            doc.Add(NewTextField(PayloadHelper.FIELD, "one two three one four three", Field.Store.YES));
+            writer.AddDocument(doc);
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
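+            // of the two ordered "one ... three" spans, only "one four three" (positions 3-5)
+            // survives the SpanNot; "one two three" contains the excluded "two"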
+            CheckSpans(MultiSpansWrapper.Wrap(reader.Context, snq), 1, new int[] { 2 });
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNestedSpans()
+        {
+            SpanTermQuery stq;
+            Spans spans;
+            IndexSearcher searcher = Searcher;
+            stq = new SpanTermQuery(new Term(PayloadHelper.FIELD, "mark"));
+            spans = MultiSpansWrapper.Wrap(searcher.TopReaderContext, stq);
+            Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
+            CheckSpans(spans, 0, null);
+
+            SpanQuery[] clauses = new SpanQuery[3];
+            clauses[0] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "rr"));
+            clauses[1] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "yy"));
+            clauses[2] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "xx"));
+            SpanNearQuery spanNearQuery = new SpanNearQuery(clauses, 12, false);
+
+            spans = MultiSpansWrapper.Wrap(searcher.TopReaderContext, spanNearQuery);
+            Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
+            CheckSpans(spans, 2, new int[] { 3, 3 });
+
+            clauses[0] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "xx"));
+            clauses[1] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "rr"));
+            clauses[2] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "yy"));
+
+            spanNearQuery = new SpanNearQuery(clauses, 6, true);
+
+            spans = MultiSpansWrapper.Wrap(searcher.TopReaderContext, spanNearQuery);
+
+            Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
+            CheckSpans(spans, 1, new int[] { 3 });
+
+            clauses = new SpanQuery[2];
+
+            clauses[0] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "xx"));
+            clauses[1] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "rr"));
+
+            spanNearQuery = new SpanNearQuery(clauses, 6, true);
+
+            // xx within 6 of rr
+
+            SpanQuery[] clauses2 = new SpanQuery[2];
+
+            clauses2[0] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "yy"));
+            clauses2[1] = spanNearQuery;
+
+            SpanNearQuery nestedSpanNearQuery = new SpanNearQuery(clauses2, 6, false);
+
+            // yy within 6 of xx within 6 of rr
+
+            spans = MultiSpansWrapper.Wrap(searcher.TopReaderContext, nestedSpanNearQuery);
+            Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
+            CheckSpans(spans, 2, new int[] { 3, 3 });
+            CloseIndexReader.Dispose();
+            Directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestFirstClauseWithoutPayload()
+        {
+            Spans spans;
+            IndexSearcher searcher = Searcher;
+
+            SpanQuery[] clauses = new SpanQuery[3];
+            clauses[0] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "nopayload"));
+            clauses[1] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "qq"));
+            clauses[2] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "ss"));
+
+            SpanNearQuery spanNearQuery = new SpanNearQuery(clauses, 6, true);
+
+            SpanQuery[] clauses2 = new SpanQuery[2];
+
+            clauses2[0] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "pp"));
+            clauses2[1] = spanNearQuery;
+
+            SpanNearQuery snq = new SpanNearQuery(clauses2, 6, false);
+
+            SpanQuery[] clauses3 = new SpanQuery[2];
+
+            clauses3[0] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "np"));
+            clauses3[1] = snq;
+
+            SpanNearQuery nestedSpanNearQuery = new SpanNearQuery(clauses3, 6, false);
+            spans = MultiSpansWrapper.Wrap(searcher.TopReaderContext, nestedSpanNearQuery);
+
+            Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
+            CheckSpans(spans, 1, new int[] { 3 });
+            CloseIndexReader.Dispose();
+            Directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestHeavilyNestedSpanQuery()
+        {
+            Spans spans;
+            IndexSearcher searcher = Searcher;
+
+            SpanQuery[] clauses = new SpanQuery[3];
+            clauses[0] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "one"));
+            clauses[1] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "two"));
+            clauses[2] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "three"));
+
+            SpanNearQuery spanNearQuery = new SpanNearQuery(clauses, 5, true);
+
+            clauses = new SpanQuery[3];
+            clauses[0] = spanNearQuery;
+            clauses[1] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "five"));
+            clauses[2] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "six"));
+
+            SpanNearQuery spanNearQuery2 = new SpanNearQuery(clauses, 6, true);
+
+            SpanQuery[] clauses2 = new SpanQuery[2];
+            clauses2[0] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "eleven"));
+            clauses2[1] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "ten"));
+            SpanNearQuery spanNearQuery3 = new SpanNearQuery(clauses2, 2, false);
+
+            SpanQuery[] clauses3 = new SpanQuery[3];
+            clauses3[0] = new SpanTermQuery(new Term(PayloadHelper.FIELD, "nine"));
+            clauses3[1] = spanNearQuery2;
+            clauses3[2] = spanNearQuery3;
+
+            SpanNearQuery nestedSpanNearQuery = new SpanNearQuery(clauses3, 6, false);
+
+            spans = MultiSpansWrapper.Wrap(searcher.TopReaderContext, nestedSpanNearQuery);
+            Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
+            CheckSpans(spans, 2, new int[] { 8, 8 });
+            CloseIndexReader.Dispose();
+            Directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestShrinkToAfterShortestMatch()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new TestPayloadAnalyzer(this)));
+
+            Document doc = new Document();
+            doc.Add(new TextField("content", new StringReader("a b c d e f g h i j a k")));
+            writer.AddDocument(doc);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher @is = NewSearcher(reader);
+            writer.Dispose();
+
+            SpanTermQuery stq1 = new SpanTermQuery(new Term("content", "a"));
+            SpanTermQuery stq2 = new SpanTermQuery(new Term("content", "k"));
+            SpanQuery[] sqs = new SpanQuery[] { stq1, stq2 };
+            SpanNearQuery snq = new SpanNearQuery(sqs, 1, true);
+            Spans spans = MultiSpansWrapper.Wrap(@is.TopReaderContext, snq);
+
+            TopDocs topDocs = @is.Search(snq, 1);
+            HashSet<string> payloadSet = new HashSet<string>();
+            for (int i = 0; i < topDocs.ScoreDocs.Length; i++)
+            {
+                while (spans.Next())
+                {
+                    var payloads = spans.GetPayload();
+                    foreach (var payload in payloads)
+                    {
+                        payloadSet.Add(Encoding.UTF8.GetString(payload));
+                    }
+                }
+            }
+            Assert.AreEqual(2, payloadSet.Count);
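+            // PayloadFilter writes payloads as "<token>:Noise:<position>" (or ":Entity:" for known
+            // entities), so the shortest match "a k" at positions 10-11 yields exactly these two: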
+            Assert.IsTrue(payloadSet.Contains("a:Noise:10"));
+            Assert.IsTrue(payloadSet.Contains("k:Noise:11"));
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestShrinkToAfterShortestMatch2()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new TestPayloadAnalyzer(this)));
+
+            Document doc = new Document();
+            doc.Add(new TextField("content", new StringReader("a b a d k f a h i k a k")));
+            writer.AddDocument(doc);
+            IndexReader reader = writer.Reader;
+            IndexSearcher @is = NewSearcher(reader);
+            writer.Dispose();
+
+            SpanTermQuery stq1 = new SpanTermQuery(new Term("content", "a"));
+            SpanTermQuery stq2 = new SpanTermQuery(new Term("content", "k"));
+            SpanQuery[] sqs = { stq1, stq2 };
+            SpanNearQuery snq = new SpanNearQuery(sqs, 0, true);
+            Spans spans = MultiSpansWrapper.Wrap(@is.TopReaderContext, snq);
+
+            TopDocs topDocs = @is.Search(snq, 1);
+            HashSet<string> payloadSet = new HashSet<string>();
+            for (int i = 0; i < topDocs.ScoreDocs.Length; i++)
+            {
+                while (spans.Next())
+                {
+                    var payloads = spans.GetPayload();
+                    foreach (var payload in payloads)
+                    {
+                        payloadSet.Add(Encoding.UTF8.GetString(payload));
+                    }
+                }
+            }
+            Assert.AreEqual(2, payloadSet.Count);
+            Assert.IsTrue(payloadSet.Contains("a:Noise:10"));
+            Assert.IsTrue(payloadSet.Contains("k:Noise:11"));
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestShrinkToAfterShortestMatch3()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new TestPayloadAnalyzer(this)));
+
+            Document doc = new Document();
+            doc.Add(new TextField("content", new StringReader("j k a l f k k p a t a k l k t a")));
+            writer.AddDocument(doc);
+            IndexReader reader = writer.Reader;
+            IndexSearcher @is = NewSearcher(reader);
+            writer.Dispose();
+
+            SpanTermQuery stq1 = new SpanTermQuery(new Term("content", "a"));
+            SpanTermQuery stq2 = new SpanTermQuery(new Term("content", "k"));
+            SpanQuery[] sqs = new SpanQuery[] { stq1, stq2 };
+            SpanNearQuery snq = new SpanNearQuery(sqs, 0, true);
+            Spans spans = MultiSpansWrapper.Wrap(@is.TopReaderContext, snq);
+
+            TopDocs topDocs = @is.Search(snq, 1);
+            HashSet<string> payloadSet = new HashSet<string>();
+            for (int i = 0; i < topDocs.ScoreDocs.Length; i++)
+            {
+                while (spans.Next())
+                {
+                    var payloads = spans.GetPayload();
+                    foreach (var payload in payloads)
+                    {
+                        payloadSet.Add(Encoding.UTF8.GetString(payload));
+                    }
+                }
+            }
+            Assert.AreEqual(2, payloadSet.Count);
+            if (VERBOSE)
+            {
+                foreach (string payload in payloadSet)
+                {
+                    Console.WriteLine("match:" + payload);
+                }
+            }
+            Assert.IsTrue(payloadSet.Contains("a:Noise:10"));
+            Assert.IsTrue(payloadSet.Contains("k:Noise:11"));
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestPayloadSpanUtil()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new PayloadAnalyzer(this)).SetSimilarity(similarity));
+
+            Document doc = new Document();
+            doc.Add(NewTextField(PayloadHelper.FIELD, "xx rr yy mm  pp", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+            IndexSearcher searcher = NewSearcher(reader);
+
+            PayloadSpanUtil psu = new PayloadSpanUtil(searcher.TopReaderContext);
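+            // PayloadSpanUtil turns an ordinary (non-span) Query into span form and
+            // gathers the payloads at the positions it matches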
+
+            var payloads = psu.GetPayloadsForQuery(new TermQuery(new Term(PayloadHelper.FIELD, "rr")));
+            if (VERBOSE)
+            {
+                Console.WriteLine("Num payloads:" + payloads.Count);
+                foreach (var bytes in payloads)
+                {
+                    Console.WriteLine(Encoding.UTF8.GetString(bytes));
+                }
+            }
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        private void CheckSpans(Spans spans, int expectedNumSpans, int expectedNumPayloads, int expectedPayloadLength, int expectedFirstByte)
+        {
+            Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
+            // each position match should have a span associated with it; since there is just one
+            // underlying term query, there should be only one entry in the span
+            int seen = 0;
+            while (spans.Next() == true)
+            {
+                //if we expect payloads, then isPayloadAvailable should be true
+                if (expectedNumPayloads > 0)
+                {
+                    Assert.IsTrue(spans.IsPayloadAvailable == true, "isPayloadAvailable is not returning the correct value: " + spans.IsPayloadAvailable + " and it should be: " + (expectedNumPayloads > 0));
+                }
+                else
+                {
+                    Assert.IsTrue(spans.IsPayloadAvailable == false, "isPayloadAvailable should be false");
+                }
+                // see PayloadHelper: for the PayloadHelper.FIELD field there is a single-byte payload at every token
+                if (spans.IsPayloadAvailable)
+                {
+                    var payload = spans.GetPayload();
+                    Assert.IsTrue(payload.Count == expectedNumPayloads, "payload Size: " + payload.Count + " is not: " + expectedNumPayloads);
+                    foreach (var thePayload in payload)
+                    {
+                        Assert.IsTrue(thePayload.Length == expectedPayloadLength, "payload Length: " + thePayload.Length + " is not: " + expectedPayloadLength);
+                        Assert.IsTrue(thePayload[0] == expectedFirstByte, thePayload[0] + " does not equal: " + expectedFirstByte);
+                    }
+                }
+                seen++;
+            }
+            Assert.IsTrue(seen == expectedNumSpans, seen + " does not equal: " + expectedNumSpans);
+        }
+
+        private IndexSearcher Searcher
+        {
+            get
+            {
+                Directory = NewDirectory();
+                string[] docs = new string[] { "xx rr yy mm  pp", "xx yy mm rr pp", "nopayload qq ss pp np", "one two three four five six seven eight nine ten eleven", "nine one two three four five six seven eight eleven ten" };
+                RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new PayloadAnalyzer(this)).SetSimilarity(similarity));
+
+                Document doc = null;
+                for (int i = 0; i < docs.Length; i++)
+                {
+                    doc = new Document();
+                    string docText = docs[i];
+                    doc.Add(NewTextField(PayloadHelper.FIELD, docText, Field.Store.YES));
+                    writer.AddDocument(doc);
+                }
+
+                CloseIndexReader = writer.Reader;
+                writer.Dispose();
+
+                IndexSearcher searcher = NewSearcher(CloseIndexReader);
+                return searcher;
+            }
+        }
+
+        private void CheckSpans(Spans spans, int numSpans, int[] numPayloads)
+        {
+            int cnt = 0;
+
+            while (spans.Next() == true)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nSpans Dump --");
+                }
+                if (spans.IsPayloadAvailable)
+                {
+                    var payload = spans.GetPayload();
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("payloads for span:" + payload.Count);
+                        foreach (var bytes in payload)
+                        {
+                            Console.WriteLine("doc:" + spans.Doc + " s:" + spans.Start + " e:" + spans.End + " " + Encoding.UTF8.GetString((byte[])(Array)bytes));
+                        }
+                    }
+
+                    Assert.AreEqual(numPayloads[cnt], payload.Count);
+                }
+                else
+                {
+                    Assert.IsFalse(numPayloads.Length > 0 && numPayloads[cnt] > 0, "Expected payloads: " + numPayloads[cnt] + " found: 0");
+                }
+                cnt++;
+            }
+
+            Assert.AreEqual(numSpans, cnt);
+        }
+
+        internal sealed class PayloadAnalyzer : Analyzer
+        {
+            private readonly TestPayloadSpans OuterInstance;
+
+            public PayloadAnalyzer(TestPayloadSpans outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer result = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+                return new TokenStreamComponents(result, new PayloadFilter(OuterInstance, result));
+            }
+        }
+
+        internal sealed class PayloadFilter : TokenFilter
+        {
+            private readonly TestPayloadSpans OuterInstance;
+
+            internal HashSet<string> Entities = new HashSet<string>();
+            internal HashSet<string> Nopayload = new HashSet<string>();
+            internal int Pos;
+            internal IPayloadAttribute PayloadAtt;
+            internal ICharTermAttribute TermAtt;
+            internal IPositionIncrementAttribute PosIncrAtt;
+
+            public PayloadFilter(TestPayloadSpans outerInstance, TokenStream input)
+                : base(input)
+            {
+                this.OuterInstance = outerInstance;
+                Pos = 0;
+                Entities.Add("xx");
+                Entities.Add("one");
+                Nopayload.Add("nopayload");
+                Nopayload.Add("np");
+                TermAtt = AddAttribute<ICharTermAttribute>();
+                PosIncrAtt = AddAttribute<IPositionIncrementAttribute>();
+                PayloadAtt = AddAttribute<IPayloadAttribute>();
+            }
+
+            public override bool IncrementToken()
+            {
+                if (m_input.IncrementToken())
+                {
+                    string token = TermAtt.ToString();
+
+                    if (!Nopayload.Contains(token))
+                    {
+                        if (Entities.Contains(token))
+                        {
+                            PayloadAtt.Payload = new BytesRef(token + ":Entity:" + Pos);
+                        }
+                        else
+                        {
+                            PayloadAtt.Payload = new BytesRef(token + ":Noise:" + Pos);
+                        }
+                    }
+                    Pos += PosIncrAtt.PositionIncrement;
+                    return true;
+                }
+                return false;
+            }
+
+            public override void Reset()
+            {
+                base.Reset();
+                this.Pos = 0;
+            }
+        }
+
+        public sealed class TestPayloadAnalyzer : Analyzer
+        {
+            private readonly TestPayloadSpans OuterInstance;
+
+            public TestPayloadAnalyzer(TestPayloadSpans outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer result = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+                return new TokenStreamComponents(result, new PayloadFilter(OuterInstance, result));
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Spans/TestSpanExplanations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Spans/TestSpanExplanations.cs b/src/Lucene.Net.Tests/Search/Spans/TestSpanExplanations.cs
new file mode 100644
index 0000000..a5b92ec
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Spans/TestSpanExplanations.cs
@@ -0,0 +1,260 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Search.Spans
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Search;
+
+    /// <summary>
+    /// TestExplanations subclass focusing on span queries
+    /// </summary>
+    [TestFixture]
+    public class TestSpanExplanations : TestExplanations
+    {
+        /* simple SpanTermQueries */
+
+        [Test]
+        public virtual void TestST1()
+        {
+            SpanQuery q = St("w1");
+            Qtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestST2()
+        {
+            SpanQuery q = St("w1");
+            q.Boost = 1000;
+            Qtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestST4()
+        {
+            SpanQuery q = St("xx");
+            Qtest(q, new int[] { 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestST5()
+        {
+            SpanQuery q = St("xx");
+            q.Boost = 1000;
+            Qtest(q, new int[] { 2, 3 });
+        }
+
+        /* some SpanFirstQueries */
+
+        [Test]
+        public virtual void TestSF1()
+        {
+            SpanQuery q = Sf(("w1"), 1);
+            Qtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSF2()
+        {
+            SpanQuery q = Sf(("w1"), 1);
+            q.Boost = 1000;
+            Qtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSF4()
+        {
+            SpanQuery q = Sf(("xx"), 2);
+            Qtest(q, new int[] { 2 });
+        }
+
+        [Test]
+        public virtual void TestSF5()
+        {
+            SpanQuery q = Sf(("yy"), 2);
+            Qtest(q, new int[] { });
+        }
+
+        [Test]
+        public virtual void TestSF6()
+        {
+            SpanQuery q = Sf(("yy"), 4);
+            q.Boost = 1000;
+            Qtest(q, new int[] { 2 });
+        }
+
+        /* some SpanOrQueries */
+
+        [Test]
+        public virtual void TestSO1()
+        {
+            SpanQuery q = Sor("w1", "QQ");
+            Qtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSO2()
+        {
+            SpanQuery q = Sor("w1", "w3", "zz");
+            Qtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSO3()
+        {
+            SpanQuery q = Sor("w5", "QQ", "yy");
+            Qtest(q, new int[] { 0, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSO4()
+        {
+            SpanQuery q = Sor("w5", "QQ", "yy");
+            Qtest(q, new int[] { 0, 2, 3 });
+        }
+
+        /* some SpanNearQueries */
+
+        [Test]
+        public virtual void TestSNear1()
+        {
+            SpanQuery q = Snear("w1", "QQ", 100, true);
+            Qtest(q, new int[] { });
+        }
+
+        [Test]
+        public virtual void TestSNear2()
+        {
+            SpanQuery q = Snear("w1", "xx", 100, true);
+            Qtest(q, new int[] { 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNear3()
+        {
+            SpanQuery q = Snear("w1", "xx", 0, true);
+            Qtest(q, new int[] { 2 });
+        }
+
+        [Test]
+        public virtual void TestSNear4()
+        {
+            SpanQuery q = Snear("w1", "xx", 1, true);
+            Qtest(q, new int[] { 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNear5()
+        {
+            SpanQuery q = Snear("xx", "w1", 0, false);
+            Qtest(q, new int[] { 2 });
+        }
+
+        [Test]
+        public virtual void TestSNear6()
+        {
+            SpanQuery q = Snear("w1", "w2", "QQ", 100, true);
+            Qtest(q, new int[] { });
+        }
+
+        [Test]
+        public virtual void TestSNear7()
+        {
+            SpanQuery q = Snear("w1", "xx", "w2", 100, true);
+            Qtest(q, new int[] { 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNear8()
+        {
+            SpanQuery q = Snear("w1", "xx", "w2", 0, true);
+            Qtest(q, new int[] { 2 });
+        }
+
+        [Test]
+        public virtual void TestSNear9()
+        {
+            SpanQuery q = Snear("w1", "xx", "w2", 1, true);
+            Qtest(q, new int[] { 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNear10()
+        {
+            SpanQuery q = Snear("xx", "w1", "w2", 0, false);
+            Qtest(q, new int[] { 2 });
+        }
+
+        [Test]
+        public virtual void TestSNear11()
+        {
+            SpanQuery q = Snear("w1", "w2", "w3", 1, true);
+            Qtest(q, new int[] { 0, 1 });
+        }
+
+        /* some SpanNotQueries */
+
+        [Test]
+        public virtual void TestSNot1()
+        {
+            SpanQuery q = Snot(Sf("w1", 10), St("QQ"));
+            Qtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNot2()
+        {
+            SpanQuery q = Snot(Sf("w1", 10), St("QQ"));
+            q.Boost = 1000;
+            Qtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNot4()
+        {
+            SpanQuery q = Snot(Sf("w1", 10), St("xx"));
+            Qtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNot5()
+        {
+            SpanQuery q = Snot(Sf("w1", 10), St("xx"));
+            q.Boost = 1000;
+            Qtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNot7()
+        {
+            SpanQuery f = Snear("w1", "w3", 10, true);
+            f.Boost = 1000;
+            SpanQuery q = Snot(f, St("xx"));
+            Qtest(q, new int[] { 0, 1, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNot10()
+        {
+            SpanQuery t = St("xx");
+            t.Boost = 10000;
+            SpanQuery q = Snot(Snear("w1", "w3", 10, true), t);
+            Qtest(q, new int[] { 0, 1, 3 });
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Spans/TestSpanExplanationsOfNonMatches.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Spans/TestSpanExplanationsOfNonMatches.cs b/src/Lucene.Net.Tests/Search/Spans/TestSpanExplanationsOfNonMatches.cs
new file mode 100644
index 0000000..307c51f
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Spans/TestSpanExplanationsOfNonMatches.cs
@@ -0,0 +1,251 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Search.Spans
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// subclass of TestSpanExplanations that verifies non matches.
+    /// </summary>
+    [TestFixture]
+    public class TestSpanExplanationsOfNonMatches : TestSpanExplanations
+    {
+        /// <summary>
+        /// Overrides superclass to ignore matches and focus on non-matches
+        /// </summary>
+        /// <seealso cref="CheckHits.CheckNoMatchExplanations"/>
+        public override void Qtest(Query q, int[] expDocNrs)
+        {
+            CheckHits.CheckNoMatchExplanations(q, FIELD, Searcher, expDocNrs);
+        }
+
+
+        #region TestSpanExplanations
+        // LUCENENET NOTE: Tests in a base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestST1()
+        {
+            base.TestST1();
+        }
+
+        [Test]
+        public override void TestST2()
+        {
+            base.TestST2();
+        }
+
+        [Test]
+        public override void TestST4()
+        {
+            base.TestST4();
+        }
+
+        [Test]
+        public override void TestST5()
+        {
+            base.TestST5();
+        }
+
+        /* some SpanFirstQueries */
+
+        [Test]
+        public override void TestSF1()
+        {
+            base.TestSF1();
+        }
+
+        [Test]
+        public override void TestSF2()
+        {
+            base.TestSF2();
+        }
+
+        [Test]
+        public override void TestSF4()
+        {
+            base.TestSF4();
+        }
+
+        [Test]
+        public override void TestSF5()
+        {
+            base.TestSF5();
+        }
+
+        [Test]
+        public override void TestSF6()
+        {
+            base.TestSF6();
+        }
+
+        /* some SpanOrQueries */
+
+        [Test]
+        public override void TestSO1()
+        {
+            base.TestSO1();
+        }
+
+        [Test]
+        public override void TestSO2()
+        {
+            base.TestSO2();
+        }
+
+        [Test]
+        public override void TestSO3()
+        {
+            base.TestSO3();
+        }
+
+        [Test]
+        public override void TestSO4()
+        {
+            base.TestSO4();
+        }
+
+        /* some SpanNearQueries */
+
+        [Test]
+        public override void TestSNear1()
+        {
+            base.TestSNear1();
+        }
+
+        [Test]
+        public override void TestSNear2()
+        {
+            base.TestSNear2();
+        }
+
+        [Test]
+        public override void TestSNear3()
+        {
+            base.TestSNear3();
+        }
+
+        [Test]
+        public override void TestSNear4()
+        {
+            base.TestSNear4();
+        }
+
+        [Test]
+        public override void TestSNear5()
+        {
+            base.TestSNear5();
+        }
+
+        [Test]
+        public override void TestSNear6()
+        {
+            base.TestSNear6();
+        }
+
+        [Test]
+        public override void TestSNear7()
+        {
+            base.TestSNear7();
+        }
+
+        [Test]
+        public override void TestSNear8()
+        {
+            base.TestSNear8();
+        }
+
+        [Test]
+        public override void TestSNear9()
+        {
+            base.TestSNear9();
+        }
+
+        [Test]
+        public override void TestSNear10()
+        {
+            base.TestSNear10();
+        }
+
+        [Test]
+        public override void TestSNear11()
+        {
+            base.TestSNear11();
+        }
+
+        /* some SpanNotQueries */
+
+        [Test]
+        public override void TestSNot1()
+        {
+            base.TestSNot1();
+        }
+
+        [Test]
+        public override void TestSNot2()
+        {
+            base.TestSNot2();
+        }
+
+        [Test]
+        public override void TestSNot4()
+        {
+            base.TestSNot4();
+        }
+
+        [Test]
+        public override void TestSNot5()
+        {
+            base.TestSNot5();
+        }
+
+        [Test]
+        public override void TestSNot7()
+        {
+            base.TestSNot7();
+        }
+
+        [Test]
+        public override void TestSNot10()
+        {
+            base.TestSNot10();
+        }
+
+        #endregion
+
+        #region TestExplanations
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+
+        /// <summary>
+        /// Placeholder: JUnit requires at least one test in the class, and making
+        /// the class abstract doesn't help
+        /// </summary>
+        [Test]
+        public override void TestNoop()
+        {
+            base.TestNoop();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Spans/TestSpanFirstQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Spans/TestSpanFirstQuery.cs b/src/Lucene.Net.Tests/Search/Spans/TestSpanFirstQuery.cs
new file mode 100644
index 0000000..2266bf7
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Spans/TestSpanFirstQuery.cs
@@ -0,0 +1,74 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search.Spans
+{
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using CharacterRunAutomaton = Lucene.Net.Util.Automaton.CharacterRunAutomaton;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using RegExp = Lucene.Net.Util.Automaton.RegExp;
+    using Term = Lucene.Net.Index.Term;
+
+    [TestFixture]
+    public class TestSpanFirstQuery : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestStartPositions()
+        {
+            Directory dir = NewDirectory();
+
+            // mimic StopAnalyzer
+            CharacterRunAutomaton stopSet = new CharacterRunAutomaton((new RegExp("the|a|of")).ToAutomaton());
+            Analyzer analyzer = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, stopSet);
+
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, analyzer, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "the quick brown fox", Field.Store.NO));
+            writer.AddDocument(doc);
+            Document doc2 = new Document();
+            doc2.Add(NewTextField("field", "quick brown fox", Field.Store.NO));
+            writer.AddDocument(doc2);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+
+            // user queries on "starts-with quick"
+            SpanQuery sfq = new SpanFirstQuery(new SpanTermQuery(new Term("field", "quick")), 1);
+            Assert.AreEqual(1, searcher.Search(sfq, 10).TotalHits);
+
+            // user queries on "starts-with the quick"
+            SpanQuery include = new SpanFirstQuery(new SpanTermQuery(new Term("field", "quick")), 2);
+            sfq = new SpanNotQuery(include, sfq);
+            Assert.AreEqual(1, searcher.Search(sfq, 10).TotalHits);
+
+            writer.Dispose();
+            reader.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

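As a minimal sketch of what TestStartPositions exercises (the field and term names mirror the test; the standalone queries are illustrative): SpanFirstQuery(match, end) accepts only spans of match whose end position is no greater than end, which is why the stop-filter position gap in "the quick brown fox" pushes "quick" out of range when end is 1.

    SpanQuery quick = new SpanTermQuery(new Term("field", "quick"));

    // Accepts spans of "quick" ending at position <= 1, i.e. the first token.
    SpanQuery startsWithQuick = new SpanFirstQuery(quick, 1);

    // With the stop filter, "the quick brown fox" indexes "quick" at position 1
    // (span end 2), so it only qualifies once the end bound is raised to 2.
    SpanQuery startsWithTheQuick = new SpanFirstQuery(quick, 2);
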
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Spans/TestSpanMultiTermQueryWrapper.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Spans/TestSpanMultiTermQueryWrapper.cs b/src/Lucene.Net.Tests/Search/Spans/TestSpanMultiTermQueryWrapper.cs
new file mode 100644
index 0000000..4d27ecc
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Spans/TestSpanMultiTermQueryWrapper.cs
@@ -0,0 +1,245 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search.Spans
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Tests for <seealso cref="SpanMultiTermQueryWrapper"/>, wrapping a few MultiTermQueries.
+    /// </summary>
+    [TestFixture]
+    public class TestSpanMultiTermQueryWrapper : LuceneTestCase
+    {
+        private Directory Directory;
+        private IndexReader Reader;
+        private IndexSearcher Searcher;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Directory = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), Directory, Similarity, TimeZone);
+            Document doc = new Document();
+            Field field = NewTextField("field", "", Field.Store.NO);
+            doc.Add(field);
+
+            field.SetStringValue("quick brown fox");
+            iw.AddDocument(doc);
+            field.SetStringValue("jumps over lazy broun dog");
+            iw.AddDocument(doc);
+            field.SetStringValue("jumps over extremely very lazy broxn dog");
+            iw.AddDocument(doc);
+            Reader = iw.Reader;
+            iw.Dispose();
+            Searcher = NewSearcher(Reader);
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Directory.Dispose();
+            base.TearDown();
+        }
+
+        [Test]
+        public virtual void TestWildcard()
+        {
+            WildcardQuery wq = new WildcardQuery(new Term("field", "bro?n"));
+            SpanQuery swq = new SpanMultiTermQueryWrapper<MultiTermQuery>(wq);
+            // will only match quick brown fox
+            SpanFirstQuery sfq = new SpanFirstQuery(swq, 2);
+            Assert.AreEqual(1, Searcher.Search(sfq, 10).TotalHits);
+        }
+
+        [Test]
+        public virtual void TestPrefix()
+        {
+            WildcardQuery wq = new WildcardQuery(new Term("field", "extrem*"));
+            SpanQuery swq = new SpanMultiTermQueryWrapper<MultiTermQuery>(wq);
+            // will only match "jumps over extremely very lazy broxn dog"
+            SpanFirstQuery sfq = new SpanFirstQuery(swq, 3);
+            Assert.AreEqual(1, Searcher.Search(sfq, 10).TotalHits);
+        }
+
+        [Test]
+        public virtual void TestFuzzy()
+        {
+            FuzzyQuery fq = new FuzzyQuery(new Term("field", "broan"));
+            SpanQuery sfq = new SpanMultiTermQueryWrapper<MultiTermQuery>(fq);
+            // will not match quick brown fox
+            SpanPositionRangeQuery sprq = new SpanPositionRangeQuery(sfq, 3, 6);
+            Assert.AreEqual(2, Searcher.Search(sprq, 10).TotalHits);
+        }
+
+        [Test]
+        public virtual void TestFuzzy2()
+        {
+            // maximum of 1 term expansion
+            FuzzyQuery fq = new FuzzyQuery(new Term("field", "broan"), 1, 0, 1, false);
+            SpanQuery sfq = new SpanMultiTermQueryWrapper<MultiTermQuery>(fq);
+            // will only match jumps over lazy broun dog
+            SpanPositionRangeQuery sprq = new SpanPositionRangeQuery(sfq, 0, 100);
+            Assert.AreEqual(1, Searcher.Search(sprq, 10).TotalHits);
+        }
+
+        [Test]
+        public virtual void TestNoSuchMultiTermsInNear()
+        {
+            //test to make sure non-existent multiterms aren't throwing null pointer exceptions
+            FuzzyQuery fuzzyNoSuch = new FuzzyQuery(new Term("field", "noSuch"), 1, 0, 1, false);
+            SpanQuery spanNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(fuzzyNoSuch);
+            SpanQuery term = new SpanTermQuery(new Term("field", "brown"));
+            SpanQuery near = new SpanNearQuery(new SpanQuery[] { term, spanNoSuch }, 1, true);
+            Assert.AreEqual(0, Searcher.Search(near, 10).TotalHits);
+            //flip order
+            near = new SpanNearQuery(new SpanQuery[] { spanNoSuch, term }, 1, true);
+            Assert.AreEqual(0, Searcher.Search(near, 10).TotalHits);
+
+            WildcardQuery wcNoSuch = new WildcardQuery(new Term("field", "noSuch*"));
+            SpanQuery spanWCNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(wcNoSuch);
+            near = new SpanNearQuery(new SpanQuery[] { term, spanWCNoSuch }, 1, true);
+            Assert.AreEqual(0, Searcher.Search(near, 10).TotalHits);
+
+            RegexpQuery rgxNoSuch = new RegexpQuery(new Term("field", "noSuch"));
+            SpanQuery spanRgxNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(rgxNoSuch);
+            near = new SpanNearQuery(new SpanQuery[] { term, spanRgxNoSuch }, 1, true);
+            Assert.AreEqual(0, Searcher.Search(near, 10).TotalHits);
+
+            PrefixQuery prfxNoSuch = new PrefixQuery(new Term("field", "noSuch"));
+            SpanQuery spanPrfxNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(prfxNoSuch);
+            near = new SpanNearQuery(new SpanQuery[] { term, spanPrfxNoSuch }, 1, true);
+            Assert.AreEqual(0, Searcher.Search(near, 10).TotalHits);
+
+            //test single noSuch
+            near = new SpanNearQuery(new SpanQuery[] { spanPrfxNoSuch }, 1, true);
+            Assert.AreEqual(0, Searcher.Search(near, 10).TotalHits);
+
+            //test double noSuch
+            near = new SpanNearQuery(new SpanQuery[] { spanPrfxNoSuch, spanPrfxNoSuch }, 1, true);
+            Assert.AreEqual(0, Searcher.Search(near, 10).TotalHits);
+        }
+
+        [Test]
+        public virtual void TestNoSuchMultiTermsInNotNear()
+        {
+            //test to make sure non-existent multiterms aren't throwing non-matching field exceptions
+            FuzzyQuery fuzzyNoSuch = new FuzzyQuery(new Term("field", "noSuch"), 1, 0, 1, false);
+            SpanQuery spanNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(fuzzyNoSuch);
+            SpanQuery term = new SpanTermQuery(new Term("field", "brown"));
+            SpanNotQuery notNear = new SpanNotQuery(term, spanNoSuch, 0, 0);
+            Assert.AreEqual(1, Searcher.Search(notNear, 10).TotalHits);
+
+            //flip
+            notNear = new SpanNotQuery(spanNoSuch, term, 0, 0);
+            Assert.AreEqual(0, Searcher.Search(notNear, 10).TotalHits);
+
+            //both noSuch
+            notNear = new SpanNotQuery(spanNoSuch, spanNoSuch, 0, 0);
+            Assert.AreEqual(0, Searcher.Search(notNear, 10).TotalHits);
+
+            WildcardQuery wcNoSuch = new WildcardQuery(new Term("field", "noSuch*"));
+            SpanQuery spanWCNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(wcNoSuch);
+            notNear = new SpanNotQuery(term, spanWCNoSuch, 0, 0);
+            Assert.AreEqual(1, Searcher.Search(notNear, 10).TotalHits);
+
+            RegexpQuery rgxNoSuch = new RegexpQuery(new Term("field", "noSuch"));
+            SpanQuery spanRgxNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(rgxNoSuch);
+            notNear = new SpanNotQuery(term, spanRgxNoSuch, 1, 1);
+            Assert.AreEqual(1, Searcher.Search(notNear, 10).TotalHits);
+
+            PrefixQuery prfxNoSuch = new PrefixQuery(new Term("field", "noSuch"));
+            SpanQuery spanPrfxNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(prfxNoSuch);
+            notNear = new SpanNotQuery(term, spanPrfxNoSuch, 1, 1);
+            Assert.AreEqual(1, Searcher.Search(notNear, 10).TotalHits);
+        }
+
+        [Test]
+        public virtual void TestNoSuchMultiTermsInOr()
+        {
+            //test to make sure non-existent multiterms aren't throwing null pointer exceptions
+            FuzzyQuery fuzzyNoSuch = new FuzzyQuery(new Term("field", "noSuch"), 1, 0, 1, false);
+            SpanQuery spanNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(fuzzyNoSuch);
+            SpanQuery term = new SpanTermQuery(new Term("field", "brown"));
+            SpanOrQuery near = new SpanOrQuery(new SpanQuery[] { term, spanNoSuch });
+            Assert.AreEqual(1, Searcher.Search(near, 10).TotalHits);
+
+            //flip
+            near = new SpanOrQuery(new SpanQuery[] { spanNoSuch, term });
+            Assert.AreEqual(1, Searcher.Search(near, 10).TotalHits);
+
+            WildcardQuery wcNoSuch = new WildcardQuery(new Term("field", "noSuch*"));
+            SpanQuery spanWCNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(wcNoSuch);
+            near = new SpanOrQuery(new SpanQuery[] { term, spanWCNoSuch });
+            Assert.AreEqual(1, Searcher.Search(near, 10).TotalHits);
+
+            RegexpQuery rgxNoSuch = new RegexpQuery(new Term("field", "noSuch"));
+            SpanQuery spanRgxNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(rgxNoSuch);
+            near = new SpanOrQuery(new SpanQuery[] { term, spanRgxNoSuch });
+            Assert.AreEqual(1, Searcher.Search(near, 10).TotalHits);
+
+            PrefixQuery prfxNoSuch = new PrefixQuery(new Term("field", "noSuch"));
+            SpanQuery spanPrfxNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(prfxNoSuch);
+            near = new SpanOrQuery(new SpanQuery[] { term, spanPrfxNoSuch });
+            Assert.AreEqual(1, Searcher.Search(near, 10).TotalHits);
+
+            near = new SpanOrQuery(new SpanQuery[] { spanPrfxNoSuch });
+            Assert.AreEqual(0, Searcher.Search(near, 10).TotalHits);
+
+            near = new SpanOrQuery(new SpanQuery[] { spanPrfxNoSuch, spanPrfxNoSuch });
+            Assert.AreEqual(0, Searcher.Search(near, 10).TotalHits);
+        }
+
+        [Test]
+        public virtual void TestNoSuchMultiTermsInSpanFirst()
+        {
+            //this hasn't been a problem
+            FuzzyQuery fuzzyNoSuch = new FuzzyQuery(new Term("field", "noSuch"), 1, 0, 1, false);
+            SpanQuery spanNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(fuzzyNoSuch);
+            SpanQuery spanFirst = new SpanFirstQuery(spanNoSuch, 10);
+
+            Assert.AreEqual(0, Searcher.Search(spanFirst, 10).TotalHits);
+
+            WildcardQuery wcNoSuch = new WildcardQuery(new Term("field", "noSuch*"));
+            SpanQuery spanWCNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(wcNoSuch);
+            spanFirst = new SpanFirstQuery(spanWCNoSuch, 10);
+            Assert.AreEqual(0, Searcher.Search(spanFirst, 10).TotalHits);
+
+            RegexpQuery rgxNoSuch = new RegexpQuery(new Term("field", "noSuch"));
+            SpanQuery spanRgxNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(rgxNoSuch);
+            spanFirst = new SpanFirstQuery(spanRgxNoSuch, 10);
+            Assert.AreEqual(0, Searcher.Search(spanFirst, 10).TotalHits);
+
+            PrefixQuery prfxNoSuch = new PrefixQuery(new Term("field", "noSuch"));
+            SpanQuery spanPrfxNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(prfxNoSuch);
+            spanFirst = new SpanFirstQuery(spanPrfxNoSuch, 10);
+            Assert.AreEqual(0, Searcher.Search(spanFirst, 10).TotalHits);
+        }
+    }
+}
\ No newline at end of file

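A minimal sketch of the wrapper's purpose, under the same names the tests above use (the composition itself is illustrative): a MultiTermQuery such as a wildcard, fuzzy, prefix, or regexp query cannot be used directly where a SpanQuery is required, and SpanMultiTermQueryWrapper bridges the two so the expanded terms can participate in positional queries.

    WildcardQuery wildcard = new WildcardQuery(new Term("field", "bro?n"));
    SpanQuery spanWildcard = new SpanMultiTermQueryWrapper<MultiTermQuery>(wildcard);

    // "quick" immediately followed (slop 0, in order) by any term matching bro?n.
    SpanQuery near = new SpanNearQuery(
        new SpanQuery[] { new SpanTermQuery(new Term("field", "quick")), spanWildcard },
        0, true);
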
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Spans/TestSpanSearchEquivalence.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Spans/TestSpanSearchEquivalence.cs b/src/Lucene.Net.Tests/Search/Spans/TestSpanSearchEquivalence.cs
new file mode 100644
index 0000000..569a03e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Spans/TestSpanSearchEquivalence.cs
@@ -0,0 +1,134 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Search.Spans
+{
+    using Occur = Lucene.Net.Search.Occur;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Basic equivalence tests for span queries
+    /// </summary>
+    [TestFixture]
+    public class TestSpanSearchEquivalence : SearchEquivalenceTestBase
+    {
+        // TODO: we could go a little crazy for a lot of these,
+        // but these are just simple minimal cases in case something
+        // goes horribly wrong. Put more intense tests elsewhere.
+
+        /// <summary>
+        /// SpanTermQuery(A) = TermQuery(A) </summary>
+        [Test]
+        public virtual void TestSpanTermVersusTerm()
+        {
+            Term t1 = RandomTerm();
+            AssertSameSet(new TermQuery(t1), new SpanTermQuery(t1));
+        }
+
+        /// <summary>
+        /// SpanOrQuery(A, B) = (A B) </summary>
+        [Test]
+        public virtual void TestSpanOrVersusBoolean()
+        {
+            Term t1 = RandomTerm();
+            Term t2 = RandomTerm();
+            BooleanQuery q1 = new BooleanQuery();
+            q1.Add(new TermQuery(t1), Occur.SHOULD);
+            q1.Add(new TermQuery(t2), Occur.SHOULD);
+            SpanOrQuery q2 = new SpanOrQuery(new SpanTermQuery(t1), new SpanTermQuery(t2));
+            AssertSameSet(q1, q2);
+        }
+
+        /// <summary>
+        /// SpanNotQuery(A, B) ⊆ SpanTermQuery(A) </summary>
+        [Test]
+        public virtual void TestSpanNotVersusSpanTerm()
+        {
+            Term t1 = RandomTerm();
+            Term t2 = RandomTerm();
+            AssertSubsetOf(new SpanNotQuery(new SpanTermQuery(t1), new SpanTermQuery(t2)), new SpanTermQuery(t1));
+        }
+
+        /// <summary>
+        /// SpanFirstQuery(A, 10) ⊆ SpanTermQuery(A) </summary>
+        [Test]
+        public virtual void TestSpanFirstVersusSpanTerm()
+        {
+            Term t1 = RandomTerm();
+            AssertSubsetOf(new SpanFirstQuery(new SpanTermQuery(t1), 10), new SpanTermQuery(t1));
+        }
+
+        /// <summary>
+        /// SpanNearQuery([A, B], 0, true) = "A B" </summary>
+        [Test]
+        public virtual void TestSpanNearVersusPhrase()
+        {
+            Term t1 = RandomTerm();
+            Term t2 = RandomTerm();
+            SpanQuery[] subquery = new SpanQuery[] { new SpanTermQuery(t1), new SpanTermQuery(t2) };
+            SpanNearQuery q1 = new SpanNearQuery(subquery, 0, true);
+            PhraseQuery q2 = new PhraseQuery();
+            q2.Add(t1);
+            q2.Add(t2);
+            AssertSameSet(q1, q2);
+        }
+
+        /// <summary>
+        /// SpanNearQuery([A, B], ∞, false) = +A +B </summary>
+        [Test]
+        public virtual void TestSpanNearVersusBooleanAnd()
+        {
+            Term t1 = RandomTerm();
+            Term t2 = RandomTerm();
+            SpanQuery[] subquery = new SpanQuery[] { new SpanTermQuery(t1), new SpanTermQuery(t2) };
+            SpanNearQuery q1 = new SpanNearQuery(subquery, int.MaxValue, false);
+            BooleanQuery q2 = new BooleanQuery();
+            q2.Add(new TermQuery(t1), Occur.MUST);
+            q2.Add(new TermQuery(t2), Occur.MUST);
+            AssertSameSet(q1, q2);
+        }
+
+        /// <summary>
+        /// SpanNearQuery([A B], 0, false) ⊆ SpanNearQuery([A B], 1, false) </summary>
+        [Test]
+        public virtual void TestSpanNearVersusSloppySpanNear()
+        {
+            Term t1 = RandomTerm();
+            Term t2 = RandomTerm();
+            SpanQuery[] subquery = new SpanQuery[] { new SpanTermQuery(t1), new SpanTermQuery(t2) };
+            SpanNearQuery q1 = new SpanNearQuery(subquery, 0, false);
+            SpanNearQuery q2 = new SpanNearQuery(subquery, 1, false);
+            AssertSubsetOf(q1, q2);
+        }
+
+        /// <summary>
+        /// SpanNearQuery([A B], 3, true) ⊆ SpanNearQuery([A B], 3, false) </summary>
+        [Test]
+        public virtual void TestSpanNearInOrderVersusOutOfOrder()
+        {
+            Term t1 = RandomTerm();
+            Term t2 = RandomTerm();
+            SpanQuery[] subquery = new SpanQuery[] { new SpanTermQuery(t1), new SpanTermQuery(t2) };
+            SpanNearQuery q1 = new SpanNearQuery(subquery, 3, true);
+            SpanNearQuery q2 = new SpanNearQuery(subquery, 3, false);
+            AssertSubsetOf(q1, q2);
+        }
+    }
+}
\ No newline at end of file

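One of the equivalences asserted above, written out as a minimal sketch (field and terms are illustrative): an in-order SpanNearQuery with slop 0 matches exactly the same documents as the corresponding PhraseQuery, which is what AssertSameSet verifies in TestSpanNearVersusPhrase.

    SpanQuery[] parts =
    {
        new SpanTermQuery(new Term("f", "quick")),
        new SpanTermQuery(new Term("f", "brown"))
    };
    Query asSpans = new SpanNearQuery(parts, 0, true);   // slop 0, in order

    PhraseQuery asPhrase = new PhraseQuery();            // the phrase "quick brown"
    asPhrase.Add(new Term("f", "quick"));
    asPhrase.Add(new Term("f", "brown"));

    // Per TestSpanNearVersusPhrase, AssertSameSet(asPhrase, asSpans) should hold.
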
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestMultiTermConstantScore.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestMultiTermConstantScore.cs b/src/Lucene.Net.Tests/Search/TestMultiTermConstantScore.cs
new file mode 100644
index 0000000..5aeaf7c
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestMultiTermConstantScore.cs
@@ -0,0 +1,563 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestMultiTermConstantScore : BaseTestRangeFilter
+    {
+        /// <summary>
+        /// threshold for comparing floats </summary>
+        public const float SCORE_COMP_THRESH = 1e-6f;
+
+        internal static Directory Small;
+        internal static IndexReader Reader;
+
+        public static void AssertEquals(string m, int e, int a)
+        {
+            Assert.AreEqual(e, a, m);
+        }
+
+        /// <summary>
+        /// LUCENENET specific
+        /// This method is non-static because NewIndexWriterConfig is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            string[] data = new string[] { "A 1 2 3 4 5 6", "Z       4 5 6", null, "B   2   4 5 6", "Y     3   5 6", null, "C     3     6", "X       4 5 6" };
+
+            Small = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Small, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).SetMergePolicy(NewLogMergePolicy()));
+
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.IsTokenized = false;
+            for (int i = 0; i < data.Length; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewField("id", Convert.ToString(i), customType)); // Field.Keyword("id",String.valueOf(i)));
+                doc.Add(NewField("all", "all", customType)); // Field.Keyword("all","all"));
+                if (null != data[i])
+                {
+                    doc.Add(NewTextField("data", data[i], Field.Store.YES)); // Field.Text("data",data[i]));
+                }
+                writer.AddDocument(doc);
+            }
+
+            Reader = writer.Reader;
+            writer.Dispose();
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            Reader.Dispose();
+            Small.Dispose();
+            Reader = null;
+            Small = null;
+        }
+
+        /// <summary>
+        /// macro for readability </summary>
+        public static Query Csrq(string f, string l, string h, bool il, bool ih)
+        {
+            TermRangeQuery query = TermRangeQuery.NewStringRange(f, l, h, il, ih);
+            query.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: query=" + query);
+            }
+            return query;
+        }
+
+        public static Query Csrq(string f, string l, string h, bool il, bool ih, MultiTermQuery.RewriteMethod method)
+        {
+            TermRangeQuery query = TermRangeQuery.NewStringRange(f, l, h, il, ih);
+            query.MultiTermRewriteMethod = method;
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: query=" + query + " method=" + method);
+            }
+            return query;
+        }
+
+        /// <summary>
+        /// macro for readability </summary>
+        public static Query Cspq(Term prefix)
+        {
+            PrefixQuery query = new PrefixQuery(prefix);
+            query.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
+            return query;
+        }
+
+        /// <summary>
+        /// macro for readability </summary>
+        public static Query Cswcq(Term wild)
+        {
+            WildcardQuery query = new WildcardQuery(wild);
+            query.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
+            return query;
+        }
+
+        [Test]
+        public virtual void TestBasics()
+        {
+            QueryUtils.Check(Csrq("data", "1", "6", T, T));
+            QueryUtils.Check(Csrq("data", "A", "Z", T, T));
+            QueryUtils.CheckUnequal(Csrq("data", "1", "6", T, T), Csrq("data", "A", "Z", T, T));
+
+            QueryUtils.Check(Cspq(new Term("data", "p*u?")));
+            QueryUtils.CheckUnequal(Cspq(new Term("data", "pre*")), Cspq(new Term("data", "pres*")));
+
+            QueryUtils.Check(Cswcq(new Term("data", "p")));
+            QueryUtils.CheckUnequal(Cswcq(new Term("data", "pre*n?t")), Cswcq(new Term("data", "pr*t?j")));
+        }
+
+        [Test]
+        public virtual void TestEqualScores()
+        {
+            // NOTE: uses index built in *this* setUp
+
+            IndexSearcher search = NewSearcher(Reader);
+
+            ScoreDoc[] result;
+
+            // some hits match more terms than others; the score should be the same
+
+            result = search.Search(Csrq("data", "1", "6", T, T), null, 1000).ScoreDocs;
+            int numHits = result.Length;
+            AssertEquals("wrong number of results", 6, numHits);
+            float score = result[0].Score;
+            for (int i = 1; i < numHits; i++)
+            {
+                Assert.AreEqual(score, result[i].Score, SCORE_COMP_THRESH, "score for " + i + " was not the same");
+            }
+
+            result = search.Search(Csrq("data", "1", "6", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE), null, 1000).ScoreDocs;
+            numHits = result.Length;
+            AssertEquals("wrong number of results", 6, numHits);
+            for (int i = 0; i < numHits; i++)
+            {
+                Assert.AreEqual(score, result[i].Score, SCORE_COMP_THRESH, "score for " + i + " was not the same");
+            }
+
+            result = search.Search(Csrq("data", "1", "6", T, T, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), null, 1000).ScoreDocs;
+            numHits = result.Length;
+            AssertEquals("wrong number of results", 6, numHits);
+            for (int i = 0; i < numHits; i++)
+            {
+                Assert.AreEqual(score, result[i].Score, SCORE_COMP_THRESH, "score for " + i + " was not the same");
+            }
+        }
+
+        [Test]
+        public virtual void TestEqualScoresWhenNoHits() // Test for LUCENE-5245: Empty MTQ rewrites should have a consistent norm, so always need to return a CSQ!
+        {
+            // NOTE: uses index built in *this* setUp
+
+            IndexSearcher search = NewSearcher(Reader);
+
+            ScoreDoc[] result;
+
+            TermQuery dummyTerm = new TermQuery(new Term("data", "1"));
+
+            BooleanQuery bq = new BooleanQuery();
+            bq.Add(dummyTerm, Occur.SHOULD); // hits one doc
+            bq.Add(Csrq("data", "#", "#", T, T), Occur.SHOULD); // hits no docs
+            result = search.Search(bq, null, 1000).ScoreDocs;
+            int numHits = result.Length;
+            AssertEquals("wrong number of results", 1, numHits);
+            float score = result[0].Score;
+            for (int i = 1; i < numHits; i++)
+            {
+                Assert.AreEqual(score, result[i].Score, SCORE_COMP_THRESH, "score for " + i + " was not the same");
+            }
+
+            bq = new BooleanQuery();
+            bq.Add(dummyTerm, Occur.SHOULD); // hits one doc
+            bq.Add(Csrq("data", "#", "#", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE), Occur.SHOULD); // hits no docs
+            result = search.Search(bq, null, 1000).ScoreDocs;
+            numHits = result.Length;
+            AssertEquals("wrong number of results", 1, numHits);
+            for (int i = 0; i < numHits; i++)
+            {
+                Assert.AreEqual(score, result[i].Score, SCORE_COMP_THRESH, "score for " + i + " was not the same");
+            }
+
+            bq = new BooleanQuery();
+            bq.Add(dummyTerm, Occur.SHOULD); // hits one doc
+            bq.Add(Csrq("data", "#", "#", T, T, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), Occur.SHOULD); // hits no docs
+            result = search.Search(bq, null, 1000).ScoreDocs;
+            numHits = result.Length;
+            AssertEquals("wrong number of results", 1, numHits);
+            for (int i = 0; i < numHits; i++)
+            {
+                Assert.AreEqual(score, result[i].Score, SCORE_COMP_THRESH, "score for " + i + " was not the same");
+            }
+        }
+
+        [Test]
+        public virtual void TestBoost()
+        {
+            // NOTE: uses index built in *this* setUp
+
+            IndexSearcher search = NewSearcher(Reader);
+
+            // test for correct application of query normalization
+            // must use a non score normalizing method for this.
+
+            search.Similarity = new DefaultSimilarity();
+            Query q = Csrq("data", "1", "6", T, T);
+            q.Boost = 100;
+            search.Search(q, null, new CollectorAnonymousInnerClassHelper(this));
+
+            //
+            // Ensure that boosting works to score one clause of a query higher
+            // than another.
+            //
+            Query q1 = Csrq("data", "A", "A", T, T); // matches document #0
+            q1.Boost = .1f;
+            Query q2 = Csrq("data", "Z", "Z", T, T); // matches document #1
+            BooleanQuery bq = new BooleanQuery(true);
+            bq.Add(q1, Occur.SHOULD);
+            bq.Add(q2, Occur.SHOULD);
+
+            ScoreDoc[] hits = search.Search(bq, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits[0].Doc);
+            Assert.AreEqual(0, hits[1].Doc);
+            Assert.IsTrue(hits[0].Score > hits[1].Score);
+
+            q1 = Csrq("data", "A", "A", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE); // matches document #0
+            q1.Boost = .1f;
+            q2 = Csrq("data", "Z", "Z", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE); // matches document #1
+            bq = new BooleanQuery(true);
+            bq.Add(q1, Occur.SHOULD);
+            bq.Add(q2, Occur.SHOULD);
+
+            hits = search.Search(bq, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits[0].Doc);
+            Assert.AreEqual(0, hits[1].Doc);
+            Assert.IsTrue(hits[0].Score > hits[1].Score);
+
+            q1 = Csrq("data", "A", "A", T, T); // matches document #0
+            q1.Boost = 10f;
+            q2 = Csrq("data", "Z", "Z", T, T); // matches document #1
+            bq = new BooleanQuery(true);
+            bq.Add(q1, Occur.SHOULD);
+            bq.Add(q2, Occur.SHOULD);
+
+            hits = search.Search(bq, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits[0].Doc);
+            Assert.AreEqual(1, hits[1].Doc);
+            Assert.IsTrue(hits[0].Score > hits[1].Score);
+        }
+
+        private class CollectorAnonymousInnerClassHelper : ICollector
+        {
+            private readonly TestMultiTermConstantScore OuterInstance;
+
+            public CollectorAnonymousInnerClassHelper(TestMultiTermConstantScore outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+                @base = 0;
+            }
+
+            private int @base;
+            private Scorer scorer;
+
+            public virtual void SetScorer(Scorer scorer)
+            {
+                this.scorer = scorer;
+            }
+
+            public virtual void Collect(int doc)
+            {
+                Assert.AreEqual(1.0f, scorer.GetScore(), SCORE_COMP_THRESH, "score for doc " + (doc + @base) + " was not correct");
+            }
+
+            public virtual void SetNextReader(AtomicReaderContext context)
+            {
+                @base = context.DocBase;
+            }
+
+            public virtual bool AcceptsDocsOutOfOrder
+            {
+                get { return true; }
+            }
+        }
+
+        [Test]
+        public virtual void TestBooleanOrderUnAffected()
+        {
+            // NOTE: uses index built in *this* setUp
+
+            IndexSearcher search = NewSearcher(Reader);
+
+            // first do a regular TermRangeQuery which uses term expansion so
+            // docs with more terms in range get higher scores
+
+            Query rq = TermRangeQuery.NewStringRange("data", "1", "4", T, T);
+
+            ScoreDoc[] expected = search.Search(rq, null, 1000).ScoreDocs;
+            int numHits = expected.Length;
+
+            // now do a boolean query which also contains a
+            // ConstantScoreRangeQuery and make sure the order is the same
+
+            BooleanQuery q = new BooleanQuery();
+            q.Add(rq, Occur.MUST); // T, F);
+            q.Add(Csrq("data", "1", "6", T, T), Occur.MUST); // T, F);
+
+            ScoreDoc[] actual = search.Search(q, null, 1000).ScoreDocs;
+
+            AssertEquals("wrong numebr of hits", numHits, actual.Length);
+            for (int i = 0; i < numHits; i++)
+            {
+                AssertEquals("mismatch in docid for hit#" + i, expected[i].Doc, actual[i].Doc);
+            }
+        }
+
+        [Test]
+        public virtual void TestRangeQueryId()
+        {
+            // NOTE: uses index built in *super* setUp
+
+            IndexReader reader = SignedIndexReader;
+            IndexSearcher search = NewSearcher(reader);
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: reader=" + reader);
+            }
+
+            int medId = ((MaxId - MinId) / 2);
+
+            string minIP = Pad(MinId);
+            string maxIP = Pad(MaxId);
+            string medIP = Pad(medId);
+
+            int numDocs = reader.NumDocs;
+
+            AssertEquals("num of docs", numDocs, 1 + MaxId - MinId);
+
+            ScoreDoc[] result;
+
+            // test id, bounded on both ends
+
+            result = search.Search(Csrq("id", minIP, maxIP, T, T), null, numDocs).ScoreDocs;
+            AssertEquals("find all", numDocs, result.Length);
+
+            result = search.Search(Csrq("id", minIP, maxIP, T, T, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), null, numDocs).ScoreDocs;
+            AssertEquals("find all", numDocs, result.Length);
+
+            result = search.Search(Csrq("id", minIP, maxIP, T, F), null, numDocs).ScoreDocs;
+            AssertEquals("all but last", numDocs - 1, result.Length);
+
+            result = search.Search(Csrq("id", minIP, maxIP, T, F, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), null, numDocs).ScoreDocs;
+            AssertEquals("all but last", numDocs - 1, result.Length);
+
+            result = search.Search(Csrq("id", minIP, maxIP, F, T), null, numDocs).ScoreDocs;
+            AssertEquals("all but first", numDocs - 1, result.Length);
+
+            result = search.Search(Csrq("id", minIP, maxIP, F, T, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), null, numDocs).ScoreDocs;
+            AssertEquals("all but first", numDocs - 1, result.Length);
+
+            result = search.Search(Csrq("id", minIP, maxIP, F, F), null, numDocs).ScoreDocs;
+            AssertEquals("all but ends", numDocs - 2, result.Length);
+
+            result = search.Search(Csrq("id", minIP, maxIP, F, F, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), null, numDocs).ScoreDocs;
+            AssertEquals("all but ends", numDocs - 2, result.Length);
+
+            result = search.Search(Csrq("id", medIP, maxIP, T, T), null, numDocs).ScoreDocs;
+            AssertEquals("med and up", 1 + MaxId - medId, result.Length);
+
+            result = search.Search(Csrq("id", medIP, maxIP, T, T, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), null, numDocs).ScoreDocs;
+            AssertEquals("med and up", 1 + MaxId - medId, result.Length);
+
+            result = search.Search(Csrq("id", minIP, medIP, T, T), null, numDocs).ScoreDocs;
+            AssertEquals("up to med", 1 + medId - MinId, result.Length);
+
+            result = search.Search(Csrq("id", minIP, medIP, T, T, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), null, numDocs).ScoreDocs;
+            AssertEquals("up to med", 1 + medId - MinId, result.Length);
+
+            // unbounded id
+
+            result = search.Search(Csrq("id", minIP, null, T, F), null, numDocs).ScoreDocs;
+            AssertEquals("min and up", numDocs, result.Length);
+
+            result = search.Search(Csrq("id", null, maxIP, F, T), null, numDocs).ScoreDocs;
+            AssertEquals("max and down", numDocs, result.Length);
+
+            result = search.Search(Csrq("id", minIP, null, F, F), null, numDocs).ScoreDocs;
+            AssertEquals("not min, but up", numDocs - 1, result.Length);
+
+            result = search.Search(Csrq("id", null, maxIP, F, F), null, numDocs).ScoreDocs;
+            AssertEquals("not max, but down", numDocs - 1, result.Length);
+
+            result = search.Search(Csrq("id", medIP, maxIP, T, F), null, numDocs).ScoreDocs;
+            AssertEquals("med and up, not max", MaxId - medId, result.Length);
+
+            result = search.Search(Csrq("id", minIP, medIP, F, T), null, numDocs).ScoreDocs;
+            AssertEquals("not min, up to med", medId - MinId, result.Length);
+
+            // very small sets
+
+            result = search.Search(Csrq("id", minIP, minIP, F, F), null, numDocs).ScoreDocs;
+            AssertEquals("min,min,F,F", 0, result.Length);
+
+            result = search.Search(Csrq("id", minIP, minIP, F, F, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), null, numDocs).ScoreDocs;
+            AssertEquals("min,min,F,F", 0, result.Length);
+
+            result = search.Search(Csrq("id", medIP, medIP, F, F), null, numDocs).ScoreDocs;
+            AssertEquals("med,med,F,F", 0, result.Length);
+
+            result = search.Search(Csrq("id", medIP, medIP, F, F, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), null, numDocs).ScoreDocs;
+            AssertEquals("med,med,F,F", 0, result.Length);
+
+            result = search.Search(Csrq("id", maxIP, maxIP, F, F), null, numDocs).ScoreDocs;
+            AssertEquals("max,max,F,F", 0, result.Length);
+
+            result = search.Search(Csrq("id", maxIP, maxIP, F, F, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), null, numDocs).ScoreDocs;
+            AssertEquals("max,max,F,F", 0, result.Length);
+
+            result = search.Search(Csrq("id", minIP, minIP, T, T), null, numDocs).ScoreDocs;
+            AssertEquals("min,min,T,T", 1, result.Length);
+
+            result = search.Search(Csrq("id", minIP, minIP, T, T, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), null, numDocs).ScoreDocs;
+            AssertEquals("min,min,T,T", 1, result.Length);
+
+            result = search.Search(Csrq("id", null, minIP, F, T), null, numDocs).ScoreDocs;
+            AssertEquals("nul,min,F,T", 1, result.Length);
+
+            result = search.Search(Csrq("id", null, minIP, F, T, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), null, numDocs).ScoreDocs;
+            AssertEquals("nul,min,F,T", 1, result.Length);
+
+            result = search.Search(Csrq("id", maxIP, maxIP, T, T), null, numDocs).ScoreDocs;
+            AssertEquals("max,max,T,T", 1, result.Length);
+
+            result = search.Search(Csrq("id", maxIP, maxIP, T, T, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), null, numDocs).ScoreDocs;
+            AssertEquals("max,max,T,T", 1, result.Length);
+
+            result = search.Search(Csrq("id", maxIP, null, T, F), null, numDocs).ScoreDocs;
+            AssertEquals("max,nul,T,T", 1, result.Length);
+
+            result = search.Search(Csrq("id", maxIP, null, T, F, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), null, numDocs).ScoreDocs;
+            AssertEquals("max,nul,T,T", 1, result.Length);
+
+            result = search.Search(Csrq("id", medIP, medIP, T, T), null, numDocs).ScoreDocs;
+            AssertEquals("med,med,T,T", 1, result.Length);
+
+            result = search.Search(Csrq("id", medIP, medIP, T, T, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), null, numDocs).ScoreDocs;
+            AssertEquals("med,med,T,T", 1, result.Length);
+        }
+
+        [Test]
+        public virtual void TestRangeQueryRand()
+        {
+            // NOTE: uses index built in *super* setUp
+
+            IndexReader reader = SignedIndexReader;
+            IndexSearcher search = NewSearcher(reader);
+
+            string minRP = Pad(SignedIndexDir.MinR);
+            string maxRP = Pad(SignedIndexDir.MaxR);
+
+            int numDocs = reader.NumDocs;
+
+            AssertEquals("num of docs", numDocs, 1 + MaxId - MinId);
+
+            ScoreDoc[] result;
+
+            // test extremes, bounded on both ends
+
+            result = search.Search(Csrq("rand", minRP, maxRP, T, T), null, numDocs).ScoreDocs;
+            AssertEquals("find all", numDocs, result.Length);
+
+            result = search.Search(Csrq("rand", minRP, maxRP, T, F), null, numDocs).ScoreDocs;
+            AssertEquals("all but biggest", numDocs - 1, result.Length);
+
+            result = search.Search(Csrq("rand", minRP, maxRP, F, T), null, numDocs).ScoreDocs;
+            AssertEquals("all but smallest", numDocs - 1, result.Length);
+
+            result = search.Search(Csrq("rand", minRP, maxRP, F, F), null, numDocs).ScoreDocs;
+            AssertEquals("all but extremes", numDocs - 2, result.Length);
+
+            // unbounded
+
+            result = search.Search(Csrq("rand", minRP, null, T, F), null, numDocs).ScoreDocs;
+            AssertEquals("smallest and up", numDocs, result.Length);
+
+            result = search.Search(Csrq("rand", null, maxRP, F, T), null, numDocs).ScoreDocs;
+            AssertEquals("biggest and down", numDocs, result.Length);
+
+            result = search.Search(Csrq("rand", minRP, null, F, F), null, numDocs).ScoreDocs;
+            AssertEquals("not smallest, but up", numDocs - 1, result.Length);
+
+            result = search.Search(Csrq("rand", null, maxRP, F, F), null, numDocs).ScoreDocs;
+            AssertEquals("not biggest, but down", numDocs - 1, result.Length);
+
+            // very small sets
+
+            result = search.Search(Csrq("rand", minRP, minRP, F, F), null, numDocs).ScoreDocs;
+            AssertEquals("min,min,F,F", 0, result.Length);
+            result = search.Search(Csrq("rand", maxRP, maxRP, F, F), null, numDocs).ScoreDocs;
+            AssertEquals("max,max,F,F", 0, result.Length);
+
+            result = search.Search(Csrq("rand", minRP, minRP, T, T), null, numDocs).ScoreDocs;
+            AssertEquals("min,min,T,T", 1, result.Length);
+            result = search.Search(Csrq("rand", null, minRP, F, T), null, numDocs).ScoreDocs;
+            AssertEquals("nul,min,F,T", 1, result.Length);
+
+            result = search.Search(Csrq("rand", maxRP, maxRP, T, T), null, numDocs).ScoreDocs;
+            AssertEquals("max,max,T,T", 1, result.Length);
+            result = search.Search(Csrq("rand", maxRP, null, T, F), null, numDocs).ScoreDocs;
+            AssertEquals("max,nul,T,T", 1, result.Length);
+        }
+
+
+        #region SorterTestBase
+        // LUCENENET NOTE: Tests in a base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestPad()
+        {
+            base.TestPad();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

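The Csrq(...) helper above boils down to the following minimal sketch (field name and bounds mirror the tests; the snippet is illustrative): forcing a filter-backed constant-score rewrite makes every hit score identically, regardless of how many terms in the range a document matches, which is exactly what TestEqualScores checks.

    // Inclusive range ["1".."6"] on the "data" field, as in the tests above.
    TermRangeQuery range = TermRangeQuery.NewStringRange("data", "1", "6", true, true);
    range.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;

    // After rewrite, every matching document receives the same constant score
    // (the query boost), no matter how many terms in the range it contains.
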
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestMultiTermQueryRewrites.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestMultiTermQueryRewrites.cs b/src/Lucene.Net.Tests/Search/TestMultiTermQueryRewrites.cs
new file mode 100644
index 0000000..0d7afc5
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestMultiTermQueryRewrites.cs
@@ -0,0 +1,305 @@
+using System;
+using System.Diagnostics;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using System.Runtime.CompilerServices;
+    using Util;
+    using AttributeSource = Lucene.Net.Util.AttributeSource;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MultiReader = Lucene.Net.Index.MultiReader;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+    using Terms = Lucene.Net.Index.Terms;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+
+    [TestFixture]
+    public class TestMultiTermQueryRewrites : LuceneTestCase
+    {
+        internal static Directory Dir, Sdir1, Sdir2;
+        internal static IndexReader Reader, MultiReader, MultiReaderDupls;
+        internal static IndexSearcher Searcher, MultiSearcher, MultiSearcherDupls;
+
+        /// <summary>
+        /// LUCENENET specific
+        /// This method is non-static because Similarity and TimeZone are not static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            Dir = NewDirectory();
+            Sdir1 = NewDirectory();
+            Sdir2 = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Dir, new MockAnalyzer(Random()), Similarity, TimeZone);
+            RandomIndexWriter swriter1 = new RandomIndexWriter(Random(), Sdir1, new MockAnalyzer(Random()), Similarity, TimeZone);
+            RandomIndexWriter swriter2 = new RandomIndexWriter(Random(), Sdir2, new MockAnalyzer(Random()), Similarity, TimeZone);
+
+            for (int i = 0; i < 10; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("data", Convert.ToString(i), Field.Store.NO));
+                writer.AddDocument(doc);
+                ((i % 2 == 0) ? swriter1 : swriter2).AddDocument(doc);
+            }
+            writer.ForceMerge(1);
+            swriter1.ForceMerge(1);
+            swriter2.ForceMerge(1);
+            writer.Dispose();
+            swriter1.Dispose();
+            swriter2.Dispose();
+
+            Reader = DirectoryReader.Open(Dir);
+            Searcher = NewSearcher(Reader);
+
+            MultiReader = new MultiReader(new IndexReader[] { DirectoryReader.Open(Sdir1), DirectoryReader.Open(Sdir2) }, true);
+            MultiSearcher = NewSearcher(MultiReader);
+
+            MultiReaderDupls = new MultiReader(new IndexReader[] { DirectoryReader.Open(Sdir1), DirectoryReader.Open(Dir) }, true);
+            MultiSearcherDupls = NewSearcher(MultiReaderDupls);
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            Reader.Dispose();
+            MultiReader.Dispose();
+            MultiReaderDupls.Dispose();
+            Dir.Dispose();
+            Sdir1.Dispose();
+            Sdir2.Dispose();
+            Reader = MultiReader = MultiReaderDupls = null;
+            Searcher = MultiSearcher = MultiSearcherDupls = null;
+            Dir = Sdir1 = Sdir2 = null;
+        }
+
+        private Query ExtractInnerQuery(Query q)
+        {
+            if (q is ConstantScoreQuery)
+            {
+                // wrapped as ConstantScoreQuery
+                q = ((ConstantScoreQuery)q).Query;
+            }
+            return q;
+        }
+
+        private Term ExtractTerm(Query q)
+        {
+            q = ExtractInnerQuery(q);
+            return ((TermQuery)q).Term;
+        }
+
+        private void CheckBooleanQueryOrder(Query q)
+        {
+            q = ExtractInnerQuery(q);
+            BooleanQuery bq = (BooleanQuery)q;
+            Term last = null, act;
+            foreach (BooleanClause clause in bq.Clauses)
+            {
+                act = ExtractTerm(clause.Query);
+                if (last != null)
+                {
+                    Assert.IsTrue(last.CompareTo(act) < 0, "sort order of terms in BQ violated");
+                }
+                last = act;
+            }
+        }
+
+        private void CheckDuplicateTerms(MultiTermQuery.RewriteMethod method)
+        {
+            MultiTermQuery mtq = TermRangeQuery.NewStringRange("data", "2", "7", true, true);
+            mtq.MultiTermRewriteMethod = method;
+            Query q1 = Searcher.Rewrite(mtq);
+            Query q2 = MultiSearcher.Rewrite(mtq);
+            Query q3 = MultiSearcherDupls.Rewrite(mtq);
+            if (VERBOSE)
+            {
+                Console.WriteLine();
+                Console.WriteLine("single segment: " + q1);
+                Console.WriteLine("multi segment: " + q2);
+                Console.WriteLine("multi segment with duplicates: " + q3);
+            }
+            Assert.IsTrue(q1.Equals(q2), "The multi-segment case must produce same rewritten query");
+            Assert.IsTrue(q1.Equals(q3), "The multi-segment case with duplicates must produce same rewritten query");
+            CheckBooleanQueryOrder(q1);
+            CheckBooleanQueryOrder(q2);
+            CheckBooleanQueryOrder(q3);
+        }
+
+        [Test]
+        public virtual void TestRewritesWithDuplicateTerms()
+        {
+            CheckDuplicateTerms(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+
+            CheckDuplicateTerms(MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE);
+
+            // use a large PQ here to only test duplicate terms and don't mix up when all scores are equal
+            CheckDuplicateTerms(new MultiTermQuery.TopTermsScoringBooleanQueryRewrite(1024));
+            CheckDuplicateTerms(new MultiTermQuery.TopTermsBoostOnlyBooleanQueryRewrite(1024));
+
+            // Test auto rewrite (but only boolean mode), so we set the limits to large values to always get a BQ
+            MultiTermQuery.ConstantScoreAutoRewrite rewrite = new MultiTermQuery.ConstantScoreAutoRewrite();
+            rewrite.TermCountCutoff = int.MaxValue;
+            rewrite.DocCountPercent = 100.0;
+            CheckDuplicateTerms(rewrite);
+        }
+
+        private void CheckBooleanQueryBoosts(BooleanQuery bq)
+        {
+            foreach (BooleanClause clause in bq.Clauses)
+            {
+                TermQuery mtq = (TermQuery)clause.Query;
+                Assert.AreEqual(Convert.ToSingle(mtq.Term.Text()), mtq.Boost, 0, "Parallel sorting of boosts in rewrite mode broken");
+            }
+        }
+
+        private void CheckBoosts(MultiTermQuery.RewriteMethod method)
+        {
+            MultiTermQuery mtq = new MultiTermQueryAnonymousInnerClassHelper(this);
+            mtq.MultiTermRewriteMethod = method;
+            Query q1 = Searcher.Rewrite(mtq);
+            Query q2 = MultiSearcher.Rewrite(mtq);
+            Query q3 = MultiSearcherDupls.Rewrite(mtq);
+            if (VERBOSE)
+            {
+                Console.WriteLine();
+                Console.WriteLine("single segment: " + q1);
+                Console.WriteLine("multi segment: " + q2);
+                Console.WriteLine("multi segment with duplicates: " + q3);
+            }
+            Assert.IsTrue(q1.Equals(q2), "The multi-segment case must produce same rewritten query");
+            Assert.IsTrue(q1.Equals(q3), "The multi-segment case with duplicates must produce same rewritten query");
+            CheckBooleanQueryBoosts((BooleanQuery)q1);
+            CheckBooleanQueryBoosts((BooleanQuery)q2);
+            CheckBooleanQueryBoosts((BooleanQuery)q3);
+        }
+
+        private class MultiTermQueryAnonymousInnerClassHelper : MultiTermQuery
+        {
+            private readonly TestMultiTermQueryRewrites OuterInstance;
+
+            public MultiTermQueryAnonymousInnerClassHelper(TestMultiTermQueryRewrites outerInstance)
+                : base("data")
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts)
+            {
+                return new TermRangeTermsEnumAnonymousInnerClassHelper(this, terms.GetIterator(null), new BytesRef("2"), new BytesRef("7"));
+            }
+
+            private class TermRangeTermsEnumAnonymousInnerClassHelper : TermRangeTermsEnum
+            {
+                private readonly MultiTermQueryAnonymousInnerClassHelper OuterInstance;
+
+                public TermRangeTermsEnumAnonymousInnerClassHelper(MultiTermQueryAnonymousInnerClassHelper outerInstance, TermsEnum iterator, BytesRef bref1, BytesRef bref2)
+                    : base(iterator, bref1, bref2, true, true)
+                {
+                    this.OuterInstance = outerInstance;
+                    boostAtt = Attributes.AddAttribute<IBoostAttribute>();
+                }
+
+                internal readonly IBoostAttribute boostAtt;
+
+                protected override AcceptStatus Accept(BytesRef term)
+                {
+                    boostAtt.Boost = Convert.ToSingle(term.Utf8ToString());
+                    return base.Accept(term);
+                }
+            }
+
+            public override string ToString(string field)
+            {
+                return "dummy";
+            }
+        }
+
+        [Test]
+        public virtual void TestBoosts()
+        {
+            CheckBoosts(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+
+            // use a large PQ here to test only boosts and don't mix up results when all scores are equal
+            CheckBoosts(new MultiTermQuery.TopTermsScoringBooleanQueryRewrite(1024));
+        }
+
+        private void CheckMaxClauseLimitation(MultiTermQuery.RewriteMethod method, [CallerMemberName] string memberName = "")
+        {
+            int savedMaxClauseCount = BooleanQuery.MaxClauseCount;
+            BooleanQuery.MaxClauseCount = 3;
+
+            MultiTermQuery mtq = TermRangeQuery.NewStringRange("data", "2", "7", true, true);
+            mtq.MultiTermRewriteMethod = method;
+            try
+            {
+                MultiSearcherDupls.Rewrite(mtq);
+                Assert.Fail("Should throw BooleanQuery.TooManyClauses");
+            }
+            catch (BooleanQuery.TooManyClauses e)
+            {
+                //  Maybe remove this assert in later versions, when internal API changes:
+                Assert.AreEqual("CheckMaxClauseCount", new StackTrace(e, false).GetFrames()[0].GetMethod().Name); //, "Should throw BooleanQuery.TooManyClauses with a stacktrace containing checkMaxClauseCount()");
+            }
+            finally
+            {
+                BooleanQuery.MaxClauseCount = savedMaxClauseCount;
+            }
+        }
+
+        private void CheckNoMaxClauseLimitation(MultiTermQuery.RewriteMethod method)
+        {
+            int savedMaxClauseCount = BooleanQuery.MaxClauseCount;
+            BooleanQuery.MaxClauseCount = 3;
+
+            MultiTermQuery mtq = TermRangeQuery.NewStringRange("data", "2", "7", true, true);
+            mtq.MultiTermRewriteMethod = method;
+            try
+            {
+                MultiSearcherDupls.Rewrite(mtq);
+            }
+            finally
+            {
+                BooleanQuery.MaxClauseCount = savedMaxClauseCount;
+            }
+        }
+
+        [Test]
+        public virtual void TestMaxClauseLimitations()
+        {
+            CheckMaxClauseLimitation(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+            CheckMaxClauseLimitation(MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE);
+
+            CheckNoMaxClauseLimitation(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);
+            CheckNoMaxClauseLimitation(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT);
+            CheckNoMaxClauseLimitation(new MultiTermQuery.TopTermsScoringBooleanQueryRewrite(1024));
+            CheckNoMaxClauseLimitation(new MultiTermQuery.TopTermsBoostOnlyBooleanQueryRewrite(1024));
+        }
+    }
+}
\ No newline at end of file
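
For readers following the rewrite tests above: ConstantScoreAutoRewrite chooses between a BooleanQuery rewrite and a filter-backed rewrite based on its two cutoffs. A minimal sketch of tuning them the other way, so that only small ranges rewrite to a BooleanQuery (the searcher variable and range bounds here are illustrative, not part of the test):

    // Sketch: rewrite to a BooleanQuery only while the range expands to few
    // terms covering a small fraction of the index; otherwise use a filter.
    MultiTermQuery range = TermRangeQuery.NewStringRange("data", "2", "7", true, true);
    var auto = new MultiTermQuery.ConstantScoreAutoRewrite();
    auto.TermCountCutoff = 16;   // at most 16 expanded terms
    auto.DocCountPercent = 1.0;  // covering at most 1% of all documents
    range.MultiTermRewriteMethod = auto;
    Query rewritten = searcher.Rewrite(range);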

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs b/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs
new file mode 100644
index 0000000..89607cf
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs
@@ -0,0 +1,244 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Support;
+using System;
+using System.Diagnostics;
+using System.Text;
+using System.Threading;
+
+namespace Lucene.Net.Search
+{
+    
+    using NUnit.Framework;
+    using System.IO;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using English = Lucene.Net.Util.English;
+    using Fields = Lucene.Net.Index.Fields;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using Terms = Lucene.Net.Index.Terms;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+
+    [TestFixture]
+    public class TestMultiThreadTermVectors : LuceneTestCase
+    {
+        private Directory Directory;
+        public int NumDocs = 100;
+        public int NumThreads = 3;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Directory = NewDirectory();
+            IndexWriter writer = new IndexWriter(Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            //writer.setNoCFSRatio(0.0);
+            //writer.infoStream = System.out;
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.IsTokenized = false;
+            customType.StoreTermVectors = true;
+            for (int i = 0; i < NumDocs; i++)
+            {
+                Documents.Document doc = new Documents.Document();
+                Field fld = NewField("field", English.IntToEnglish(i), customType);
+                doc.Add(fld);
+                writer.AddDocument(doc);
+            }
+            writer.Dispose();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Directory.Dispose();
+            base.TearDown();
+        }
+
+        [Test]
+        public virtual void Test()
+        {
+            IndexReader reader = null;
+
+            try
+            {
+                reader = DirectoryReader.Open(Directory);
+                for (int i = 1; i <= NumThreads; i++)
+                {
+                    TestTermPositionVectors(reader, i);
+                }
+            }
+            catch (IOException ioe)
+            {
+                Assert.Fail(ioe.Message);
+            }
+            finally
+            {
+                if (reader != null)
+                {
+                    try
+                    {
+                        // close the opened reader
+                        reader.Dispose();
+                    }
+                    catch (IOException ioe)
+                    {
+                        Console.WriteLine(ioe.ToString());
+                        Console.Write(ioe.StackTrace);
+                    }
+                }
+            }
+        }
+
+        public virtual void TestTermPositionVectors(IndexReader reader, int threadCount)
+        {
+            MultiThreadTermVectorsReader[] mtr = new MultiThreadTermVectorsReader[threadCount];
+            for (int i = 0; i < threadCount; i++)
+            {
+                mtr[i] = new MultiThreadTermVectorsReader();
+                mtr[i].Init(reader);
+            }
+
+            // run until all threads finished
+            int threadsAlive = mtr.Length;
+            while (threadsAlive > 0)
+            {
+                //System.out.println("Threads alive");
+                Thread.Sleep(10);
+                threadsAlive = mtr.Length;
+                for (int i = 0; i < mtr.Length; i++)
+                {
+                    if (mtr[i].Alive)
+                    {
+                        break;
+                    }
+
+                    threadsAlive--;
+                }
+            }
+
+            long totalTime = 0L;
+            for (int i = 0; i < mtr.Length; i++)
+            {
+                totalTime += mtr[i].TimeElapsed;
+                mtr[i] = null;
+            }
+
+            //System.out.println("threadcount: " + mtr.Length + " average term vector time: " + totalTime/mtr.Length);
+        }
+    }
+
+    internal class MultiThreadTermVectorsReader : IThreadRunnable
+    {
+        private IndexReader Reader = null;
+        private ThreadClass t = null;
+
+        private readonly int RunsToDo = 100;
+        internal long TimeElapsed = 0;
+
+        public virtual void Init(IndexReader reader)
+        {
+            this.Reader = reader;
+            TimeElapsed = 0;
+            t = new ThreadClass(new System.Threading.ThreadStart(this.Run));
+            t.Start();
+        }
+
+        public virtual bool Alive
+        {
+            get
+            {
+                if (t == null)
+                {
+                    return false;
+                }
+
+                return t.IsAlive;
+            }
+        }
+
+        public void Run()
+        {
+            try
+            {
+                // run the test 100 times
+                for (int i = 0; i < RunsToDo; i++)
+                {
+                    TestTermVectors();
+                }
+            }
+            catch (Exception e)
+            {
+                Console.WriteLine(e.ToString());
+                Console.Write(e.StackTrace);
+            }
+            return;
+        }
+
+        private void TestTermVectors()
+        {
+            // check term vectors for every doc, timing each lookup:
+            int numDocs = Reader.NumDocs;
+            long start = 0L;
+            for (int docId = 0; docId < numDocs; docId++)
+            {
+                start = Environment.TickCount;
+                Fields vectors = Reader.GetTermVectors(docId);
+                TimeElapsed += Environment.TickCount - start;
+
+                // verify vectors result
+                VerifyVectors(vectors, docId);
+
+                start = Environment.TickCount;
+                Terms vector = Reader.GetTermVectors(docId).GetTerms("field");
+                TimeElapsed += Environment.TickCount - start;
+
+                VerifyVector(vector.GetIterator(null), docId);
+            }
+        }
+
+        private void VerifyVectors(Fields vectors, int num)
+        {
+            foreach (string field in vectors)
+            {
+                Terms terms = vectors.GetTerms(field);
+                Debug.Assert(terms != null);
+                VerifyVector(terms.GetIterator(null), num);
+            }
+        }
+
+        private void VerifyVector(TermsEnum vector, int num)
+        {
+            StringBuilder temp = new StringBuilder();
+            while (vector.Next() != null)
+            {
+                temp.Append(vector.Term.Utf8ToString());
+            }
+            if (!English.IntToEnglish(num).Trim().Equals(temp.ToString().Trim()))
+            {
+                Console.WriteLine("wrong term result");
+            }
+        }
+    }
+}
\ No newline at end of file
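
Each MultiThreadTermVectorsReader above exercises the same public IndexReader API from several threads. A minimal single-threaded sketch of that access pattern (reader and docId assumed in scope):

    // Sketch: fetch the term vector for one document and enumerate its terms.
    Fields vectors = reader.GetTermVectors(docId);
    Terms terms = vectors.GetTerms("field");
    TermsEnum termsEnum = terms.GetIterator(null);
    while (termsEnum.Next() != null)
    {
        Console.WriteLine(termsEnum.Term.Utf8ToString());
    }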

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestMultiValuedNumericRangeQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestMultiValuedNumericRangeQuery.cs b/src/Lucene.Net.Tests/Search/TestMultiValuedNumericRangeQuery.cs
new file mode 100644
index 0000000..5c66182
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestMultiValuedNumericRangeQuery.cs
@@ -0,0 +1,88 @@
+using System.Globalization;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using Int32Field = Int32Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestMultiValuedNumericRangeQuery : LuceneTestCase
+    {
+        /// <summary>
+        /// Tests NumericRangeQuery on a multi-valued field (multiple numeric values per document).
+        /// This test ensures that a classical TermRangeQuery returns exactly the same document numbers as
+        /// NumericRangeQuery (see SOLR-1322 for discussion), and that the multiple precision terms per numeric value
+        /// do not interfere with multiple numeric values.
+        /// </summary>
+        [Test]
+        public virtual void TestMultiValuedNRQ()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(TestUtil.NextInt(Random(), 50, 1000)));
+            const string format = "D11";
+
+            int num = AtLeast(500);
+            for (int l = 0; l < num; l++)
+            {
+                Document doc = new Document();
+                for (int m = 0, c = Random().Next(10); m <= c; m++)
+                {
+                    int value = Random().Next(int.MaxValue);
+                    doc.Add(NewStringField("asc", value.ToString(format), Field.Store.NO));
+                    doc.Add(new Int32Field("trie", value, Field.Store.NO));
+                }
+                writer.AddDocument(doc);
+            }
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            IndexSearcher searcher = NewSearcher(reader);
+            num = AtLeast(50);
+            for (int i = 0; i < num; i++)
+            {
+                int lower = Random().Next(int.MaxValue);
+                int upper = Random().Next(int.MaxValue);
+                if (lower > upper)
+                {
+                    int a = lower;
+                    lower = upper;
+                    upper = a;
+                }
+                TermRangeQuery cq = TermRangeQuery.NewStringRange("asc", lower.ToString(format), upper.ToString(format), true, true);
+                NumericRangeQuery<int> tq = NumericRangeQuery.NewInt32Range("trie", lower, upper, true, true);
+                TopDocs trTopDocs = searcher.Search(cq, 1);
+                TopDocs nrTopDocs = searcher.Search(tq, 1);
+                Assert.AreEqual(trTopDocs.TotalHits, nrTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
+            }
+            reader.Dispose();
+            directory.Dispose();
+        }
+    }
+}
\ No newline at end of file
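
The equivalence asserted above hinges on the "D11" format string: zero-padding every value to 11 digits makes ordinal string order coincide with numeric order, so the TermRangeQuery over "asc" and the NumericRangeQuery over "trie" must agree. A self-contained sketch of that invariant:

    // Sketch: zero-padded decimal strings sort the same way as the ints they encode.
    int a = 42, b = 1000;
    string sa = a.ToString("D11");   // "00000000042"
    string sb = b.ToString("D11");   // "00000001000"
    bool sameOrder = (string.CompareOrdinal(sa, sb) < 0) == (a < b);  // true for any non-negative a, b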

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestNGramPhraseQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestNGramPhraseQuery.cs b/src/Lucene.Net.Tests/Search/TestNGramPhraseQuery.cs
new file mode 100644
index 0000000..c445929
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestNGramPhraseQuery.cs
@@ -0,0 +1,113 @@
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+
+    [TestFixture]
+    public class TestNGramPhraseQuery : LuceneTestCase
+    {
+        private static IndexReader Reader;
+        private static Directory Directory;
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Is non-static because Similarity and TimeZone are not static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, Similarity, TimeZone);
+            writer.Dispose();
+            Reader = DirectoryReader.Open(Directory);
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            Reader.Dispose();
+            Reader = null;
+            Directory.Dispose();
+            Directory = null;
+        }
+
+        [Test]
+        public virtual void TestRewrite()
+        {
+            // bi-gram test ABC => AB/BC => AB/BC
+            PhraseQuery pq1 = new NGramPhraseQuery(2);
+            pq1.Add(new Term("f", "AB"));
+            pq1.Add(new Term("f", "BC"));
+
+            Query q = pq1.Rewrite(Reader);
+            Assert.IsTrue(q is NGramPhraseQuery);
+            Assert.AreSame(pq1, q);
+            pq1 = (NGramPhraseQuery)q;
+            Assert.AreEqual(new Term[] { new Term("f", "AB"), new Term("f", "BC") }, pq1.GetTerms());
+            Assert.AreEqual(new int[] { 0, 1 }, pq1.GetPositions());
+
+            // bi-gram test ABCD => AB/BC/CD => AB//CD
+            PhraseQuery pq2 = new NGramPhraseQuery(2);
+            pq2.Add(new Term("f", "AB"));
+            pq2.Add(new Term("f", "BC"));
+            pq2.Add(new Term("f", "CD"));
+
+            q = pq2.Rewrite(Reader);
+            Assert.IsTrue(q is PhraseQuery);
+            Assert.AreNotSame(pq2, q);
+            pq2 = (PhraseQuery)q;
+            Assert.AreEqual(new Term[] { new Term("f", "AB"), new Term("f", "CD") }, pq2.GetTerms());
+            Assert.AreEqual(new int[] { 0, 2 }, pq2.GetPositions());
+
+            // tri-gram test ABCDEFGH => ABC/BCD/CDE/DEF/EFG/FGH => ABC///DEF//FGH
+            PhraseQuery pq3 = new NGramPhraseQuery(3);
+            pq3.Add(new Term("f", "ABC"));
+            pq3.Add(new Term("f", "BCD"));
+            pq3.Add(new Term("f", "CDE"));
+            pq3.Add(new Term("f", "DEF"));
+            pq3.Add(new Term("f", "EFG"));
+            pq3.Add(new Term("f", "FGH"));
+
+            q = pq3.Rewrite(Reader);
+            Assert.IsTrue(q is PhraseQuery);
+            Assert.AreNotSame(pq3, q);
+            pq3 = (PhraseQuery)q;
+            Assert.AreEqual(new Term[] { new Term("f", "ABC"), new Term("f", "DEF"), new Term("f", "FGH") }, pq3.GetTerms());
+            Assert.AreEqual(new int[] { 0, 3, 5 }, pq3.GetPositions());
+
+            // LUCENE-4970: boosting test
+            PhraseQuery pq4 = new NGramPhraseQuery(2);
+            pq4.Add(new Term("f", "AB"));
+            pq4.Add(new Term("f", "BC"));
+            pq4.Add(new Term("f", "CD"));
+            pq4.Boost = 100.0F;
+
+            q = pq4.Rewrite(Reader);
+            Assert.AreNotSame(pq4, q);
+            Assert.AreEqual(pq4.Boost, q.Boost, 0.1f);
+        }
+    }
+}
\ No newline at end of file
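
The rewrites asserted above work because interior n-grams of a phrase are redundant: with bi-grams, matching "AB" at position 0 and "CD" at position 2 already forces "BC" at position 1. A minimal sketch of triggering that optimization (reader assumed in scope):

    // Sketch: NGramPhraseQuery drops redundant interior bi-grams on rewrite.
    PhraseQuery pq = new NGramPhraseQuery(2);
    pq.Add(new Term("f", "AB"));
    pq.Add(new Term("f", "BC"));
    pq.Add(new Term("f", "CD"));
    Query optimized = pq.Rewrite(reader);
    // optimized is a plain PhraseQuery with terms AB, CD at positions 0 and 2.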

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestNot.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestNot.cs b/src/Lucene.Net.Tests/Search/TestNot.cs
new file mode 100644
index 0000000..99611aa
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestNot.cs
@@ -0,0 +1,65 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Tests prohibited (MUST_NOT) clauses in BooleanQuery.
+    /// </summary>
+    [TestFixture]
+    public class TestNot : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestNot_Mem()
+        {
+            Directory store = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), store, Similarity, TimeZone);
+
+            Document d1 = new Document();
+            d1.Add(NewTextField("field", "a b", Field.Store.YES));
+
+            writer.AddDocument(d1);
+            IndexReader reader = writer.Reader;
+
+            IndexSearcher searcher = NewSearcher(reader);
+
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term("field", "a")), Occur.SHOULD);
+            query.Add(new TermQuery(new Term("field", "b")), Occur.MUST_NOT);
+
+            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+            writer.Dispose();
+            reader.Dispose();
+            store.Dispose();
+        }
+    }
+}
\ No newline at end of file


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestEarlyTermination.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestEarlyTermination.cs b/src/Lucene.Net.Tests/Search/TestEarlyTermination.cs
new file mode 100644
index 0000000..8ffddd1
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestEarlyTermination.cs
@@ -0,0 +1,124 @@
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Document = Documents.Document;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+
+    [TestFixture]
+    public class TestEarlyTermination : LuceneTestCase
+    {
+        internal Directory Dir;
+        internal RandomIndexWriter Writer;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Dir = NewDirectory();
+            Writer = new RandomIndexWriter(Random(), Dir, Similarity, TimeZone);
+            int numDocs = AtLeast(100);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Writer.AddDocument(new Document());
+                if (Rarely())
+                {
+                    Writer.Commit();
+                }
+            }
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            base.TearDown();
+            Writer.Dispose();
+            Dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestEarlyTermination_Mem()
+        {
+            int iters = AtLeast(5);
+            IndexReader reader = Writer.Reader;
+
+            for (int i = 0; i < iters; ++i)
+            {
+                IndexSearcher searcher = NewSearcher(reader);
+                ICollector collector = new CollectorAnonymousInnerClassHelper(this);
+
+                searcher.Search(new MatchAllDocsQuery(), collector);
+            }
+            reader.Dispose();
+        }
+
+        private class CollectorAnonymousInnerClassHelper : ICollector
+        {
+            private readonly TestEarlyTermination OuterInstance;
+
+            public CollectorAnonymousInnerClassHelper(TestEarlyTermination outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+                outOfOrder = Random().NextBoolean();
+                collectionTerminated = true;
+            }
+
+            internal readonly bool outOfOrder;
+            internal bool collectionTerminated;
+
+            public virtual void SetScorer(Scorer scorer)
+            {
+            }
+
+            public virtual void Collect(int doc)
+            {
+                Assert.IsFalse(collectionTerminated);
+                if (Rarely())
+                {
+                    collectionTerminated = true;
+                    throw new CollectionTerminatedException();
+                }
+            }
+
+            public virtual void SetNextReader(AtomicReaderContext context)
+            {
+                if (Random().NextBoolean())
+                {
+                    collectionTerminated = true;
+                    throw new CollectionTerminatedException();
+                }
+                else
+                {
+                    collectionTerminated = false;
+                }
+            }
+
+            public virtual bool AcceptsDocsOutOfOrder
+            {
+                get { return outOfOrder; }
+            }
+        }
+    }
+}
\ No newline at end of file
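
The anonymous collector above terminates collection by throwing CollectionTerminatedException, which IndexSearcher catches per segment before moving on to the next one. A standalone sketch of the same idea, stopping each segment after a fixed number of hits (the class name and limit are illustrative):

    // Sketch: stop collecting each segment after the first n hits.
    internal class FirstNCollector : ICollector
    {
        private readonly int n;
        private int count;

        public FirstNCollector(int n)
        {
            this.n = n;
        }

        public void SetScorer(Scorer scorer)
        {
        }

        public void Collect(int doc)
        {
            if (++count > n)
            {
                throw new CollectionTerminatedException(); // caught by IndexSearcher
            }
        }

        public void SetNextReader(AtomicReaderContext context)
        {
            count = 0; // the limit applies per segment
        }

        public bool AcceptsDocsOutOfOrder
        {
            get { return true; }
        }
    }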

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestElevationComparator.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestElevationComparator.cs b/src/Lucene.Net.Tests/Search/TestElevationComparator.cs
new file mode 100644
index 0000000..832dc1b
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestElevationComparator.cs
@@ -0,0 +1,240 @@
+using System;
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Index;
+    using Lucene.Net.Store;
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Document = Documents.Document;
+    using Entry = Lucene.Net.Search.FieldValueHitQueue.Entry;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+
+    [TestFixture]
+    public class TestElevationComparer : LuceneTestCase
+    {
+        private readonly IDictionary<BytesRef, int?> Priority = new Dictionary<BytesRef, int?>();
+
+        [Test]
+        public virtual void TestSorting()
+        {
+            Directory directory = NewDirectory();
+            IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(1000)).SetSimilarity(new DefaultSimilarity()));
+            writer.AddDocument(Adoc(new string[] { "id", "a", "title", "ipod", "str_s", "a" }));
+            writer.AddDocument(Adoc(new string[] { "id", "b", "title", "ipod ipod", "str_s", "b" }));
+            writer.AddDocument(Adoc(new string[] { "id", "c", "title", "ipod ipod ipod", "str_s", "c" }));
+            writer.AddDocument(Adoc(new string[] { "id", "x", "title", "boosted", "str_s", "x" }));
+            writer.AddDocument(Adoc(new string[] { "id", "y", "title", "boosted boosted", "str_s", "y" }));
+            writer.AddDocument(Adoc(new string[] { "id", "z", "title", "boosted boosted boosted", "str_s", "z" }));
+
+            IndexReader r = DirectoryReader.Open(writer, true);
+            writer.Dispose();
+
+            IndexSearcher searcher = NewSearcher(r);
+            searcher.Similarity = new DefaultSimilarity();
+
+            RunTest(searcher, true);
+            RunTest(searcher, false);
+
+            r.Dispose();
+            directory.Dispose();
+        }
+
+        private void RunTest(IndexSearcher searcher, bool reversed)
+        {
+            BooleanQuery newq = new BooleanQuery(false);
+            TermQuery query = new TermQuery(new Term("title", "ipod"));
+
+            newq.Add(query, Occur.SHOULD);
+            newq.Add(GetElevatedQuery(new string[] { "id", "a", "id", "x" }), Occur.SHOULD);
+
+            Sort sort = new Sort(
+                new SortField("id", new ElevationComparerSource(Priority), false),
+                new SortField(null, SortFieldType.SCORE, reversed));
+
+            TopDocsCollector<Entry> topCollector = TopFieldCollector.Create(sort, 50, false, true, true, true);
+            searcher.Search(newq, null, topCollector);
+
+            TopDocs topDocs = topCollector.GetTopDocs(0, 10);
+            int nDocsReturned = topDocs.ScoreDocs.Length;
+
+            Assert.AreEqual(4, nDocsReturned);
+
+            // 0 & 3 were elevated
+            Assert.AreEqual(0, topDocs.ScoreDocs[0].Doc);
+            Assert.AreEqual(3, topDocs.ScoreDocs[1].Doc);
+
+            if (reversed)
+            {
+                Assert.AreEqual(2, topDocs.ScoreDocs[2].Doc);
+                Assert.AreEqual(1, topDocs.ScoreDocs[3].Doc);
+            }
+            else
+            {
+                Assert.AreEqual(1, topDocs.ScoreDocs[2].Doc);
+                Assert.AreEqual(2, topDocs.ScoreDocs[3].Doc);
+            }
+
+            /*
+            for (int i = 0; i < nDocsReturned; i++) {
+             ScoreDoc scoreDoc = topDocs.ScoreDocs[i];
+             ids[i] = scoreDoc.Doc;
+             scores[i] = scoreDoc.Score;
+             documents[i] = searcher.Doc(ids[i]);
+             System.out.println("ids[i] = " + ids[i]);
+             System.out.println("documents[i] = " + documents[i]);
+             System.out.println("scores[i] = " + scores[i]);
+           }
+            */
+        }
+
+        private Query GetElevatedQuery(string[] vals)
+        {
+            BooleanQuery q = new BooleanQuery(false);
+            q.Boost = 0;
+            int max = (vals.Length / 2) + 5;
+            for (int i = 0; i < vals.Length - 1; i += 2)
+            {
+                q.Add(new TermQuery(new Term(vals[i], vals[i + 1])), Occur.SHOULD);
+                Priority[new BytesRef(vals[i + 1])] = Convert.ToInt32(max--);
+                // System.out.println(" pri doc=" + vals[i+1] + " pri=" + (1+max));
+            }
+            return q;
+        }
+
+        private Document Adoc(string[] vals)
+        {
+            Document doc = new Document();
+            for (int i = 0; i < vals.Length - 2; i += 2)
+            {
+                doc.Add(NewTextField(vals[i], vals[i + 1], Field.Store.YES));
+            }
+            return doc;
+        }
+    }
+
+    internal class ElevationComparerSource : FieldComparerSource
+    {
+        private readonly IDictionary<BytesRef, int?> Priority;
+
+        public ElevationComparerSource(IDictionary<BytesRef, int?> boosts)
+        {
+            this.Priority = boosts;
+        }
+
+        public override FieldComparer NewComparer(string fieldname, int numHits, int sortPos, bool reversed)
+        {
+            return new FieldComparerAnonymousInnerClassHelper(this, fieldname, numHits);
+        }
+
+        private class FieldComparerAnonymousInnerClassHelper : FieldComparer
+        {
+            private readonly ElevationComparerSource OuterInstance;
+
+            private string Fieldname;
+            private int NumHits;
+
+            public FieldComparerAnonymousInnerClassHelper(ElevationComparerSource outerInstance, string fieldname, int numHits)
+            {
+                this.OuterInstance = outerInstance;
+                this.Fieldname = fieldname;
+                this.NumHits = numHits;
+                values = new int[numHits];
+                tempBR = new BytesRef();
+            }
+
+            internal SortedDocValues idIndex;
+            private readonly int[] values;
+            private readonly BytesRef tempBR;
+            internal int bottomVal;
+
+            public override int CompareValues(object first, object second)
+            {
+                return ((IComparable) first).CompareTo(second);
+            }
+
+            public override int Compare(int slot1, int slot2)
+            {
+                return values[slot2] - values[slot1]; // values will be small enough that there is no overflow concern
+            }
+
+            public override void SetBottom(int slot)
+            {
+                bottomVal = values[slot];
+            }
+
+            public override void SetTopValue(object value)
+            {
+                throw new System.NotSupportedException();
+            }
+
+            private int DocVal(int doc)
+            {
+                int ord = idIndex.GetOrd(doc);
+                if (ord == -1)
+                {
+                    return 0;
+                }
+                else
+                {
+                    idIndex.LookupOrd(ord, tempBR);
+                    int? prio;
+                    if (OuterInstance.Priority.TryGetValue(tempBR, out prio))
+                    {
+                        return (int)prio;
+                    }
+                    return 0;
+                }
+            }
+
+            public override int CompareBottom(int doc)
+            {
+                return DocVal(doc) - bottomVal;
+            }
+
+            public override void Copy(int slot, int doc)
+            {
+                values[slot] = DocVal(doc);
+            }
+
+            public override FieldComparer SetNextReader(AtomicReaderContext context)
+            {
+                idIndex = FieldCache.DEFAULT.GetTermsIndex(context.AtomicReader, Fieldname);
+                return this;
+            }
+
+            // LUCENENET NOTE: This was value(int) in Lucene.
+            public override IComparable this[int slot]
+            {
+                get { return values[slot]; }
+            }
+
+            public override int CompareTop(int doc)
+            {
+                throw new System.NotSupportedException();
+            }
+        }
+    }
+}
\ No newline at end of file
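
RunTest above wires the comparer into a two-key Sort: elevated ids first, score second. A minimal usage sketch (the priority contents and the searcher/query variables are illustrative):

    // Sketch: elevate documents whose "id" value has a priority entry,
    // then rank everything else by score.
    var priority = new Dictionary<BytesRef, int?>();
    priority[new BytesRef("a")] = 2;  // highest priority
    priority[new BytesRef("x")] = 1;
    Sort sort = new Sort(
        new SortField("id", new ElevationComparerSource(priority), false),
        new SortField(null, SortFieldType.SCORE, false));
    TopDocsCollector<FieldValueHitQueue.Entry> collector =
        TopFieldCollector.Create(sort, 10, false, true, true, true);
    searcher.Search(query, null, collector);
    TopDocs topDocs = collector.GetTopDocs(0, 10);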

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestExplanations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestExplanations.cs b/src/Lucene.Net.Tests/Search/TestExplanations.cs
new file mode 100644
index 0000000..ee2abb2
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestExplanations.cs
@@ -0,0 +1,270 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Util;
+using NUnit.Framework;
+
+namespace Lucene.Net.Search
+{
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using SpanFirstQuery = Lucene.Net.Search.Spans.SpanFirstQuery;
+    using SpanNearQuery = Lucene.Net.Search.Spans.SpanNearQuery;
+    using SpanNotQuery = Lucene.Net.Search.Spans.SpanNotQuery;
+    using SpanOrQuery = Lucene.Net.Search.Spans.SpanOrQuery;
+    using SpanQuery = Lucene.Net.Search.Spans.SpanQuery;
+    using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Tests primitive queries (i.e. those that rewrite to themselves) to
+    /// ensure they match the expected set of docs, and that the score of each
+    /// match is equal to the value of the score's explanation.
+    ///
+    /// <para>
+    /// The assumption is that if all of the "primitive" queries work well,
+    /// then anything that rewrites to a primitive will work well also.
+    /// </para>
+    /// <para>See subclasses for the actual tests.</para>
+    /// </summary>
+    [TestFixture]
+    public class TestExplanations : LuceneTestCaseWithReducedFloatPrecision
+    {
+        protected internal static IndexSearcher Searcher;
+        protected internal static IndexReader Reader;
+        protected internal static Directory Directory;
+
+        public const string KEY = "KEY";
+
+        // boost on this field is the same as the iterator for the doc
+        public const string FIELD = "field";
+
+        // same contents, but no field boost
+        public const string ALTFIELD = "alt";
+
+        [OneTimeTearDown]
+        public static void AfterClassTestExplanations()
+        {
+            Searcher = null;
+            Reader.Dispose();
+            Reader = null;
+            Directory.Dispose();
+            Directory = null;
+        }
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Is non-static because NewIndexWriterConfig, NewTextField and
+        /// NewStringField are no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClassTestExplanations()
+        {
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            for (int i = 0; i < DocFields.Length; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField(KEY, "" + i, Field.Store.NO));
+                Field f = NewTextField(FIELD, DocFields[i], Field.Store.NO);
+                f.Boost = i;
+                doc.Add(f);
+                doc.Add(NewTextField(ALTFIELD, DocFields[i], Field.Store.NO));
+                writer.AddDocument(doc);
+            }
+            Reader = writer.Reader;
+            writer.Dispose();
+            Searcher = NewSearcher(Reader);
+        }
+
+        protected internal static readonly string[] DocFields = new string[] { "w1 w2 w3 w4 w5", "w1 w3 w2 w3 zz", "w1 xx w2 yy w3", "w1 w3 xx w2 yy w3 zz" };
+
+        /// <summary>
+        /// check the expDocNrs first, then check the query (and the explanations) </summary>
+        public virtual void Qtest(Query q, int[] expDocNrs)
+        {
+            CheckHits.CheckHitCollector(Random(), q, FIELD, Searcher, expDocNrs, Similarity);
+        }
+
+        /// <summary>
+        /// Tests a query using Qtest after wrapping it with both OptB and ReqB </summary>
+        /// <seealso cref="Qtest(Query, int[])"/>
+        /// <seealso cref="ReqB(Query)"/>
+        /// <seealso cref="OptB(Query)"/>
+        public virtual void Bqtest(Query q, int[] expDocNrs)
+        {
+            Qtest(ReqB(q), expDocNrs);
+            Qtest(OptB(q), expDocNrs);
+        }
+
+        /// <summary>
+        /// Convenience subclass of FieldCacheTermsFilter
+        /// </summary>
+        public class ItemizedFilter : FieldCacheTermsFilter
+        {
+            internal static string[] Int2str(int[] terms)
+            {
+                string[] @out = new string[terms.Length];
+                for (int i = 0; i < terms.Length; i++)
+                {
+                    @out[i] = "" + terms[i];
+                }
+                return @out;
+            }
+
+            public ItemizedFilter(string keyField, int[] keys)
+                : base(keyField, Int2str(keys))
+            {
+            }
+
+            public ItemizedFilter(int[] keys)
+                : base(KEY, Int2str(keys))
+            {
+            }
+        }
+
+        /// <summary>
+        /// helper for generating MultiPhraseQueries </summary>
+        public static Term[] Ta(string[] s)
+        {
+            Term[] t = new Term[s.Length];
+            for (int i = 0; i < s.Length; i++)
+            {
+                t[i] = new Term(FIELD, s[i]);
+            }
+            return t;
+        }
+
+        /// <summary>
+        /// MACRO for SpanTermQuery </summary>
+        public virtual SpanTermQuery St(string s)
+        {
+            return new SpanTermQuery(new Term(FIELD, s));
+        }
+
+        /// <summary>
+        /// MACRO for SpanNotQuery </summary>
+        public virtual SpanNotQuery Snot(SpanQuery i, SpanQuery e)
+        {
+            return new SpanNotQuery(i, e);
+        }
+
+        /// <summary>
+        /// MACRO for SpanOrQuery containing two SpanTerm queries </summary>
+        public virtual SpanOrQuery Sor(string s, string e)
+        {
+            return Sor(St(s), St(e));
+        }
+
+        /// <summary>
+        /// MACRO for SpanOrQuery containing two SpanQueries </summary>
+        public virtual SpanOrQuery Sor(SpanQuery s, SpanQuery e)
+        {
+            return new SpanOrQuery(s, e);
+        }
+
+        /// <summary>
+        /// MACRO for SpanOrQuery containing three SpanTerm queries </summary>
+        public virtual SpanOrQuery Sor(string s, string m, string e)
+        {
+            return Sor(St(s), St(m), St(e));
+        }
+
+        /// <summary>
+        /// MACRO for SpanOrQuery containing three SpanQueries </summary>
+        public virtual SpanOrQuery Sor(SpanQuery s, SpanQuery m, SpanQuery e)
+        {
+            return new SpanOrQuery(s, m, e);
+        }
+
+        /// <summary>
+        /// MACRO for SpanNearQuery containing two SpanTerm queries </summary>
+        public virtual SpanNearQuery Snear(string s, string e, int slop, bool inOrder)
+        {
+            return Snear(St(s), St(e), slop, inOrder);
+        }
+
+        /// <summary>
+        /// MACRO for SpanNearQuery containing two SpanQueries </summary>
+        public virtual SpanNearQuery Snear(SpanQuery s, SpanQuery e, int slop, bool inOrder)
+        {
+            return new SpanNearQuery(new SpanQuery[] { s, e }, slop, inOrder);
+        }
+
+        /// <summary>
+        /// MACRO for SpanNearQuery containing three SpanTerm queries </summary>
+        public virtual SpanNearQuery Snear(string s, string m, string e, int slop, bool inOrder)
+        {
+            return Snear(St(s), St(m), St(e), slop, inOrder);
+        }
+
+        /// <summary>
+        /// MACRO for SpanNearQuery containing three SpanQueries </summary>
+        public virtual SpanNearQuery Snear(SpanQuery s, SpanQuery m, SpanQuery e, int slop, bool inOrder)
+        {
+            return new SpanNearQuery(new SpanQuery[] { s, m, e }, slop, inOrder);
+        }
+
+        /// <summary>
+        /// MACRO for SpanFirst(SpanTermQuery) </summary>
+        public virtual SpanFirstQuery Sf(string s, int b)
+        {
+            return new SpanFirstQuery(St(s), b);
+        }
+
+        /// <summary>
+        /// MACRO: Wraps a Query in a BooleanQuery so that it is optional, along
+        /// with a second prohibited clause which will never match anything
+        /// </summary>
+        public virtual Query OptB(Query q)
+        {
+            BooleanQuery bq = new BooleanQuery(true);
+            bq.Add(q, Occur.SHOULD);
+            bq.Add(new TermQuery(new Term("NEVER", "MATCH")), Occur.MUST_NOT);
+            return bq;
+        }
+
+        /// <summary>
+        /// MACRO: Wraps a Query in a BooleanQuery so that it is required, along
+        /// with a second optional clause which will match everything
+        /// </summary>
+        public virtual Query ReqB(Query q)
+        {
+            BooleanQuery bq = new BooleanQuery(true);
+            bq.Add(q, Occur.MUST);
+            bq.Add(new TermQuery(new Term(FIELD, "w1")), Occur.SHOULD);
+            return bq;
+        }
+
+        /// <summary>
+        /// Placeholder: JUnit freaks if you don't have one test ... making
+        /// class abstract doesn't help
+        /// </summary>
+        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
+        public virtual void TestNoop()
+        {
+            /* NOOP */
+        }
+    }
+}
\ No newline at end of file
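
The macros above are thin wrappers over the span query constructors; for instance, Snear("w1", "w3", 1, true) expands to the direct construction below:

    // Sketch: an ordered SpanNearQuery over two terms with slop 1,
    // equivalent to Snear("w1", "w3", 1, true) from the helpers above.
    SpanQuery near = new SpanNearQuery(
        new SpanQuery[]
        {
            new SpanTermQuery(new Term("field", "w1")),
            new SpanTermQuery(new Term("field", "w3"))
        },
        1,      // slop
        true);  // inOrder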

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestFieldCache.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestFieldCache.cs b/src/Lucene.Net.Tests/Search/TestFieldCache.cs
new file mode 100644
index 0000000..d98b8b5
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestFieldCache.cs
@@ -0,0 +1,1058 @@
+using Lucene.Net.Randomized.Generators;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+using System.Diagnostics;
+using System.Collections.Generic;
+using System.Globalization;
+using System.IO;
+using System.Threading;
+
+namespace Lucene.Net.Search
+{
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using BinaryDocValuesField = Lucene.Net.Documents.BinaryDocValuesField;
+    using Document = Lucene.Net.Documents.Document;
+    using Field = Lucene.Net.Documents.Field;
+    using Store = Lucene.Net.Documents.Field.Store;
+    using Int32Field = Lucene.Net.Documents.Int32Field;
+    using Int64Field = Lucene.Net.Documents.Int64Field;
+    using NumericDocValuesField = Lucene.Net.Documents.NumericDocValuesField;
+    using SortedDocValuesField = Lucene.Net.Documents.SortedDocValuesField;
+    using SortedSetDocValuesField = Lucene.Net.Documents.SortedSetDocValuesField;
+    using StoredField = Lucene.Net.Documents.StoredField;
+    using AtomicReader = Lucene.Net.Index.AtomicReader;
+    using BinaryDocValues = Lucene.Net.Index.BinaryDocValues;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using DocTermOrds = Lucene.Net.Index.DocTermOrds;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using SlowCompositeReaderWrapper = Lucene.Net.Index.SlowCompositeReaderWrapper;
+    using SortedDocValues = Lucene.Net.Index.SortedDocValues;
+    using SortedSetDocValues = Lucene.Net.Index.SortedSetDocValues;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+    using Bytes = Lucene.Net.Search.FieldCache.Bytes;
+    using Doubles = Lucene.Net.Search.FieldCache.Doubles;
+    using Singles = Lucene.Net.Search.FieldCache.Singles;
+    using Int32s = Lucene.Net.Search.FieldCache.Int32s;
+    using Int64s = Lucene.Net.Search.FieldCache.Int64s;
+    using Int16s = Lucene.Net.Search.FieldCache.Int16s;
+    using Directory = Lucene.Net.Store.Directory;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using System.Text;
+
+    [TestFixture]
+    public class TestFieldCache : LuceneTestCase
+    {
+        private static AtomicReader Reader;
+        private static int NUM_DOCS;
+        private static int NUM_ORDS;
+        private static string[] UnicodeStrings;
+        private static BytesRef[][] MultiValued;
+        private static Directory Directory;
+
+        /// <summary>
+        /// LUCENENET specific. Ensure we have an infostream attached to the default FieldCache
+        /// when running the tests. In Java, this was done in the Core.Search.TestFieldCache.TestInfoStream() 
+        /// method (which polluted the state of these tests), but we need to make the tests self-contained 
+        /// so they can be run correctly regardless of order. Not setting the InfoStream skips an execution
+        /// path within these tests, so we should do it to make sure we test all of the code.
+        /// </summary>
+        public override void SetUp()
+        {
+            base.SetUp();
+            FieldCache.DEFAULT.InfoStream = new StringWriter();
+        }
+
+        /// <summary>
+        /// LUCENENET specific. See <see cref="SetUp()"/>. Dispose our InfoStream and set it to null
+        /// to avoid polluting the state of other tests.
+        /// </summary>
+        public override void TearDown()
+        {
+            FieldCache.DEFAULT.InfoStream.Dispose();
+            FieldCache.DEFAULT.InfoStream = null;
+            base.TearDown();
+        }
+
+
+        // LUCENENET: Changed to non-static because NewIndexWriterConfig is non-static
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            NUM_DOCS = AtLeast(500);
+            NUM_ORDS = AtLeast(2);
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, new MockAnalyzer(Random()), Similarity, TimeZone).SetMergePolicy(NewLogMergePolicy()));
+            long theLong = long.MaxValue;
+            double theDouble = double.MaxValue;
+            sbyte theByte = sbyte.MaxValue;
+            short theShort = short.MaxValue;
+            int theInt = int.MaxValue;
+            float theFloat = float.MaxValue;
+            UnicodeStrings = new string[NUM_DOCS];
+            //MultiValued = new BytesRef[NUM_DOCS, NUM_ORDS];
+            MultiValued = RectangularArrays.ReturnRectangularBytesRefArray(NUM_DOCS, NUM_ORDS);
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: setUp");
+            }
+            for (int i = 0; i < NUM_DOCS; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("theLong", (theLong--).ToString(CultureInfo.InvariantCulture), Field.Store.NO));
+                doc.Add(NewStringField("theDouble", (theDouble--).ToString("R", CultureInfo.InvariantCulture), Field.Store.NO));
+                doc.Add(NewStringField("theByte", (theByte--).ToString(CultureInfo.InvariantCulture), Field.Store.NO));
+                doc.Add(NewStringField("theShort", (theShort--).ToString(CultureInfo.InvariantCulture), Field.Store.NO));
+                doc.Add(NewStringField("theInt", (theInt--).ToString(CultureInfo.InvariantCulture), Field.Store.NO));
+                doc.Add(NewStringField("theFloat", (theFloat--).ToString("R", CultureInfo.InvariantCulture), Field.Store.NO));
+                if (i % 2 == 0)
+                {
+                    doc.Add(NewStringField("sparse", (i).ToString(CultureInfo.InvariantCulture), Field.Store.NO));
+                }
+
+                if (i % 2 == 0)
+                {
+                    doc.Add(new Int32Field("numInt", i, Field.Store.NO));
+                }
+
+                // sometimes skip the field:
+                if (Random().Next(40) != 17)
+                {
+                    UnicodeStrings[i] = GenerateString(i);
+                    doc.Add(NewStringField("theRandomUnicodeString", UnicodeStrings[i], Field.Store.YES));
+                }
+
+                // sometimes skip the field:
+                if (Random().Next(10) != 8)
+                {
+                    for (int j = 0; j < NUM_ORDS; j++)
+                    {
+                        string newValue = GenerateString(i);
+                        MultiValued[i][j] = new BytesRef(newValue);
+                        doc.Add(NewStringField("theRandomUnicodeMultiValuedField", newValue, Field.Store.YES));
+                    }
+                    Array.Sort(MultiValued[i]);
+                }
+                writer.AddDocument(doc);
+            }
+            IndexReader r = writer.Reader;
+            Reader = SlowCompositeReaderWrapper.Wrap(r);
+            writer.Dispose();
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            Reader.Dispose();
+            Reader = null;
+            Directory.Dispose();
+            Directory = null;
+            UnicodeStrings = null;
+            MultiValued = null;
+        }
+
+        [Test]
+        public virtual void TestInfoStream()
+        {
+            try
+            {
+                IFieldCache cache = FieldCache.DEFAULT;
+                StringBuilder sb = new StringBuilder();
+                using (var bos = new StringWriter(sb))
+                {
+                    cache.InfoStream = bos;
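+                    // Requesting the same field as two different types should write a WARNING to the InfoStream, which is asserted below.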
+                    cache.GetDoubles(Reader, "theDouble", false);
+                    cache.GetSingles(Reader, "theDouble", false);
+                }
+                Assert.IsTrue(sb.ToString(/*IOUtils.UTF_8*/).IndexOf("WARNING") != -1);
+            }
+            finally
+            {
+                FieldCache.DEFAULT.PurgeAllCaches();
+            }
+        }
+
+        [Test]
+        public virtual void Test()
+        {
+            IFieldCache cache = FieldCache.DEFAULT;
+            FieldCache.Doubles doubles = cache.GetDoubles(Reader, "theDouble", Random().NextBoolean());
+            Assert.AreSame(doubles, cache.GetDoubles(Reader, "theDouble", Random().NextBoolean()), "Second request to cache returns the same array");
+            Assert.AreSame(doubles, cache.GetDoubles(Reader, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, Random().NextBoolean()), "Second request with explicit parser returns the same array");
+            for (int i = 0; i < NUM_DOCS; i++)
+            {
+                Assert.IsTrue(doubles.Get(i) == (double.MaxValue - i), doubles.Get(i) + " does not equal: " + (double.MaxValue - i));
+            }
+
+            FieldCache.Int64s longs = cache.GetInt64s(Reader, "theLong", Random().NextBoolean());
+            Assert.AreSame(longs, cache.GetInt64s(Reader, "theLong", Random().NextBoolean()), "Second request to cache returns the same array");
+            Assert.AreSame(longs, cache.GetInt64s(Reader, "theLong", FieldCache.DEFAULT_INT64_PARSER, Random().NextBoolean()), "Second request with explicit parser returns the same array");
+            for (int i = 0; i < NUM_DOCS; i++)
+            {
+                Assert.IsTrue(longs.Get(i) == (long.MaxValue - i), longs.Get(i) + " does not equal: " + (long.MaxValue - i) + " i=" + i);
+            }
+
+#pragma warning disable 612, 618
+            FieldCache.Bytes bytes = cache.GetBytes(Reader, "theByte", Random().NextBoolean());
+            Assert.AreSame(bytes, cache.GetBytes(Reader, "theByte", Random().NextBoolean()), "Second request to cache returns the same array");
+            Assert.AreSame(bytes, cache.GetBytes(Reader, "theByte", FieldCache.DEFAULT_BYTE_PARSER, Random().NextBoolean()), "Second request with explicit parser returns the same array");
+            for (int i = 0; i < NUM_DOCS; i++)
+            {
+                Assert.IsTrue(bytes.Get(i) == (sbyte)(sbyte.MaxValue - i), bytes.Get(i) + " does not equal: " + (sbyte.MaxValue - i));
+            }
+
+            FieldCache.Int16s shorts = cache.GetInt16s(Reader, "theShort", Random().NextBoolean());
+            Assert.AreSame(shorts, cache.GetInt16s(Reader, "theShort", Random().NextBoolean()), "Second request to cache returns the same array");
+            Assert.AreSame(shorts, cache.GetInt16s(Reader, "theShort", FieldCache.DEFAULT_INT16_PARSER, Random().NextBoolean()), "Second request with explicit parser returns the same array");
+            for (int i = 0; i < NUM_DOCS; i++)
+            {
+                Assert.IsTrue(shorts.Get(i) == (short)(short.MaxValue - i), shorts.Get(i) + " does not equal: " + (short.MaxValue - i));
+            }
+#pragma warning restore 612, 618
+
+            FieldCache.Int32s ints = cache.GetInt32s(Reader, "theInt", Random().NextBoolean());
+            Assert.AreSame(ints, cache.GetInt32s(Reader, "theInt", Random().NextBoolean()), "Second request to cache returns the same array");
+            Assert.AreSame(ints, cache.GetInt32s(Reader, "theInt", FieldCache.DEFAULT_INT32_PARSER, Random().NextBoolean()), "Second request with explicit parser returns the same array");
+            for (int i = 0; i < NUM_DOCS; i++)
+            {
+                Assert.IsTrue(ints.Get(i) == (int.MaxValue - i), ints.Get(i) + " does not equal: " + (int.MaxValue - i));
+            }
+
+            FieldCache.Singles floats = cache.GetSingles(Reader, "theFloat", Random().NextBoolean());
+            Assert.AreSame(floats, cache.GetSingles(Reader, "theFloat", Random().NextBoolean()), "Second request to cache returns the same array");
+            Assert.AreSame(floats, cache.GetSingles(Reader, "theFloat", FieldCache.DEFAULT_SINGLE_PARSER, Random().NextBoolean()), "Second request with explicit parser returns the same array");
+            for (int i = 0; i < NUM_DOCS; i++)
+            {
+                Assert.IsTrue(floats.Get(i) == (float.MaxValue - i), floats.Get(i) + " does not equal: " + (float.MaxValue - i));
+            }
+
+            IBits docsWithField = cache.GetDocsWithField(Reader, "theLong");
+            Assert.AreSame(docsWithField, cache.GetDocsWithField(Reader, "theLong"), "Second request to cache returns the same array");
+            Assert.IsTrue(docsWithField is Bits.MatchAllBits, "docsWithField(theLong) must be class Bits.MatchAllBits");
+            Assert.IsTrue(docsWithField.Length == NUM_DOCS, "docsWithField(theLong) Size: " + docsWithField.Length + " is not: " + NUM_DOCS);
+            for (int i = 0; i < docsWithField.Length; i++)
+            {
+                Assert.IsTrue(docsWithField.Get(i));
+            }
+
+            docsWithField = cache.GetDocsWithField(Reader, "sparse");
+            Assert.AreSame(docsWithField, cache.GetDocsWithField(Reader, "sparse"), "Second request to cache returns the same array");
+            Assert.IsFalse(docsWithField is Bits.MatchAllBits, "docsWithField(sparse) must not be class Bits.MatchAllBits");
+            Assert.IsTrue(docsWithField.Length == NUM_DOCS, "docsWithField(sparse) Size: " + docsWithField.Length + " is not: " + NUM_DOCS);
+            for (int i = 0; i < docsWithField.Length; i++)
+            {
+                Assert.AreEqual(i % 2 == 0, docsWithField.Get(i));
+            }
+
+            // getTermsIndex
+            SortedDocValues termsIndex = cache.GetTermsIndex(Reader, "theRandomUnicodeString");
+            Assert.AreSame(termsIndex, cache.GetTermsIndex(Reader, "theRandomUnicodeString"), "Second request to cache returns the same array");
+            BytesRef br = new BytesRef();
+            for (int i = 0; i < NUM_DOCS; i++)
+            {
+                BytesRef term;
+                int ord = termsIndex.GetOrd(i);
+                if (ord == -1)
+                {
+                    term = null;
+                }
+                else
+                {
+                    termsIndex.LookupOrd(ord, br);
+                    term = br;
+                }
+                string s = term == null ? null : term.Utf8ToString();
+                Assert.IsTrue(UnicodeStrings[i] == null || UnicodeStrings[i].Equals(s), "for doc " + i + ": " + s + " does not equal: " + UnicodeStrings[i]);
+            }
+
+            int nTerms = termsIndex.ValueCount;
+
+            TermsEnum tenum = termsIndex.GetTermsEnum();
+            BytesRef val = new BytesRef();
+            for (int i = 0; i < nTerms; i++)
+            {
+                BytesRef val1 = tenum.Next();
+                termsIndex.LookupOrd(i, val);
+                // System.out.println("i="+i);
+                Assert.AreEqual(val, val1);
+            }
+
+            // seek the enum around (note this isn't a great test here)
+            int num = AtLeast(100);
+            for (int i = 0; i < num; i++)
+            {
+                int k = Random().Next(nTerms);
+                termsIndex.LookupOrd(k, val);
+                Assert.AreEqual(TermsEnum.SeekStatus.FOUND, tenum.SeekCeil(val));
+                Assert.AreEqual(val, tenum.Term);
+            }
+
+            for (int i = 0; i < nTerms; i++)
+            {
+                termsIndex.LookupOrd(i, val);
+                Assert.AreEqual(TermsEnum.SeekStatus.FOUND, tenum.SeekCeil(val));
+                Assert.AreEqual(val, tenum.Term);
+            }
+
+            // test bad field
+            termsIndex = cache.GetTermsIndex(Reader, "bogusfield");
+
+            // getTerms
+            BinaryDocValues terms = cache.GetTerms(Reader, "theRandomUnicodeString", true);
+            Assert.AreSame(terms, cache.GetTerms(Reader, "theRandomUnicodeString", true), "Second request to cache returns the same array");
+            IBits bits = cache.GetDocsWithField(Reader, "theRandomUnicodeString");
+            for (int i = 0; i < NUM_DOCS; i++)
+            {
+                terms.Get(i, br);
+                BytesRef term;
+                if (!bits.Get(i))
+                {
+                    term = null;
+                }
+                else
+                {
+                    term = br;
+                }
+                string s = term == null ? null : term.Utf8ToString();
+                Assert.IsTrue(UnicodeStrings[i] == null || UnicodeStrings[i].Equals(s), "for doc " + i + ": " + s + " does not equal: " + UnicodeStrings[i]);
+            }
+
+            // test bad field
+            terms = cache.GetTerms(Reader, "bogusfield", false);
+
+            // getDocTermOrds
+            SortedSetDocValues termOrds = cache.GetDocTermOrds(Reader, "theRandomUnicodeMultiValuedField");
+            int numEntries = cache.GetCacheEntries().Length;
+            // ask for it again, and check that we didn't create any additional entries:
+            termOrds = cache.GetDocTermOrds(Reader, "theRandomUnicodeMultiValuedField");
+            Assert.AreEqual(numEntries, cache.GetCacheEntries().Length);
+
+            for (int i = 0; i < NUM_DOCS; i++)
+            {
+                termOrds.SetDocument(i);
+                // this will remove identical terms. A DocTermOrds doesn't return duplicate ords for a docId
+                IList<BytesRef> values = new List<BytesRef>(new /*Linked*/HashSet<BytesRef>(Arrays.AsList(MultiValued[i])));
+                foreach (BytesRef v in values)
+                {
+                    if (v == null)
+                    {
+                        // why does this test use null values... instead of an empty list: confusing
+                        break;
+                    }
+                    long ord = termOrds.NextOrd();
+                    Debug.Assert(ord != SortedSetDocValues.NO_MORE_ORDS);
+                    BytesRef scratch = new BytesRef();
+                    termOrds.LookupOrd(ord, scratch);
+                    Assert.AreEqual(v, scratch);
+                }
+                Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, termOrds.NextOrd());
+            }
+
+            // test bad field
+            termOrds = cache.GetDocTermOrds(Reader, "bogusfield");
+            Assert.IsTrue(termOrds.ValueCount == 0);
+
+            FieldCache.DEFAULT.PurgeByCacheKey(Reader.CoreCacheKey);
+        }
+
+        [Test]
+        public virtual void TestEmptyIndex()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(500));
+            writer.Dispose();
+            IndexReader r = DirectoryReader.Open(dir);
+            AtomicReader reader = SlowCompositeReaderWrapper.Wrap(r);
+            FieldCache.DEFAULT.GetTerms(reader, "foobar", true);
+            FieldCache.DEFAULT.GetTermsIndex(reader, "foobar");
+            FieldCache.DEFAULT.PurgeByCacheKey(reader.CoreCacheKey);
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        private static string GenerateString(int i)
+        {
+            string s = null;
+            if (i > 0 && Random().Next(3) == 1)
+            {
+                // reuse past string -- try to find one that's not null
+                for (int iter = 0; iter < 10 && s == null; iter++)
+                {
+                    s = UnicodeStrings[Random().Next(i)];
+                }
+                if (s == null)
+                {
+                    s = TestUtil.RandomUnicodeString(Random());
+                }
+            }
+            else
+            {
+                s = TestUtil.RandomUnicodeString(Random());
+            }
+            return s;
+        }
+
+        [Test]
+        public virtual void TestDocsWithField()
+        {
+            IFieldCache cache = FieldCache.DEFAULT;
+            cache.PurgeAllCaches();
+            Assert.AreEqual(0, cache.GetCacheEntries().Length);
+            cache.GetDoubles(Reader, "theDouble", true);
+
+            // The double[] takes two slots (one w/ null parser, one
+            // w/ real parser), and docsWithField should also
+            // have been populated:
+            Assert.AreEqual(3, cache.GetCacheEntries().Length);
+            IBits bits = cache.GetDocsWithField(Reader, "theDouble");
+
+            // No new entries should appear:
+            Assert.AreEqual(3, cache.GetCacheEntries().Length);
+            Assert.IsTrue(bits is Bits.MatchAllBits);
+
+            Int32s ints = cache.GetInt32s(Reader, "sparse", true);
+            Assert.AreEqual(6, cache.GetCacheEntries().Length);
+            IBits docsWithField = cache.GetDocsWithField(Reader, "sparse");
+            Assert.AreEqual(6, cache.GetCacheEntries().Length);
+            for (int i = 0; i < docsWithField.Length; i++)
+            {
+                if (i % 2 == 0)
+                {
+                    Assert.IsTrue(docsWithField.Get(i));
+                    Assert.AreEqual(i, ints.Get(i));
+                }
+                else
+                {
+                    Assert.IsFalse(docsWithField.Get(i));
+                }
+            }
+
+            Int32s numInts = cache.GetInt32s(Reader, "numInt", Random().NextBoolean());
+            docsWithField = cache.GetDocsWithField(Reader, "numInt");
+            for (int i = 0; i < docsWithField.Length; i++)
+            {
+                if (i % 2 == 0)
+                {
+                    Assert.IsTrue(docsWithField.Get(i));
+                    Assert.AreEqual(i, numInts.Get(i));
+                }
+                else
+                {
+                    Assert.IsFalse(docsWithField.Get(i));
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestGetDocsWithFieldThreadSafety()
+        {
+            IFieldCache cache = FieldCache.DEFAULT;
+            cache.PurgeAllCaches();
+
+            int NUM_THREADS = 3;
+            ThreadClass[] threads = new ThreadClass[NUM_THREADS];
+            AtomicBoolean failed = new AtomicBoolean();
+            AtomicInt32 iters = new AtomicInt32();
+            int NUM_ITER = 200 * RANDOM_MULTIPLIER;
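+            // Post-phase action: each time all NUM_THREADS threads reach the barrier, purge the caches and bump the iteration count.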
+            Barrier restart = new Barrier(NUM_THREADS, (barrier) => new RunnableAnonymousInnerClassHelper(this, cache, iters).Run());
+            for (int threadIDX = 0; threadIDX < NUM_THREADS; threadIDX++)
+            {
+                threads[threadIDX] = new ThreadAnonymousInnerClassHelper(this, cache, failed, iters, NUM_ITER, restart);
+                threads[threadIDX].Start();
+            }
+
+            for (int threadIDX = 0; threadIDX < NUM_THREADS; threadIDX++)
+            {
+                threads[threadIDX].Join();
+            }
+            Assert.IsFalse(failed.Get());
+        }
+
+        private class RunnableAnonymousInnerClassHelper : IThreadRunnable
+        {
+            private readonly TestFieldCache OuterInstance;
+
+            private IFieldCache Cache;
+            private AtomicInt32 Iters;
+
+            public RunnableAnonymousInnerClassHelper(TestFieldCache outerInstance, IFieldCache cache, AtomicInt32 iters)
+            {
+                this.OuterInstance = outerInstance;
+                this.Cache = cache;
+                this.Iters = iters;
+            }
+
+            public void Run()
+            {
+                Cache.PurgeAllCaches();
+                Iters.IncrementAndGet();
+            }
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestFieldCache OuterInstance;
+
+            private IFieldCache Cache;
+            private AtomicBoolean Failed;
+            private AtomicInt32 Iters;
+            private int NUM_ITER;
+            private Barrier Restart;
+
+            public ThreadAnonymousInnerClassHelper(TestFieldCache outerInstance, IFieldCache cache, AtomicBoolean failed, AtomicInt32 iters, int NUM_ITER, Barrier restart)
+            {
+                this.OuterInstance = outerInstance;
+                this.Cache = cache;
+                this.Failed = failed;
+                this.Iters = iters;
+                this.NUM_ITER = NUM_ITER;
+                this.Restart = restart;
+            }
+
+            public override void Run()
+            {
+
+                try
+                {
+                    while (!Failed.Get())
+                    {
+                        int op = Random().Next(3);
+                        if (op == 0)
+                        {
+                            // Purge all caches & resume, once all
+                            // threads get here:
+                            Restart.SignalAndWait();
+                            if (Iters.Get() >= NUM_ITER)
+                            {
+                                break;
+                            }
+                        }
+                        else if (op == 1)
+                        {
+                            IBits docsWithField = Cache.GetDocsWithField(Reader, "sparse");
+                            for (int i = 0; i < docsWithField.Length; i++)
+                            {
+                                Assert.AreEqual(i % 2 == 0, docsWithField.Get(i));
+                            }
+                        }
+                        else
+                        {
+                            Int32s ints = Cache.GetInt32s(Reader, "sparse", true);
+                            IBits docsWithField = Cache.GetDocsWithField(Reader, "sparse");
+                            for (int i = 0; i < docsWithField.Length; i++)
+                            {
+                                if (i % 2 == 0)
+                                {
+                                    Assert.IsTrue(docsWithField.Get(i));
+                                    Assert.AreEqual(i, ints.Get(i));
+                                }
+                                else
+                                {
+                                    Assert.IsFalse(docsWithField.Get(i));
+                                }
+                            }
+                        }
+                    }
+                }
+                catch (Exception t)
+                {
+                    Failed.Set(true);
+                    throw new Exception(t.Message, t);
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestDocValuesIntegration()
+        {
+            AssumeTrue("3.x does not support docvalues", DefaultCodecSupportsDocValues());
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, null);
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);
+            Document doc = new Document();
+            doc.Add(new BinaryDocValuesField("binary", new BytesRef("binary value")));
+            doc.Add(new SortedDocValuesField("sorted", new BytesRef("sorted value")));
+            doc.Add(new NumericDocValuesField("numeric", 42));
+            if (DefaultCodecSupportsSortedSet())
+            {
+                doc.Add(new SortedSetDocValuesField("sortedset", new BytesRef("sortedset value1")));
+                doc.Add(new SortedSetDocValuesField("sortedset", new BytesRef("sortedset value2")));
+            }
+            iw.AddDocument(doc);
+            DirectoryReader ir = iw.Reader;
+            iw.Dispose();
+            AtomicReader ar = GetOnlySegmentReader(ir);
+
+            BytesRef scratch = new BytesRef();
+
+            // Binary type: can be retrieved via getTerms()
+            try
+            {
+                FieldCache.DEFAULT.GetInt32s(ar, "binary", false);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException expected)
+#pragma warning restore 168
+            {
+            }
+
+            BinaryDocValues binary = FieldCache.DEFAULT.GetTerms(ar, "binary", true);
+            binary.Get(0, scratch);
+            Assert.AreEqual("binary value", scratch.Utf8ToString());
+
+            try
+            {
+                FieldCache.DEFAULT.GetTermsIndex(ar, "binary");
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException expected)
+#pragma warning restore 168
+            {
+            }
+
+            try
+            {
+                FieldCache.DEFAULT.GetDocTermOrds(ar, "binary");
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException expected)
+#pragma warning restore 168
+            {
+            }
+
+            try
+            {
+                new DocTermOrds(ar, null, "binary");
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException expected)
+#pragma warning restore 168
+            {
+            }
+
+            IBits bits = FieldCache.DEFAULT.GetDocsWithField(ar, "binary");
+            Assert.IsTrue(bits.Get(0));
+
+            // Sorted type: can be retrieved via getTerms(), getTermsIndex(), getDocTermOrds()
+            try
+            {
+                FieldCache.DEFAULT.GetInt32s(ar, "sorted", false);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException expected)
+#pragma warning restore 168
+            {
+            }
+
+            try
+            {
+                new DocTermOrds(ar, null, "sorted");
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException expected)
+#pragma warning restore 168
+            {
+            }
+
+            binary = FieldCache.DEFAULT.GetTerms(ar, "sorted", true);
+            binary.Get(0, scratch);
+            Assert.AreEqual("sorted value", scratch.Utf8ToString());
+
+            SortedDocValues sorted = FieldCache.DEFAULT.GetTermsIndex(ar, "sorted");
+            Assert.AreEqual(0, sorted.GetOrd(0));
+            Assert.AreEqual(1, sorted.ValueCount);
+            sorted.Get(0, scratch);
+            Assert.AreEqual("sorted value", scratch.Utf8ToString());
+
+            SortedSetDocValues sortedSet = FieldCache.DEFAULT.GetDocTermOrds(ar, "sorted");
+            sortedSet.SetDocument(0);
+            Assert.AreEqual(0, sortedSet.NextOrd());
+            Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, sortedSet.NextOrd());
+            Assert.AreEqual(1, sortedSet.ValueCount);
+
+            bits = FieldCache.DEFAULT.GetDocsWithField(ar, "sorted");
+            Assert.IsTrue(bits.Get(0));
+
+            // Numeric type: can be retrieved via getInts() and so on
+            Int32s numeric = FieldCache.DEFAULT.GetInt32s(ar, "numeric", false);
+            Assert.AreEqual(42, numeric.Get(0));
+
+            try
+            {
+                FieldCache.DEFAULT.GetTerms(ar, "numeric", true);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException expected)
+#pragma warning restore 168
+            {
+            }
+
+            try
+            {
+                FieldCache.DEFAULT.GetTermsIndex(ar, "numeric");
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException expected)
+#pragma warning restore 168
+            {
+            }
+
+            try
+            {
+                FieldCache.DEFAULT.GetDocTermOrds(ar, "numeric");
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException expected)
+#pragma warning restore 168
+            {
+            }
+
+            try
+            {
+                new DocTermOrds(ar, null, "numeric");
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException expected)
+#pragma warning restore 168
+            {
+            }
+
+            bits = FieldCache.DEFAULT.GetDocsWithField(ar, "numeric");
+            Assert.IsTrue(bits.Get(0));
+
+            // SortedSet type: can be retrieved via getDocTermOrds() 
+            if (DefaultCodecSupportsSortedSet())
+            {
+                try
+                {
+                    FieldCache.DEFAULT.GetInt32s(ar, "sortedset", false);
+                    Assert.Fail();
+                }
+#pragma warning disable 168
+                catch (InvalidOperationException expected)
+#pragma warning restore 168
+                {
+                }
+
+                try
+                {
+                    FieldCache.DEFAULT.GetTerms(ar, "sortedset", true);
+                    Assert.Fail();
+                }
+#pragma warning disable 168
+                catch (InvalidOperationException expected)
+#pragma warning restore 168
+                {
+                }
+
+                try
+                {
+                    FieldCache.DEFAULT.GetTermsIndex(ar, "sortedset");
+                    Assert.Fail();
+                }
+#pragma warning disable 168
+                catch (InvalidOperationException expected)
+#pragma warning restore 168
+                {
+                }
+
+                try
+                {
+                    new DocTermOrds(ar, null, "sortedset");
+                    Assert.Fail();
+                }
+#pragma warning disable 168
+                catch (InvalidOperationException expected)
+#pragma warning restore 168
+                {
+                }
+
+                sortedSet = FieldCache.DEFAULT.GetDocTermOrds(ar, "sortedset");
+                sortedSet.SetDocument(0);
+                Assert.AreEqual(0, sortedSet.NextOrd());
+                Assert.AreEqual(1, sortedSet.NextOrd());
+                Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, sortedSet.NextOrd());
+                Assert.AreEqual(2, sortedSet.ValueCount);
+
+                bits = FieldCache.DEFAULT.GetDocsWithField(ar, "sortedset");
+                Assert.IsTrue(bits.Get(0));
+            }
+
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNonexistentFields()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            iw.AddDocument(doc);
+            DirectoryReader ir = iw.Reader;
+            iw.Dispose();
+
+            AtomicReader ar = GetOnlySegmentReader(ir);
+
+            IFieldCache cache = FieldCache.DEFAULT;
+            cache.PurgeAllCaches();
+            Assert.AreEqual(0, cache.GetCacheEntries().Length);
+
+#pragma warning disable 612, 618
+            Bytes bytes = cache.GetBytes(ar, "bogusbytes", true);
+            Assert.AreEqual(0, bytes.Get(0));
+
+            Int16s shorts = cache.GetInt16s(ar, "bogusshorts", true);
+            Assert.AreEqual(0, shorts.Get(0));
+#pragma warning restore 612, 618
+
+            Int32s ints = cache.GetInt32s(ar, "bogusints", true);
+            Assert.AreEqual(0, ints.Get(0));
+
+            Int64s longs = cache.GetInt64s(ar, "boguslongs", true);
+            Assert.AreEqual(0, longs.Get(0));
+
+            Singles floats = cache.GetSingles(ar, "bogusfloats", true);
+            Assert.AreEqual(0, floats.Get(0), 0.0f);
+
+            Doubles doubles = cache.GetDoubles(ar, "bogusdoubles", true);
+            Assert.AreEqual(0, doubles.Get(0), 0.0D);
+
+            BytesRef scratch = new BytesRef();
+            BinaryDocValues binaries = cache.GetTerms(ar, "bogusterms", true);
+            binaries.Get(0, scratch);
+            Assert.AreEqual(0, scratch.Length);
+
+            SortedDocValues sorted = cache.GetTermsIndex(ar, "bogustermsindex");
+            Assert.AreEqual(-1, sorted.GetOrd(0));
+            sorted.Get(0, scratch);
+            Assert.AreEqual(0, scratch.Length);
+
+            SortedSetDocValues sortedSet = cache.GetDocTermOrds(ar, "bogusmultivalued");
+            sortedSet.SetDocument(0);
+            Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, sortedSet.NextOrd());
+
+            IBits bits = cache.GetDocsWithField(ar, "bogusbits");
+            Assert.IsFalse(bits.Get(0));
+
+            // check that we cached nothing
+            Assert.AreEqual(0, cache.GetCacheEntries().Length);
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNonIndexedFields()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(new StoredField("bogusbytes", "bogus"));
+            doc.Add(new StoredField("bogusshorts", "bogus"));
+            doc.Add(new StoredField("bogusints", "bogus"));
+            doc.Add(new StoredField("boguslongs", "bogus"));
+            doc.Add(new StoredField("bogusfloats", "bogus"));
+            doc.Add(new StoredField("bogusdoubles", "bogus"));
+            doc.Add(new StoredField("bogusterms", "bogus"));
+            doc.Add(new StoredField("bogustermsindex", "bogus"));
+            doc.Add(new StoredField("bogusmultivalued", "bogus"));
+            doc.Add(new StoredField("bogusbits", "bogus"));
+            iw.AddDocument(doc);
+            DirectoryReader ir = iw.Reader;
+            iw.Dispose();
+
+            AtomicReader ar = GetOnlySegmentReader(ir);
+
+            IFieldCache cache = FieldCache.DEFAULT;
+            cache.PurgeAllCaches();
+            Assert.AreEqual(0, cache.GetCacheEntries().Length);
+
+#pragma warning disable 612, 618
+            Bytes bytes = cache.GetBytes(ar, "bogusbytes", true);
+            Assert.AreEqual(0, bytes.Get(0));
+
+            Int16s shorts = cache.GetInt16s(ar, "bogusshorts", true);
+            Assert.AreEqual(0, shorts.Get(0));
+#pragma warning restore 612, 618
+
+            Int32s ints = cache.GetInt32s(ar, "bogusints", true);
+            Assert.AreEqual(0, ints.Get(0));
+
+            Int64s longs = cache.GetInt64s(ar, "boguslongs", true);
+            Assert.AreEqual(0, longs.Get(0));
+
+            Singles floats = cache.GetSingles(ar, "bogusfloats", true);
+            Assert.AreEqual(0, floats.Get(0), 0.0f);
+
+            Doubles doubles = cache.GetDoubles(ar, "bogusdoubles", true);
+            Assert.AreEqual(0, doubles.Get(0), 0.0D);
+
+            BytesRef scratch = new BytesRef();
+            BinaryDocValues binaries = cache.GetTerms(ar, "bogusterms", true);
+            binaries.Get(0, scratch);
+            Assert.AreEqual(0, scratch.Length);
+
+            SortedDocValues sorted = cache.GetTermsIndex(ar, "bogustermsindex");
+            Assert.AreEqual(-1, sorted.GetOrd(0));
+            sorted.Get(0, scratch);
+            Assert.AreEqual(0, scratch.Length);
+
+            SortedSetDocValues sortedSet = cache.GetDocTermOrds(ar, "bogusmultivalued");
+            sortedSet.SetDocument(0);
+            Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, sortedSet.NextOrd());
+
+            IBits bits = cache.GetDocsWithField(ar, "bogusbits");
+            Assert.IsFalse(bits.Get(0));
+
+            // check that we cached nothing
+            Assert.AreEqual(0, cache.GetCacheEntries().Length);
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        // Make sure that the use of GrowableWriter doesn't prevent using the full long range
+        [Test]
+        public virtual void TestLongFieldCache()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig cfg = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            cfg.SetMergePolicy(NewLogMergePolicy());
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, cfg);
+            Document doc = new Document();
+            Int64Field field = new Int64Field("f", 0L, Field.Store.YES);
+            doc.Add(field);
+            long[] values = new long[TestUtil.NextInt(Random(), 1, 10)];
+            for (int i = 0; i < values.Length; ++i)
+            {
+                long v;
+                switch (Random().Next(10))
+                {
+                    case 0:
+                        v = long.MinValue;
+                        break;
+                    case 1:
+                        v = 0;
+                        break;
+                    case 2:
+                        v = long.MaxValue;
+                        break;
+                    default:
+                        v = TestUtil.NextLong(Random(), -10, 10);
+                        break;
+                }
+                values[i] = v;
+                if (v == 0 && Random().NextBoolean())
+                {
+                    // missing
+                    iw.AddDocument(new Document());
+                }
+                else
+                {
+                    field.SetInt64Value(v);
+                    iw.AddDocument(doc);
+                }
+            }
+            iw.ForceMerge(1);
+            DirectoryReader reader = iw.Reader;
+            Int64s longs = FieldCache.DEFAULT.GetInt64s(GetOnlySegmentReader(reader), "f", false);
+            for (int i = 0; i < values.Length; ++i)
+            {
+                Assert.AreEqual(values[i], longs.Get(i));
+            }
+            reader.Dispose();
+            iw.Dispose();
+            dir.Dispose();
+        }
+
+        // Make sure that the use of GrowableWriter doesn't prevent using the full int range
+        [Test]
+        public virtual void TestIntFieldCache()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig cfg = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            cfg.SetMergePolicy(NewLogMergePolicy());
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, cfg);
+            Document doc = new Document();
+            Int32Field field = new Int32Field("f", 0, Field.Store.YES);
+            doc.Add(field);
+            int[] values = new int[TestUtil.NextInt(Random(), 1, 10)];
+            for (int i = 0; i < values.Length; ++i)
+            {
+                int v;
+                switch (Random().Next(10))
+                {
+                    case 0:
+                        v = int.MinValue;
+                        break;
+                    case 1:
+                        v = 0;
+                        break;
+                    case 2:
+                        v = int.MaxValue;
+                        break;
+                    default:
+                        v = TestUtil.NextInt(Random(), -10, 10);
+                        break;
+                }
+                values[i] = v;
+                if (v == 0 && Random().NextBoolean())
+                {
+                    // missing
+                    iw.AddDocument(new Document());
+                }
+                else
+                {
+                    field.SetInt32Value(v);
+                    iw.AddDocument(doc);
+                }
+            }
+            iw.ForceMerge(1);
+            DirectoryReader reader = iw.Reader;
+            Int32s ints = FieldCache.DEFAULT.GetInt32s(GetOnlySegmentReader(reader), "f", false);
+            for (int i = 0; i < values.Length; ++i)
+            {
+                Assert.AreEqual(values[i], ints.Get(i));
+            }
+            reader.Dispose();
+            iw.Dispose();
+            dir.Dispose();
+        }
+
+    }
+
+}
\ No newline at end of file


[70/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Refactored LuceneTestCase and test codecs back to their original implementation of using a static variable to determine if impersonation is active.

Posted by ni...@apache.org.
Lucene.Net.TestFramework: Refactored LuceneTestCase and test codecs back to their original implementation of using a static variable to determine if impersonation is active.
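
For orientation, the pattern this change restores is, roughly, the following condensed sketch of PreFlexRWCodec (the member names and the static flag are taken from the hunks below; the elided members follow the same shape, and the fallback branch is assumed to return the base format):

    public class PreFlexRWCodec : Lucene3xCodec
    {
        private readonly PostingsFormat postings = new PreFlexRWPostingsFormat();

        public override PostingsFormat PostingsFormat
        {
            get
            {
                // The static test-framework flag decides whether the impersonating
                // (RW) format or the base read-only format is used.
                if (LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
                {
                    return postings;
                }
                else
                {
                    return base.PostingsFormat;
                }
            }
        }

        // NormsFormat, FieldInfosFormat, TermVectorsFormat, SegmentInfoFormat and
        // StoredFieldsFormat gate on the same flag in the same way.
    }

This replaces the constructor-injected oldFormatImpersonationIsActive field that the per-codec constructors carried, as the diffs below show.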


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/4b0fa137
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/4b0fa137
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/4b0fa137

Branch: refs/heads/api-work
Commit: 4b0fa1374d9c30974d9ec68c29aeb2b4fad84e19
Parents: 3437f3b
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Mon Feb 27 06:10:19 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:18:03 2017 +0700

----------------------------------------------------------------------
 .../Codecs/Lucene3x/PreFlexRWCodec.cs           | 60 ++++++--------------
 .../Codecs/Lucene40/Lucene40RWCodec.cs          | 46 ++-------------
 .../Lucene40/Lucene40RWDocValuesFormat.cs       | 25 +-------
 .../Codecs/Lucene40/Lucene40RWNormsFormat.cs    | 25 +-------
 .../Codecs/Lucene40/Lucene40RWPostingsFormat.cs | 25 +-------
 .../Codecs/Lucene41/Lucene41RWCodec.cs          | 55 ++++--------------
 .../Codecs/Lucene42/Lucene42RWCodec.cs          | 38 +------------
 .../Lucene42/Lucene42RWDocValuesFormat.cs       | 25 +-------
 .../Codecs/Lucene45/Lucene45RWCodec.cs          | 36 +-----------
 .../Util/LuceneTestCase.cs                      | 30 ++++------
 .../Lucene3x/TestLucene3xPostingsFormat.cs      | 14 +++--
 .../Lucene3x/TestLucene3xStoredFieldsFormat.cs  | 16 +++---
 .../Lucene3x/TestLucene3xTermVectorsFormat.cs   |  3 +-
 .../Codecs/Lucene3x/TestSurrogates.cs           |  8 +--
 .../Codecs/Lucene3x/TestTermInfosReaderIndex.cs |  9 +--
 .../Lucene40/TestLucene40DocValuesFormat.cs     | 12 ++--
 .../Lucene40/TestLucene40PostingsFormat.cs      | 12 ++--
 .../Lucene40/TestLucene40PostingsReader.cs      |  7 +--
 .../Lucene40/TestLucene40StoredFieldsFormat.cs  | 10 +---
 .../Lucene40/TestLucene40TermVectorsFormat.cs   | 10 +---
 .../Codecs/Lucene40/TestReuseDocsEnum.cs        | 13 ++---
 .../Lucene41/TestLucene41StoredFieldsFormat.cs  |  9 +--
 .../Lucene42/TestLucene42DocValuesFormat.cs     | 12 ++--
 .../Index/TestBackwardsCompatibility.cs         |  2 +-
 .../Index/TestBackwardsCompatibility3x.cs       | 14 ++---
 .../Index/TestBinaryDocValuesUpdates.cs         | 24 +++-----
 src/Lucene.Net.Tests/Index/TestCodecs.cs        |  5 +-
 .../Index/TestNumericDocValuesUpdates.cs        |  5 +-
 28 files changed, 129 insertions(+), 421 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWCodec.cs
index 4d265d9..642d33f 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWCodec.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWCodec.cs
@@ -26,44 +26,20 @@ namespace Lucene.Net.Codecs.Lucene3x
 #pragma warning disable 612, 618
     public class PreFlexRWCodec : Lucene3xCodec
     {
-        private readonly PostingsFormat Postings = new PreFlexRWPostingsFormat();
-        private readonly Lucene3xNormsFormat Norms = new PreFlexRWNormsFormat();
-        private readonly FieldInfosFormat FieldInfos = new PreFlexRWFieldInfosFormat();
-        private readonly TermVectorsFormat TermVectors = new PreFlexRWTermVectorsFormat();
-        private readonly SegmentInfoFormat SegmentInfos = new PreFlexRWSegmentInfoFormat();
-        private readonly StoredFieldsFormat StoredFields = new PreFlexRWStoredFieldsFormat();
-        private readonly bool _oldFormatImpersonationIsActive;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public PreFlexRWCodec()
-            : this(true)
-        { }
-
-        /// <summary>
-        /// </summary>
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/>
-        /// </param>
-        public PreFlexRWCodec(bool oldFormatImpersonationIsActive) : base()
-        {
-            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-        }
+        private readonly PostingsFormat postings = new PreFlexRWPostingsFormat();
+        private readonly Lucene3xNormsFormat norms = new PreFlexRWNormsFormat();
+        private readonly FieldInfosFormat fieldInfos = new PreFlexRWFieldInfosFormat();
+        private readonly TermVectorsFormat termVectors = new PreFlexRWTermVectorsFormat();
+        private readonly SegmentInfoFormat segmentInfos = new PreFlexRWSegmentInfoFormat();
+        private readonly StoredFieldsFormat storedFields = new PreFlexRWStoredFieldsFormat();
 
         public override PostingsFormat PostingsFormat
         {
             get
             {
-                if (_oldFormatImpersonationIsActive)
+                if (LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
                 {
-                    return Postings;
+                    return postings;
                 }
                 else
                 {
@@ -76,9 +52,9 @@ namespace Lucene.Net.Codecs.Lucene3x
         {
             get
             {
-                if (_oldFormatImpersonationIsActive)
+                if (LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
                 {
-                    return Norms;
+                    return norms;
                 }
                 else
                 {
@@ -91,9 +67,9 @@ namespace Lucene.Net.Codecs.Lucene3x
         {
             get
             {
-                if (_oldFormatImpersonationIsActive)
+                if (LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
                 {
-                    return SegmentInfos;
+                    return segmentInfos;
                 }
                 else
                 {
@@ -106,9 +82,9 @@ namespace Lucene.Net.Codecs.Lucene3x
         {
             get
             {
-                if (_oldFormatImpersonationIsActive)
+                if (LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
                 {
-                    return FieldInfos;
+                    return fieldInfos;
                 }
                 else
                 {
@@ -121,9 +97,9 @@ namespace Lucene.Net.Codecs.Lucene3x
         {
             get
             {
-                if (_oldFormatImpersonationIsActive)
+                if (LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
                 {
-                    return TermVectors;
+                    return termVectors;
                 }
                 else
                 {
@@ -136,9 +112,9 @@ namespace Lucene.Net.Codecs.Lucene3x
         {
             get
             {
-                if (_oldFormatImpersonationIsActive)
+                if (LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
                 {
-                    return StoredFields;
+                    return storedFields;
                 }
                 else
                 {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWCodec.cs
index 79fbb42..7c69b61 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWCodec.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWCodec.cs
@@ -24,49 +24,15 @@ namespace Lucene.Net.Codecs.Lucene40
 #pragma warning disable 612, 618
     public sealed class Lucene40RWCodec : Lucene40Codec
     {
-        private readonly FieldInfosFormat fieldInfos;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public Lucene40RWCodec()
-            : this(true)
-        { }
-
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-        /// </param>
-        public Lucene40RWCodec(bool oldFormatImpersonationIsActive) : base()
-        {
-            fieldInfos = new Lucene40FieldInfosFormatAnonymousInnerClassHelper(oldFormatImpersonationIsActive);
-            DocValues = new Lucene40RWDocValuesFormat(oldFormatImpersonationIsActive);
-            Norms = new Lucene40RWNormsFormat(oldFormatImpersonationIsActive);
-        }
+        private readonly FieldInfosFormat fieldInfos = new Lucene40FieldInfosFormatAnonymousInnerClassHelper();
 
         private class Lucene40FieldInfosFormatAnonymousInnerClassHelper : Lucene40FieldInfosFormat
         {
-            private readonly bool _oldFormatImpersonationIsActive;
-
-            /// <param name="oldFormatImpersonationIsActive">
-            /// LUCENENET specific
-            /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-            /// </param>
-            public Lucene40FieldInfosFormatAnonymousInnerClassHelper(bool oldFormatImpersonationIsActive) : base()
-            {
-                _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-            }
-
             public override FieldInfosWriter FieldInfosWriter
             {
                 get
                 {
-                    if (!_oldFormatImpersonationIsActive)
+                    if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
                     {
                         return base.FieldInfosWriter;
                     }
@@ -78,8 +44,8 @@ namespace Lucene.Net.Codecs.Lucene40
             }
         }
 
-        private readonly DocValuesFormat DocValues;
-        private readonly NormsFormat Norms;
+        private readonly DocValuesFormat docValues = new Lucene40RWDocValuesFormat();
+        private readonly NormsFormat norms = new Lucene40RWNormsFormat();
 
         public override FieldInfosFormat FieldInfosFormat
         {
@@ -88,12 +54,12 @@ namespace Lucene.Net.Codecs.Lucene40
 
         public override DocValuesFormat DocValuesFormat
         {
-            get { return DocValues; }
+            get { return docValues; }
         }
 
         public override NormsFormat NormsFormat
         {
-            get { return Norms; }
+            get { return norms; }
         }
     }
 #pragma warning restore 612, 618

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWDocValuesFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWDocValuesFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWDocValuesFormat.cs
index 2281475..20c641a 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWDocValuesFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWDocValuesFormat.cs
@@ -26,32 +26,9 @@ namespace Lucene.Net.Codecs.Lucene40
 #pragma warning disable 612, 618
     public class Lucene40RWDocValuesFormat : Lucene40DocValuesFormat
     {
-        private readonly bool _oldFormatImpersonationIsActive;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public Lucene40RWDocValuesFormat()
-            : this(true)
-        { }
-
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-        /// </param>
-        public Lucene40RWDocValuesFormat(bool oldFormatImpersonationIsActive) : base()
-        {
-            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-        }
-
         public override DocValuesConsumer FieldsConsumer(SegmentWriteState state)
         {
-            if (!_oldFormatImpersonationIsActive)
+            if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
             {
                 return base.FieldsConsumer(state);
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWNormsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWNormsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWNormsFormat.cs
index 0830c86..12e5cf6 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWNormsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWNormsFormat.cs
@@ -26,32 +26,9 @@ namespace Lucene.Net.Codecs.Lucene40
 #pragma warning disable 612, 618
     public class Lucene40RWNormsFormat : Lucene40NormsFormat
     {
-        private readonly bool _oldFormatImpersonationIsActive;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public Lucene40RWNormsFormat()
-            : this(true)
-        { }
-
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-        /// </param>
-        public Lucene40RWNormsFormat(bool oldFormatImpersonationIsActive) : base()
-        {
-            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-        }
-
         public override DocValuesConsumer NormsConsumer(SegmentWriteState state)
         {
-            if (!_oldFormatImpersonationIsActive)
+            if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
             {
                 return base.NormsConsumer(state);
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWPostingsFormat.cs
index 7a2c9cf..aff3425 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWPostingsFormat.cs
@@ -26,32 +26,9 @@ namespace Lucene.Net.Codecs.Lucene40
 #pragma warning disable 612, 618
     public class Lucene40RWPostingsFormat : Lucene40PostingsFormat
     {
-        private readonly bool _oldFormatImpersonationIsActive;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public Lucene40RWPostingsFormat()
-            : this(true)
-        { }
-
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-        /// </param>
-        public Lucene40RWPostingsFormat(bool oldFormatImpersonationIsActive) : base()
-        {
-            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-        }
-
         public override FieldsConsumer FieldsConsumer(SegmentWriteState state)
         {
-            if (!_oldFormatImpersonationIsActive)
+            if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
             {
                 return base.FieldsConsumer(state);
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.TestFramework/Codecs/Lucene41/Lucene41RWCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene41/Lucene41RWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene41/Lucene41RWCodec.cs
index 2c6edef..a51a514 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene41/Lucene41RWCodec.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene41/Lucene41RWCodec.cs
@@ -29,55 +29,16 @@ namespace Lucene.Net.Codecs.Lucene41
 #pragma warning disable 612, 618
     public class Lucene41RWCodec : Lucene41Codec
     {
-        private readonly StoredFieldsFormat FieldsFormat = new Lucene41StoredFieldsFormat();
-        private readonly FieldInfosFormat fieldInfos;
-        private readonly DocValuesFormat DocValues;
-        private readonly NormsFormat Norms;
-        private readonly bool _oldFormatImpersonationIsActive;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public Lucene41RWCodec()
-            : this(true)
-        { }
-
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-        /// </param>
-        public Lucene41RWCodec(bool oldFormatImpersonationIsActive) : base()
-        {
-            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-
-            Norms = new Lucene40RWNormsFormat(oldFormatImpersonationIsActive);
-            fieldInfos = new Lucene40FieldInfosFormatAnonymousInnerClassHelper(oldFormatImpersonationIsActive);
-            DocValues = new Lucene40RWDocValuesFormat(oldFormatImpersonationIsActive);
-        }
+        private readonly StoredFieldsFormat fieldsFormat = new Lucene41StoredFieldsFormat();
+        private readonly FieldInfosFormat fieldInfos = new Lucene40FieldInfosFormatAnonymousInnerClassHelper();
 
         private class Lucene40FieldInfosFormatAnonymousInnerClassHelper : Lucene40FieldInfosFormat
         {
-            private readonly bool _oldFormatImpersonationIsActive;
-
-            /// <param name="oldFormatImpersonationIsActive">
-            /// LUCENENET specific
-            /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-            /// </param>
-            public Lucene40FieldInfosFormatAnonymousInnerClassHelper(bool oldFormatImpersonationIsActive) : base()
-            {
-                _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-            }
-
             public override FieldInfosWriter FieldInfosWriter
             {
                 get
                 {
-                    if (!_oldFormatImpersonationIsActive)
+                    if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
                     {
                         return base.FieldInfosWriter;
                     }
@@ -89,6 +50,10 @@ namespace Lucene.Net.Codecs.Lucene41
             }
         }
 
+        private readonly DocValuesFormat docValues = new Lucene40RWDocValuesFormat();
+        private readonly NormsFormat norms = new Lucene40RWNormsFormat();
+
+
         public override FieldInfosFormat FieldInfosFormat
         {
             get { return fieldInfos; }
@@ -96,17 +61,17 @@ namespace Lucene.Net.Codecs.Lucene41
 
         public override StoredFieldsFormat StoredFieldsFormat
         {
-            get { return FieldsFormat; }
+            get { return fieldsFormat; }
         }
 
         public override DocValuesFormat DocValuesFormat
         {
-            get { return DocValues; }
+            get { return docValues; }
         }
 
         public override NormsFormat NormsFormat
         {
-            get { return Norms; }
+            get { return norms; }
         }
     }
 #pragma warning restore 612, 618

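The Lucene41RWCodec change shows the composition side of the same cleanup: the read-write formats become plain field initializers exposed through overridden properties, and the Lucene40FieldInfosFormatAnonymousInnerClassHelper nested class is the C# port's stand-in for what would be an anonymous subclass in Java. A sketch of the wiring, reduced to two of the four formats (everything named here appears in the diff above):

    public class Lucene41RWCodec : Lucene41Codec
    {
        private readonly FieldInfosFormat fieldInfos =
            new Lucene40FieldInfosFormatAnonymousInnerClassHelper();
        private readonly NormsFormat norms = new Lucene40RWNormsFormat();

        public override FieldInfosFormat FieldInfosFormat
        {
            get { return fieldInfos; }
        }

        public override NormsFormat NormsFormat
        {
            get { return norms; }
        }
    }
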
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWCodec.cs
index 39e3b66..f117744 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWCodec.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWCodec.cs
@@ -25,50 +25,18 @@ namespace Lucene.Net.Codecs.Lucene42
 #pragma warning disable 612, 618
     public class Lucene42RWCodec : Lucene42Codec
     {
-        private readonly DocValuesFormat Dv;
+        private readonly DocValuesFormat Dv = new Lucene42RWDocValuesFormat();
         private readonly NormsFormat Norms = new Lucene42NormsFormat();
-        private readonly FieldInfosFormat fieldInfosFormat;
 
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public Lucene42RWCodec()
-            : this(true)
-        { }
-
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-        /// </param>
-        public Lucene42RWCodec(bool oldFormatImpersonationIsActive) : base()
-        {
-            Dv = new Lucene42RWDocValuesFormat(oldFormatImpersonationIsActive);
-            fieldInfosFormat = new Lucene42FieldInfosFormatAnonymousInnerClassHelper(oldFormatImpersonationIsActive);
-        }
+        private readonly FieldInfosFormat fieldInfosFormat = new Lucene42FieldInfosFormatAnonymousInnerClassHelper();
 
         private class Lucene42FieldInfosFormatAnonymousInnerClassHelper : Lucene42FieldInfosFormat
         {
-            private readonly bool _oldFormatImpersonationIsActive;
-
-            /// <param name="oldFormatImpersonationIsActive">
-            /// LUCENENET specific
-            /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-            /// </param>
-            public Lucene42FieldInfosFormatAnonymousInnerClassHelper(bool oldFormatImpersonationIsActive) : base()
-            {
-                _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-            }
-
             public override FieldInfosWriter FieldInfosWriter
             {
                 get
                 {
-                    if (!_oldFormatImpersonationIsActive)
+                    if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
                     {
                         return base.FieldInfosWriter;
                     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWDocValuesFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWDocValuesFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWDocValuesFormat.cs
index 1a29fe6..a7ea4fb 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWDocValuesFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWDocValuesFormat.cs
@@ -27,32 +27,9 @@ namespace Lucene.Net.Codecs.Lucene42
 #pragma warning disable 612, 618
     public class Lucene42RWDocValuesFormat : Lucene42DocValuesFormat
     {
-        private readonly bool _oldFormatImpersonationIsActive;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public Lucene42RWDocValuesFormat()
-            : this(true)
-        { }
-
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-        /// </param>
-        public Lucene42RWDocValuesFormat(bool oldFormatImpersonationIsActive) : base()
-        {
-            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-        }
-
         public override DocValuesConsumer FieldsConsumer(SegmentWriteState state)
         {
-            if (!_oldFormatImpersonationIsActive)
+            if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
             {
                 return base.FieldsConsumer(state);
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.TestFramework/Codecs/Lucene45/Lucene45RWCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene45/Lucene45RWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene45/Lucene45RWCodec.cs
index c610ca9..6ddcf84 100644
--- a/src/Lucene.Net.TestFramework/Codecs/Lucene45/Lucene45RWCodec.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene45/Lucene45RWCodec.cs
@@ -27,47 +27,15 @@ namespace Lucene.Net.Codecs.Lucene45
 #pragma warning disable 612, 618
     public class Lucene45RWCodec : Lucene45Codec
     {
-        private readonly FieldInfosFormat fieldInfosFormat;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public Lucene45RWCodec()
-            : this(true)
-        { }
-
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-        /// </param>
-        public Lucene45RWCodec(bool oldFormatImpersonationIsActive) : base()
-        {
-             fieldInfosFormat = new Lucene42FieldInfosFormatAnonymousInnerClassHelper(oldFormatImpersonationIsActive);
-        }
+        private readonly FieldInfosFormat fieldInfosFormat = new Lucene42FieldInfosFormatAnonymousInnerClassHelper();
 
         private class Lucene42FieldInfosFormatAnonymousInnerClassHelper : Lucene42FieldInfosFormat
         {
-            private readonly bool _oldFormatImpersonationIsActive;
-
-            /// <param name="oldFormatImpersonationIsActive">
-            /// LUCENENET specific
-            /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-            /// </param>
-            public Lucene42FieldInfosFormatAnonymousInnerClassHelper(bool oldFormatImpersonationIsActive) : base()
-            {
-                _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-            }
-
             public override FieldInfosWriter FieldInfosWriter
             {
                 get
                 {
-                    if (!_oldFormatImpersonationIsActive)
+                    if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
                     {
                         return base.FieldInfosWriter;
                     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
index e83704d..5dd3542 100644
--- a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
@@ -204,7 +204,6 @@ namespace Lucene.Net.Util
 
         public LuceneTestCase()
         {
-            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = false;
             ClassEnvRule = new TestRuleSetupAndRestoreClassEnv();
             String directory = Paths.TempDirectory;
             TEMP_DIR = new System.IO.FileInfo(directory);
@@ -428,11 +427,8 @@ namespace Lucene.Net.Util
         /// specific tests on demand.
         ///
         /// @lucene.internal
-        /// 
-        /// LUCENENET specific
-        /// Is non-static to remove inter-class dependencies on this variable
         /// </summary>
-        public bool OLD_FORMAT_IMPERSONATION_IS_ACTIVE { get; protected set; }
+        public static bool OLD_FORMAT_IMPERSONATION_IS_ACTIVE = false;
 
         // -----------------------------------------------------------------
         // Class level (suite) rules.
@@ -596,8 +592,6 @@ namespace Lucene.Net.Util
         {
             // LUCENENET TODO: Not sure how to convert these
             //ParentChainCallRule.SetupCalled = true;
-
-
         }
 
         /// <summary>
@@ -620,8 +614,10 @@ namespace Lucene.Net.Util
 
         // LUCENENET specific method for setting up dependency injection of test classes.
         [OneTimeSetUp]
-        public virtual void OneTimeSetUp()
+        public virtual void BeforeClass()
         {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = false;
+
             // Setup the factories
             Codec.SetCodecFactory(TEST_CODEC_FACTORY);
             DocValuesFormat.SetDocValuesFormatFactory(TEST_DOCVALUES_FORMAT_FACTORY);
@@ -1333,8 +1329,7 @@ namespace Lucene.Net.Util
 
         /// <summary>
         /// LUCENENET specific
-        /// Is non-static because <see cref="OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/>
-        /// is now non-static.
+        /// Is non-static.
         /// </summary>
         public Field NewStringField(string name, string value, Field.Store stored)
         {
@@ -1343,8 +1338,7 @@ namespace Lucene.Net.Util
 
         /// <summary>
         /// LUCENENET specific
-        /// Is non-static because <see cref="OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/>
-        /// is now non-static.
+        /// Is non-static.
         /// </summary>
         public Field NewTextField(string name, string value, Field.Store stored)
         {
@@ -1353,8 +1347,7 @@ namespace Lucene.Net.Util
 
         /// <summary>
         /// LUCENENET specific
-        /// Is non-static because <see cref="OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/>
-        /// is now non-static.
+        /// Is non-static.
         /// </summary>
         public Field NewStringField(Random random, string name, string value, Field.Store stored)
         {
@@ -1363,8 +1356,7 @@ namespace Lucene.Net.Util
 
         /// <summary>
         /// LUCENENET specific
-        /// Is non-static because <see cref="OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/>
-        /// is also non-static to reduce hidden dependencies on this variable.
+        /// Is non-static.
         /// </summary>
         public Field NewTextField(Random random, string name, string value, Field.Store stored)
         {
@@ -1373,8 +1365,7 @@ namespace Lucene.Net.Util
 
         /// <summary>
         /// LUCENENET specific
-        /// Is non-static because <see cref="OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/>
-        /// is now non-static.
+        /// Is non-static.
         /// </summary>
         public Field NewField(string name, string value, FieldType type)
         {
@@ -1383,8 +1374,7 @@ namespace Lucene.Net.Util
 
         /// <summary>
         /// LUCENENET specific
-        /// Is non-static because <see cref="OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/>
-        /// is now non-static.
+        /// Is non-static.
         /// </summary>
         public Field NewField(Random random, string name, string value, FieldType type)
         {

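The LuceneTestCase hunk is the pivot of the whole commit: OLD_FORMAT_IMPERSONATION_IS_ACTIVE becomes a static field again, defaulting to false, and the [OneTimeSetUp] hook is renamed from OneTimeSetUp to BeforeClass, which now resets the flag before wiring up the codec factories. Every test diff that follows overrides it the same way; a sketch of the contract (the fixture name is hypothetical):

    public class TestSomeAncientFormat : LuceneTestCase
    {
        [OneTimeSetUp]
        public override void BeforeClass()
        {
            // Resets the flag to false and sets up the codec factories.
            base.BeforeClass();
            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly opt in to the ancient codec
        }
    }
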
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xPostingsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xPostingsFormat.cs
index 0ab9a7b..365ab52 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xPostingsFormat.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xPostingsFormat.cs
@@ -27,19 +27,23 @@ namespace Lucene.Net.Codecs.Lucene3x
     /// </summary>
     public class TestLucene3xPostingsFormat : BasePostingsFormatTestCase
     {
-        private readonly Codec Codec_Renamed;
+        private readonly Codec codec = new PreFlexRWCodec();
 
-        public TestLucene3xPostingsFormat() : base()
+        /// <summary>
+        /// we will manually instantiate preflex-rw here
+        /// </summary>
+        public override void SetUp()
         {
-            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
-            Codec_Renamed = new PreFlexRWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+            base.SetUp();
+            LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
         }
 
+
         protected override Codec Codec
         {
             get
             {
-                return Codec_Renamed;
+                return codec;
             }
         }
 

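One timing detail worth calling out in TestLucene3xPostingsFormat: the codec field initializer runs when NUnit constructs the fixture, before SetUp ever fires, so PreFlexRWCodec is built while the flag is still false. That only works because the flag is consulted at write time, not in the codec's constructor. The same two members from the diff, annotated:

    private readonly Codec codec = new PreFlexRWCodec(); // flag is still false here

    public override void SetUp()
    {
        base.SetUp();
        LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // read later, when segments are written
    }
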
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xStoredFieldsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xStoredFieldsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xStoredFieldsFormat.cs
index e78db47..3682335 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xStoredFieldsFormat.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xStoredFieldsFormat.cs
@@ -26,13 +26,10 @@ namespace Lucene.Net.Codecs.Lucene3x
     [TestFixture]
     public class TestLucene3xStoredFieldsFormat : BaseStoredFieldsFormatTestCase
     {
-        /// <summary>
-        /// LUCENENET specific
-        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
-        /// </summary>
         [OneTimeSetUp]
-        public void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
         }
 
@@ -40,8 +37,7 @@ namespace Lucene.Net.Codecs.Lucene3x
         {
             get
             {
-                Assert.IsTrue(OLD_FORMAT_IMPERSONATION_IS_ACTIVE, "This should have been set up in the test fixture");
-                return new PreFlexRWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+                return new PreFlexRWCodec();
             }
         }
 
@@ -105,7 +101,11 @@ namespace Lucene.Net.Codecs.Lucene3x
             base.TestEmptyDocs();
         }
 
-        [Test]
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout on NUnit for .NET Core.
+        [Timeout(40000)]
+#endif
+        [Test, HasTimeout]
         public override void TestConcurrentReads()
         {
             base.TestConcurrentReads();

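The TestConcurrentReads hunk also introduces a conditional timeout: NUnit's [Timeout] attribute was unavailable on .NET Core at the time, so it is compiled only for non-NETSTANDARD targets, while [Test, HasTimeout] marks the test on every target. The real HasTimeout attribute ships with the test framework; a hypothetical stand-in, assuming it is a simple category marker (it may well do more):

    // Hypothetical stand-in only; not the framework's actual definition.
    [AttributeUsage(AttributeTargets.Method, AllowMultiple = false)]
    public sealed class HasTimeoutAttribute : NUnit.Framework.CategoryAttribute
    {
        public HasTimeoutAttribute() : base("HasTimeout") { }
    }
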
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xTermVectorsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xTermVectorsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xTermVectorsFormat.cs
index cbd59ed..7b7ed3a 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xTermVectorsFormat.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xTermVectorsFormat.cs
@@ -38,8 +38,7 @@ namespace Lucene.Net.Codecs.Lucene3x
         {
             get
             {
-                Assert.IsTrue(OLD_FORMAT_IMPERSONATION_IS_ACTIVE, "This should have been set up in the test fixture");
-                return new PreFlexRWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+                return new PreFlexRWCodec();
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs
index d4b1b63..8a9134d 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs
@@ -34,13 +34,11 @@ namespace Lucene.Net.Codecs.Lucene3x
     {
         /// <summary>
         /// we will manually instantiate preflex-rw here
-        /// 
-        /// LUCENENET specific
-        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
         /// </summary>
         [OneTimeSetUp]
-        public void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
         }
 
@@ -351,7 +349,7 @@ namespace Lucene.Net.Codecs.Lucene3x
         {
             Directory dir = NewDirectory();
             RandomIndexWriter w = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
-                .SetCodec(new PreFlexRWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE)));
+                .SetCodec(new PreFlexRWCodec()));
 
             int numField = TestUtil.NextInt(Random(), 2, 5);
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Codecs/Lucene3x/TestTermInfosReaderIndex.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestTermInfosReaderIndex.cs b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestTermInfosReaderIndex.cs
index 2d9dd05..246a3ec 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestTermInfosReaderIndex.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestTermInfosReaderIndex.cs
@@ -66,12 +66,9 @@ namespace Lucene.Net.Codecs.Lucene3x
 
         /// <summary>
         /// we will manually instantiate preflex-rw here
-        /// 
-        /// LUCENENET specific
-        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
         /// </summary>
         [OneTimeSetUp]
-        public void BeforeClass()
+        public override void BeforeClass()
         {
             // NOTE: turn off compound file, this test will open some index files directly.
             OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
@@ -84,7 +81,7 @@ namespace Lucene.Net.Codecs.Lucene3x
 
             Directory = NewDirectory();
 
-            config.SetCodec(new PreFlexRWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE));
+            config.SetCodec(new PreFlexRWCodec());
             LogMergePolicy mp = NewLogMergePolicy();
             // NOTE: turn off compound file, this test will open some index files directly.
             mp.NoCFSRatio = 0.0;
@@ -97,7 +94,7 @@ namespace Lucene.Net.Codecs.Lucene3x
             string segment = r.SegmentName;
             r.Dispose();
 
-            FieldInfosReader infosReader = (new PreFlexRWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE)).FieldInfosFormat.FieldInfosReader;
+            FieldInfosReader infosReader = (new PreFlexRWCodec()).FieldInfosFormat.FieldInfosReader;
             FieldInfos fieldInfos = infosReader.Read(Directory, segment, "", IOContext.READ_ONCE);
             string segmentFileName = IndexFileNames.SegmentFileName(segment, "", Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION);
             long tiiFileLength = Directory.FileLength(segmentFileName);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40DocValuesFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40DocValuesFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40DocValuesFormat.cs
index d63a6b3..0f979c2 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40DocValuesFormat.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40DocValuesFormat.cs
@@ -26,13 +26,12 @@ namespace Lucene.Net.Codecs.Lucene40
     /// </summary>
     public class TestLucene40DocValuesFormat : BaseDocValuesFormatTestCase
     {
-        /// <summary>
-        /// LUCENENET specific
-        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
-        /// </summary>
+        private readonly Codec codec = new Lucene40RWCodec();
+
         [OneTimeSetUp]
-        public void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
         }
 
@@ -40,8 +39,7 @@ namespace Lucene.Net.Codecs.Lucene40
         {
             get
             {
-                Assert.True(OLD_FORMAT_IMPERSONATION_IS_ACTIVE, "Expecting that this is true");
-                return new Lucene40RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+                return codec;
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsFormat.cs
index 2d2b5f1..8e8320d 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsFormat.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsFormat.cs
@@ -26,13 +26,12 @@ namespace Lucene.Net.Codecs.Lucene40
     /// </summary>
     public class TestLucene40PostingsFormat : BasePostingsFormatTestCase
     {
-        /// <summary>
-        /// LUCENENET specific
-        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
-        /// </summary>
+        private readonly Codec codec = new Lucene40RWCodec();
+
         [OneTimeSetUp]
-        public void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
         }
 
@@ -40,8 +39,7 @@ namespace Lucene.Net.Codecs.Lucene40
         {
             get
             {
-                Assert.True(OLD_FORMAT_IMPERSONATION_IS_ACTIVE, "Expecting this to be set already before creating codec");
-                return new Lucene40RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+                return codec;
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsReader.cs b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsReader.cs
index aea5ce0..cfd9a3f 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsReader.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsReader.cs
@@ -53,13 +53,10 @@ namespace Lucene.Net.Codecs.Lucene40
             }
         }
 
-        /// <summary>
-        /// LUCENENET specific
-        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
-        /// </summary>
         [OneTimeSetUp]
-        public void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40StoredFieldsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40StoredFieldsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40StoredFieldsFormat.cs
index c78b1b4..3ae63f7 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40StoredFieldsFormat.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40StoredFieldsFormat.cs
@@ -24,13 +24,10 @@ namespace Lucene.Net.Codecs.Lucene40
 
     public class TestLucene40StoredFieldsFormat : BaseStoredFieldsFormatTestCase
     {
-        /// <summary>
-        /// LUCENENET specific
-        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
-        /// </summary>
         [OneTimeSetUp]
-        public void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
         }
 
@@ -38,8 +35,7 @@ namespace Lucene.Net.Codecs.Lucene40
         {
             get
             {
-                Assert.True(OLD_FORMAT_IMPERSONATION_IS_ACTIVE, "Expecting this to be set already");
-                return new Lucene40RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+                return new Lucene40RWCodec();
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40TermVectorsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40TermVectorsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40TermVectorsFormat.cs
index d7541eb..4370228 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40TermVectorsFormat.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40TermVectorsFormat.cs
@@ -24,13 +24,10 @@ namespace Lucene.Net.Codecs.Lucene40
 
     public class TestLucene40TermVectorsFormat : BaseTermVectorsFormatTestCase
     {
-        /// <summary>
-        /// LUCENENET specific
-        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
-        /// </summary>
         [OneTimeSetUp]
-        public void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
         }
 
@@ -38,8 +35,7 @@ namespace Lucene.Net.Codecs.Lucene40
         {
             get
             {
-                Assert.True(OLD_FORMAT_IMPERSONATION_IS_ACTIVE, "Expecting this to be set already");
-                return new Lucene40RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+                return new Lucene40RWCodec();
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs b/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs
index c2bdadf..c8b7600 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene40/TestReuseDocsEnum.cs
@@ -46,13 +46,10 @@ namespace Lucene.Net.Codecs.Lucene40
     [TestFixture]
     public class TestReuseDocsEnum : LuceneTestCase
     {
-        /// <summary>
-        /// LUCENENET specific
-        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
-        /// </summary>
         [OneTimeSetUp]
-        public void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
         }
 
@@ -60,7 +57,7 @@ namespace Lucene.Net.Codecs.Lucene40
         public virtual void TestReuseDocsEnumNoReuse()
         {
             Directory dir = NewDirectory();
-            Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat(OLD_FORMAT_IMPERSONATION_IS_ACTIVE));
+            Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat());
             RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(cp));
             int numdocs = AtLeast(20);
             CreateRandomIndex(numdocs, writer, Random());
@@ -90,7 +87,7 @@ namespace Lucene.Net.Codecs.Lucene40
         public virtual void TestReuseDocsEnumSameBitsOrNull()
         {
             Directory dir = NewDirectory();
-            Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat(OLD_FORMAT_IMPERSONATION_IS_ACTIVE));
+            Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat());
             RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(cp));
             int numdocs = AtLeast(20);
             CreateRandomIndex(numdocs, writer, Random());
@@ -139,7 +136,7 @@ namespace Lucene.Net.Codecs.Lucene40
         public virtual void TestReuseDocsEnumDifferentReader()
         {
             Directory dir = NewDirectory();
-            Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat(OLD_FORMAT_IMPERSONATION_IS_ACTIVE));
+            Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat());
             MockAnalyzer analyzer = new MockAnalyzer(Random());
             analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
 

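The TestReuseDocsEnum changes show what the parameterless constructors buy in practice: enabling impersonation is now the fixture's BeforeClass flipping the static flag, and the format itself is wrapped without ceremony. A usage sketch assembled from the diff (all names appear above; only the enclosing test method is implied):

    Directory dir = NewDirectory();
    Codec cp = TestUtil.AlwaysPostingsFormat(new Lucene40RWPostingsFormat());
    RandomIndexWriter writer = new RandomIndexWriter(
        Random(), dir,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(cp));
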
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Codecs/Lucene41/TestLucene41StoredFieldsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene41/TestLucene41StoredFieldsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene41/TestLucene41StoredFieldsFormat.cs
index 0cbb28b..3f1b59d 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene41/TestLucene41StoredFieldsFormat.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene41/TestLucene41StoredFieldsFormat.cs
@@ -24,13 +24,10 @@ namespace Lucene.Net.Codecs.Lucene41
 
     public class TestLucene41StoredFieldsFormat : BaseStoredFieldsFormatTestCase
     {
-        /// <summary>
-        /// LUCENENET specific
-        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
-        /// </summary>
         [OneTimeSetUp]
-        public void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
         }
 
@@ -38,7 +35,7 @@ namespace Lucene.Net.Codecs.Lucene41
         {
             get
             {
-                return new Lucene41RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+                return new Lucene41RWCodec();
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Codecs/Lucene42/TestLucene42DocValuesFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene42/TestLucene42DocValuesFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene42/TestLucene42DocValuesFormat.cs
index f9c47ce..c147cc4 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene42/TestLucene42DocValuesFormat.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene42/TestLucene42DocValuesFormat.cs
@@ -26,24 +26,20 @@ namespace Lucene.Net.Codecs.Lucene42
     /// </summary>
     public class TestLucene42DocValuesFormat : BaseCompressingDocValuesFormatTestCase
     {
-        private Codec Codec_Renamed;
+        private readonly Codec codec = new Lucene42RWCodec();
 
-        /// <summary>
-        /// LUCENENET specific
-        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
-        /// </summary>
         [OneTimeSetUp]
-        public void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
-            Codec_Renamed = new Lucene42RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
         }
 
         protected override Codec Codec
         {
             get
             {
-                return Codec_Renamed;
+                return codec;
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
index 2983ae5..8b6f78d 100644
--- a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
+++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
@@ -201,7 +201,7 @@ namespace Lucene.Net.Index
         }
 
         [OneTimeSetUp]
-        public void BeforeClass()
+        public override void BeforeClass()
         {
             Assert.IsFalse(OLD_FORMAT_IMPERSONATION_IS_ACTIVE, "test infra is broken!");
             IList<string> names = new List<string>(OldNames.Length + OldSingleSegmentNames.Length);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
index c5de1e4..9ee601f 100644
--- a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
+++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
@@ -6,6 +6,7 @@ using System;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.IO;
+using System.Text;
 
 namespace Lucene.Net.Index
 {
@@ -129,9 +130,9 @@ namespace Lucene.Net.Index
         internal static IDictionary<string, Directory> OldIndexDirs;
 
         [OneTimeSetUp]
-        public void BeforeClass()
+        public override void BeforeClass()
         {
-            Assert.IsFalse(OLD_FORMAT_IMPERSONATION_IS_ACTIVE, "test infra is broken!");
+            assertFalse("test infra is broken!", OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
             IList<string> names = new List<string>(OldNames.Length + OldSingleSegmentNames.Length);
             names.AddRange(Arrays.AsList(OldNames));
             names.AddRange(Arrays.AsList(OldSingleSegmentNames));
@@ -237,13 +238,12 @@ namespace Lucene.Net.Index
                     writer = null;
                 }
 
-                MemoryStream bos = new MemoryStream(1024);
+                StringBuilder bos = new StringBuilder();
                 CheckIndex checker = new CheckIndex(dir);
-#pragma warning disable 612, 618
-                checker.InfoStream = new StreamWriter(bos.ToString(), false, IOUtils.CHARSET_UTF_8);
-#pragma warning restore 612, 618
+                checker.InfoStream = new StringWriter(bos);
                 CheckIndex.Status indexStatus = checker.DoCheckIndex();
                 Assert.IsFalse(indexStatus.Clean);
+                checker.InfoStream.Flush();
                 Assert.IsTrue(bos.ToString().Contains(typeof(IndexFormatTooOldException).Name));
 
                 dir.Dispose();
@@ -585,7 +585,7 @@ namespace Lucene.Net.Index
             Assert.AreEqual(44, hits.Length, "wrong number of hits");
             d = searcher.Doc(hits[0].Doc);
             DoTestHits(hits, 44, searcher.IndexReader);
-            Assert.AreEqual("wrong first document", "21", d.Get("id"));
+            assertEquals("wrong first document", "21", d.Get("id"));
             reader.Dispose();
         }
 

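The CheckIndex hunk in TestBackwardsCompatibility3x fixes a latent bug along the way: the old code passed bos.ToString() (for a MemoryStream, that is just the type name, not the stream's contents) to StreamWriter as if it were a file path. Routing InfoStream through a StringWriter backed by a StringBuilder captures the output directly. The capture pattern, as landed in the diff:

    StringBuilder bos = new StringBuilder();
    CheckIndex checker = new CheckIndex(dir);
    checker.InfoStream = new StringWriter(bos);          // output lands in bos
    CheckIndex.Status indexStatus = checker.DoCheckIndex();
    checker.InfoStream.Flush();                          // flush before inspecting
    Assert.IsTrue(bos.ToString().Contains(typeof(IndexFormatTooOldException).Name));
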
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs b/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
index 19351fd..151237b 100644
--- a/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
+++ b/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
@@ -1116,19 +1116,18 @@ namespace Lucene.Net.Index
         [Test]
         public virtual void TestUpdateOldSegments()
         {
-            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
-
             Codec[] oldCodecs = new Codec[] {
-                new Lucene40RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE),
-                new Lucene41RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE),
-                new Lucene42RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE),
-                new Lucene45RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
+                new Lucene40RWCodec(),
+                new Lucene41RWCodec(),
+                new Lucene42RWCodec(),
+                new Lucene45RWCodec()
             };
             Directory dir = NewDirectory();
 
             // create a segment with an old Codec
             IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
             conf.SetCodec(oldCodecs[Random().Next(oldCodecs.Length)]);
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
             IndexWriter writer = new IndexWriter(dir, conf);
             Document doc = new Document();
             doc.Add(new StringField("id", "doc", Store.NO));
@@ -1138,11 +1137,6 @@ namespace Lucene.Net.Index
             dir.Dispose();
         }
 
-        /// <summary>
-        /// LUCENENET specific
-        /// Split from <see cref="TestUpdateOldSegments"/> because OLD_FORMAT_IMPERSONATION_IS_ACTIVE
-        /// is no longer static and the existing codecs have to be remade.
-        /// </summary>
         [Test, LuceneNetSpecific]
         public virtual void TestUpdateOldSegments_OldFormatNotActive()
         {
@@ -1151,10 +1145,10 @@ namespace Lucene.Net.Index
             OLD_FORMAT_IMPERSONATION_IS_ACTIVE = false;
 
             Codec[] oldCodecs = new Codec[] {
-                new Lucene40RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE),
-                new Lucene41RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE),
-                new Lucene42RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE),
-                new Lucene45RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
+                new Lucene40RWCodec(),
+                new Lucene41RWCodec(),
+                new Lucene42RWCodec(),
+                new Lucene45RWCodec()
             };
 
             Directory dir = NewDirectory();

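Now that the flag is static, where it is flipped matters again: TestUpdateOldSegments sets it after picking the codec but before constructing the IndexWriter, because the impersonation check happens when segments are written, not when the codec object is created. The ordering, distilled from the diff:

    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    conf.SetCodec(oldCodecs[Random().Next(oldCodecs.Length)]);
    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // must precede writing
    IndexWriter writer = new IndexWriter(dir, conf);
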
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Index/TestCodecs.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestCodecs.cs b/src/Lucene.Net.Tests/Index/TestCodecs.cs
index e798da9..58d5aa9 100644
--- a/src/Lucene.Net.Tests/Index/TestCodecs.cs
+++ b/src/Lucene.Net.Tests/Index/TestCodecs.cs
@@ -89,8 +89,9 @@ namespace Lucene.Net.Index
         private const int TERM_DOC_FREQ_RAND = 20;
 
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             NUM_TEST_ITER = AtLeast(20);
         }
 
@@ -899,7 +900,7 @@ namespace Lucene.Net.Index
         [Test]
         public virtual void TestDisableImpersonation()
         {
-            Codec[] oldCodecs = new Codec[] { new Lucene40RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE), new Lucene41RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE), new Lucene42RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE) };
+            Codec[] oldCodecs = new Codec[] { new Lucene40RWCodec(), new Lucene41RWCodec(), new Lucene42RWCodec() };
             Directory dir = NewDirectory();
             IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
             conf.SetCodec(oldCodecs[Random().Next(oldCodecs.Length)]);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4b0fa137/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs b/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs
index 25b8b44..eb02fac 100644
--- a/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs
+++ b/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs
@@ -1068,15 +1068,14 @@ namespace Lucene.Net.Index
         [Test]
         public virtual void TestUpdateOldSegments()
         {
-            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
-
-            Codec[] oldCodecs = new Codec[] { new Lucene40RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE), new Lucene41RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE), new Lucene42RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE), new Lucene45RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE) };
+            Codec[] oldCodecs = new Codec[] { new Lucene40RWCodec(), new Lucene41RWCodec(), new Lucene42RWCodec(), new Lucene45RWCodec() };
             Directory dir = NewDirectory();
 
             bool oldValue = OLD_FORMAT_IMPERSONATION_IS_ACTIVE;
             // create a segment with an old Codec
             IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
             conf.SetCodec(oldCodecs[Random().Next(oldCodecs.Length)]);
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
             IndexWriter writer = new IndexWriter(dir, conf);
             Document doc = new Document();
             doc.Add(new StringField("id", "doc", Store.NO));


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestTermVectorsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestTermVectorsReader.cs b/src/Lucene.Net.Tests/Index/TestTermVectorsReader.cs
new file mode 100644
index 0000000..8426151
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestTermVectorsReader.cs
@@ -0,0 +1,477 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Analysis;
+    using NUnit.Framework;
+    using System.IO;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TermVectorsReader = Lucene.Net.Codecs.TermVectorsReader;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestTermVectorsReader : LuceneTestCase
+    {
+        public TestTermVectorsReader()
+        {
+            InitializeInstanceFields();
+        }
+
+        private void InitializeInstanceFields()
+        {
+            Positions = new int[TestTerms.Length][];
+            Tokens = new TestToken[TestTerms.Length * TERM_FREQ];
+        }
+
+        //Must be lexicographically sorted, will do in setup, versus trying to maintain here
+        private string[] TestFields = new string[] { "f1", "f2", "f3", "f4" };
+
+        private bool[] TestFieldsStorePos = new bool[] { true, false, true, false };
+        private bool[] TestFieldsStoreOff = new bool[] { true, false, false, true };
+        private string[] TestTerms = new string[] { "this", "is", "a", "test" };
+        private int[][] Positions;
+        private Directory Dir;
+        private SegmentCommitInfo Seg;
+        private FieldInfos FieldInfos = new FieldInfos(new FieldInfo[0]);
+        private static int TERM_FREQ = 3;
+
+        internal class TestToken : IComparable<TestToken>
+        {
+            private readonly TestTermVectorsReader OuterInstance;
+
+            public TestToken(TestTermVectorsReader outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            internal string Text;
+            internal int Pos;
+            internal int StartOffset;
+            internal int EndOffset;
+
+            public virtual int CompareTo(TestToken other)
+            {
+                return Pos - other.Pos;
+            }
+        }
+
+        internal TestToken[] Tokens;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            /*
+            for (int i = 0; i < testFields.Length; i++) {
+              fieldInfos.Add(testFields[i], true, true, testFieldsStorePos[i], testFieldsStoreOff[i]);
+            }
+            */
+
+            Array.Sort(TestTerms);
+            int tokenUpto = 0;
+            for (int i = 0; i < TestTerms.Length; i++)
+            {
+                Positions[i] = new int[TERM_FREQ];
+                // first position must be 0
+                for (int j = 0; j < TERM_FREQ; j++)
+                {
+                    // positions are always sorted in increasing order
+                    Positions[i][j] = (int)(j * 10 + new Random(1).NextDouble() * 10);
+                    TestToken token = Tokens[tokenUpto++] = new TestToken(this);
+                    token.Text = TestTerms[i];
+                    token.Pos = Positions[i][j];
+                    token.StartOffset = j * 10;
+                    token.EndOffset = j * 10 + TestTerms[i].Length;
+                }
+            }
+            Array.Sort(Tokens);
+
+            Dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MyAnalyzer(this)).SetMaxBufferedDocs(-1).SetMergePolicy(NewLogMergePolicy(false, 10)).SetUseCompoundFile(false));
+
+            Document doc = new Document();
+            for (int i = 0; i < TestFields.Length; i++)
+            {
+                FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+                if (TestFieldsStorePos[i] && TestFieldsStoreOff[i])
+                {
+                    customType.StoreTermVectors = true;
+                    customType.StoreTermVectorPositions = true;
+                    customType.StoreTermVectorOffsets = true;
+                }
+                else if (TestFieldsStorePos[i] && !TestFieldsStoreOff[i])
+                {
+                    customType.StoreTermVectors = true;
+                    customType.StoreTermVectorPositions = true;
+                }
+                else if (!TestFieldsStorePos[i] && TestFieldsStoreOff[i])
+                {
+                    customType.StoreTermVectors = true;
+                    customType.StoreTermVectorOffsets = true;
+                }
+                else
+                {
+                    customType.StoreTermVectors = true;
+                }
+                doc.Add(new Field(TestFields[i], "", customType));
+            }
+
+            //Create 5 documents for testing, they all have the same
+            //terms
+            for (int j = 0; j < 5; j++)
+            {
+                writer.AddDocument(doc);
+            }
+            writer.Commit();
+            Seg = writer.NewestSegment();
+            writer.Dispose();
+
+            FieldInfos = SegmentReader.ReadFieldInfos(Seg);
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        private class MyTokenizer : Tokenizer
+        {
+            private readonly TestTermVectorsReader OuterInstance;
+
+            internal int TokenUpto;
+
+            internal readonly ICharTermAttribute TermAtt;
+            internal readonly IPositionIncrementAttribute PosIncrAtt;
+            internal readonly IOffsetAttribute OffsetAtt;
+
+            public MyTokenizer(TestTermVectorsReader outerInstance, TextReader reader)
+                : base(reader)
+            {
+                this.OuterInstance = outerInstance;
+                TermAtt = AddAttribute<ICharTermAttribute>();
+                PosIncrAtt = AddAttribute<IPositionIncrementAttribute>();
+                OffsetAtt = AddAttribute<IOffsetAttribute>();
+            }
+
+            public sealed override bool IncrementToken()
+            {
+                if (TokenUpto >= OuterInstance.Tokens.Length)
+                {
+                    return false;
+                }
+                else
+                {
+                    TestToken testToken = OuterInstance.Tokens[TokenUpto++];
+                    ClearAttributes();
+                    TermAtt.Append(testToken.Text);
+                    OffsetAtt.SetOffset(testToken.StartOffset, testToken.EndOffset);
+                    if (TokenUpto > 1)
+                    {
+                        PosIncrAtt.PositionIncrement = testToken.Pos - OuterInstance.Tokens[TokenUpto - 2].Pos;
+                    }
+                    else
+                    {
+                        PosIncrAtt.PositionIncrement = testToken.Pos + 1;
+                    }
+                    return true;
+                }
+            }
+
+            public override void Reset()
+            {
+                base.Reset();
+                this.TokenUpto = 0;
+            }
+        }
+
+        private class MyAnalyzer : Analyzer
+        {
+            private readonly TestTermVectorsReader OuterInstance;
+
+            public MyAnalyzer(TestTermVectorsReader outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                return new TokenStreamComponents(new MyTokenizer(OuterInstance, reader));
+            }
+        }
+
+        [Test]
+        public virtual void Test()
+        {
+            //Check to see the files were created properly in setup
+            DirectoryReader reader = DirectoryReader.Open(Dir);
+            foreach (AtomicReaderContext ctx in reader.Leaves)
+            {
+                SegmentReader sr = (SegmentReader)ctx.Reader;
+                Assert.IsTrue(sr.FieldInfos.HasVectors);
+            }
+            reader.Dispose();
+        }
+
+        [Test]
+        public virtual void TestReader()
+        {
+            TermVectorsReader reader = Codec.Default.TermVectorsFormat.VectorsReader(Dir, Seg.Info, FieldInfos, NewIOContext(Random()));
+            for (int j = 0; j < 5; j++)
+            {
+                Terms vector = reader.Get(j).GetTerms(TestFields[0]);
+                Assert.IsNotNull(vector);
+                Assert.AreEqual(TestTerms.Length, vector.Count);
+                TermsEnum termsEnum = vector.GetIterator(null);
+                for (int i = 0; i < TestTerms.Length; i++)
+                {
+                    BytesRef text = termsEnum.Next();
+                    Assert.IsNotNull(text);
+                    string term = text.Utf8ToString();
+                    //System.out.println("Term: " + term);
+                    Assert.AreEqual(TestTerms[i], term);
+                }
+                Assert.IsNull(termsEnum.Next());
+            }
+            reader.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDocsEnum()
+        {
+            TermVectorsReader reader = Codec.Default.TermVectorsFormat.VectorsReader(Dir, Seg.Info, FieldInfos, NewIOContext(Random()));
+            for (int j = 0; j < 5; j++)
+            {
+                Terms vector = reader.Get(j).GetTerms(TestFields[0]);
+                Assert.IsNotNull(vector);
+                Assert.AreEqual(TestTerms.Length, vector.Count);
+                TermsEnum termsEnum = vector.GetIterator(null);
+                DocsEnum docsEnum = null;
+                for (int i = 0; i < TestTerms.Length; i++)
+                {
+                    BytesRef text = termsEnum.Next();
+                    Assert.IsNotNull(text);
+                    string term = text.Utf8ToString();
+                    //System.out.println("Term: " + term);
+                    Assert.AreEqual(TestTerms[i], term);
+
+                    docsEnum = TestUtil.Docs(Random(), termsEnum, null, docsEnum, DocsEnum.FLAG_NONE);
+                    Assert.IsNotNull(docsEnum);
+                    int doc = docsEnum.DocID;
+                    Assert.AreEqual(-1, doc);
+                    Assert.IsTrue(docsEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docsEnum.NextDoc());
+                }
+                Assert.IsNull(termsEnum.Next());
+            }
+            reader.Dispose();
+        }
+
+        [Test]
+        public virtual void TestPositionReader()
+        {
+            TermVectorsReader reader = Codec.Default.TermVectorsFormat.VectorsReader(Dir, Seg.Info, FieldInfos, NewIOContext(Random()));
+            //BytesRef[] terms; // LUCENENET NOTE: Not used in Lucene
+            Terms vector = reader.Get(0).GetTerms(TestFields[0]);
+            Assert.IsNotNull(vector);
+            Assert.AreEqual(TestTerms.Length, vector.Count);
+            TermsEnum termsEnum = vector.GetIterator(null);
+            DocsAndPositionsEnum dpEnum = null;
+            for (int i = 0; i < TestTerms.Length; i++)
+            {
+                BytesRef text = termsEnum.Next();
+                Assert.IsNotNull(text);
+                string term = text.Utf8ToString();
+                //Console.WriteLine("Term: " + term);
+                Assert.AreEqual(TestTerms[i], term);
+
+                dpEnum = termsEnum.DocsAndPositions(null, dpEnum);
+                Assert.IsNotNull(dpEnum);
+                int doc = dpEnum.DocID;
+                Assert.AreEqual(-1, doc);
+                Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                Assert.AreEqual(dpEnum.Freq, Positions[i].Length);
+                for (int j = 0; j < Positions[i].Length; j++)
+                {
+                    Assert.AreEqual(Positions[i][j], dpEnum.NextPosition());
+                }
+                Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dpEnum.NextDoc());
+
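+                // Pull a fresh positions enum for the same term and this time check
+                // start/end offsets along with the positions.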
+                dpEnum = termsEnum.DocsAndPositions(null, dpEnum);
+                Assert.IsNotNull(dpEnum);
+                doc = dpEnum.DocID;
+                Assert.AreEqual(-1, doc);
+                Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                Assert.AreEqual(dpEnum.Freq, Positions[i].Length);
+                for (int j = 0; j < Positions[i].Length; j++)
+                {
+                    Assert.AreEqual(Positions[i][j], dpEnum.NextPosition());
+                    Assert.AreEqual(j * 10, dpEnum.StartOffset);
+                    Assert.AreEqual(j * 10 + TestTerms[i].Length, dpEnum.EndOffset);
+                }
+                Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dpEnum.NextDoc());
+            }
+
+            Terms freqVector = reader.Get(0).GetTerms(TestFields[1]); //no pos, no offset
+            Assert.IsNotNull(freqVector);
+            Assert.AreEqual(TestTerms.Length, freqVector.Count);
+            termsEnum = freqVector.GetIterator(null);
+            Assert.IsNotNull(termsEnum);
+            for (int i = 0; i < TestTerms.Length; i++)
+            {
+                BytesRef text = termsEnum.Next();
+                Assert.IsNotNull(text);
+                string term = text.Utf8ToString();
+                //Console.WriteLine("Term: " + term);
+                Assert.AreEqual(TestTerms[i], term);
+                Assert.IsNotNull(termsEnum.Docs(null, null));
+                Assert.IsNull(termsEnum.DocsAndPositions(null, null)); // no pos
+            }
+            reader.Dispose();
+        }
+
+        [Test]
+        public virtual void TestOffsetReader()
+        {
+            TermVectorsReader reader = Codec.Default.TermVectorsFormat.VectorsReader(Dir, Seg.Info, FieldInfos, NewIOContext(Random()));
+            Terms vector = reader.Get(0).GetTerms(TestFields[0]);
+            Assert.IsNotNull(vector);
+            TermsEnum termsEnum = vector.GetIterator(null);
+            Assert.IsNotNull(termsEnum);
+            Assert.AreEqual(TestTerms.Length, vector.Count);
+            DocsAndPositionsEnum dpEnum = null;
+            for (int i = 0; i < TestTerms.Length; i++)
+            {
+                BytesRef text = termsEnum.Next();
+                Assert.IsNotNull(text);
+                string term = text.Utf8ToString();
+                Assert.AreEqual(TestTerms[i], term);
+
+                dpEnum = termsEnum.DocsAndPositions(null, dpEnum);
+                Assert.IsNotNull(dpEnum);
+                Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                Assert.AreEqual(dpEnum.Freq, Positions[i].Length);
+                for (int j = 0; j < Positions[i].Length; j++)
+                {
+                    Assert.AreEqual(Positions[i][j], dpEnum.NextPosition());
+                }
+                Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dpEnum.NextDoc());
+
+                dpEnum = termsEnum.DocsAndPositions(null, dpEnum);
+                Assert.IsNotNull(dpEnum);
+                Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                Assert.AreEqual(dpEnum.Freq, Positions[i].Length);
+                for (int j = 0; j < Positions[i].Length; j++)
+                {
+                    Assert.AreEqual(Positions[i][j], dpEnum.NextPosition());
+                    Assert.AreEqual(j * 10, dpEnum.StartOffset);
+                    Assert.AreEqual(j * 10 + TestTerms[i].Length, dpEnum.EndOffset);
+                }
+                Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dpEnum.NextDoc());
+            }
+            reader.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIllegalIndexableField()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
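+            // Term vector payloads, offsets, and positions all require term vectors
+            // to be enabled (and payloads also require positions); each combination
+            // below must be rejected when the document is added.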
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.StoreTermVectors = true;
+            ft.StoreTermVectorPayloads = true;
+            Document doc = new Document();
+            doc.Add(new Field("field", "value", ft));
+            try
+            {
+                w.AddDocument(doc);
+                Assert.Fail("did not hit exception");
+            }
+            catch (System.ArgumentException iae)
+            {
+                // Expected
+                Assert.AreEqual("cannot index term vector payloads without term vector positions (field=\"field\")", iae.Message);
+            }
+
+            ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.StoreTermVectors = false;
+            ft.StoreTermVectorOffsets = true;
+            doc = new Document();
+            doc.Add(new Field("field", "value", ft));
+            try
+            {
+                w.AddDocument(doc);
+                Assert.Fail("did not hit exception");
+            }
+            catch (System.ArgumentException iae)
+            {
+                // Expected
+                Assert.AreEqual("cannot index term vector offsets when term vectors are not indexed (field=\"field\")", iae.Message);
+            }
+
+            ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.StoreTermVectors = false;
+            ft.StoreTermVectorPositions = true;
+            doc = new Document();
+            doc.Add(new Field("field", "value", ft));
+            try
+            {
+                w.AddDocument(doc);
+                Assert.Fail("did not hit exception");
+            }
+            catch (System.ArgumentException iae)
+            {
+                // Expected
+                Assert.AreEqual("cannot index term vector positions when term vectors are not indexed (field=\"field\")", iae.Message);
+            }
+
+            ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.StoreTermVectors = false;
+            ft.StoreTermVectorPayloads = true;
+            doc = new Document();
+            doc.Add(new Field("field", "value", ft));
+            try
+            {
+                w.AddDocument(doc);
+                Assert.Fail("did not hit exception");
+            }
+            catch (System.ArgumentException iae)
+            {
+                // Expected
+                Assert.AreEqual("cannot index term vector payloads when term vectors are not indexed (field=\"field\")", iae.Message);
+            }
+
+            w.Dispose();
+
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestTermVectorsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestTermVectorsWriter.cs b/src/Lucene.Net.Tests/Index/TestTermVectorsWriter.cs
new file mode 100644
index 0000000..355249e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestTermVectorsWriter.cs
@@ -0,0 +1,601 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using System.IO;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CachingTokenFilter = Lucene.Net.Analysis.CachingTokenFilter;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using MockTokenFilter = Lucene.Net.Analysis.MockTokenFilter;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using StringField = StringField;
+    using TextField = TextField;
+    using TokenStream = Lucene.Net.Analysis.TokenStream;
+
+    /// <summary>
+    /// Tests for writing term vectors. </summary>
+    [TestFixture]
+    public class TestTermVectorsWriter : LuceneTestCase
+    {
+        // LUCENE-1442
+        [Test]
+        public virtual void TestDoubleOffsetCounting()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            FieldType customType = new FieldType(StringField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+            Field f = NewField("field", "abcd", customType);
+            doc.Add(f);
+            doc.Add(f);
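+            // Mix in a zero-length value for the same field so that offset
+            // accounting is exercised across empty field instances (LUCENE-1442).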
+            Field f2 = NewField("field", "", customType);
+            doc.Add(f2);
+            doc.Add(f);
+            w.AddDocument(doc);
+            w.Dispose();
+
+            IndexReader r = DirectoryReader.Open(dir);
+            Terms vector = r.GetTermVectors(0).GetTerms("field");
+            Assert.IsNotNull(vector);
+            TermsEnum termsEnum = vector.GetIterator(null);
+            Assert.IsNotNull(termsEnum.Next());
+            Assert.AreEqual("", termsEnum.Term.Utf8ToString());
+
+            // Token "" occurred once
+            Assert.AreEqual(1, termsEnum.TotalTermFreq);
+
+            DocsAndPositionsEnum dpEnum = termsEnum.DocsAndPositions(null, null);
+            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            dpEnum.NextPosition();
+            Assert.AreEqual(8, dpEnum.StartOffset);
+            Assert.AreEqual(8, dpEnum.EndOffset);
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dpEnum.NextDoc());
+
+            // Token "abcd" occurred three times
+            Assert.AreEqual(new BytesRef("abcd"), termsEnum.Next());
+            dpEnum = termsEnum.DocsAndPositions(null, dpEnum);
+            Assert.AreEqual(3, termsEnum.TotalTermFreq);
+
+            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            dpEnum.NextPosition();
+            Assert.AreEqual(0, dpEnum.StartOffset);
+            Assert.AreEqual(4, dpEnum.EndOffset);
+
+            dpEnum.NextPosition();
+            Assert.AreEqual(4, dpEnum.StartOffset);
+            Assert.AreEqual(8, dpEnum.EndOffset);
+
+            dpEnum.NextPosition();
+            Assert.AreEqual(8, dpEnum.StartOffset);
+            Assert.AreEqual(12, dpEnum.EndOffset);
+
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dpEnum.NextDoc());
+            Assert.IsNull(termsEnum.Next());
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-1442
+        [Test]
+        public virtual void TestDoubleOffsetCounting2()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+            Field f = NewField("field", "abcd", customType);
+            doc.Add(f);
+            doc.Add(f);
+            w.AddDocument(doc);
+            w.Dispose();
+
+            IndexReader r = DirectoryReader.Open(dir);
+            TermsEnum termsEnum = r.GetTermVectors(0).GetTerms("field").GetIterator(null);
+            Assert.IsNotNull(termsEnum.Next());
+            DocsAndPositionsEnum dpEnum = termsEnum.DocsAndPositions(null, null);
+            Assert.AreEqual(2, termsEnum.TotalTermFreq);
+
+            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            dpEnum.NextPosition();
+            Assert.AreEqual(0, dpEnum.StartOffset);
+            Assert.AreEqual(4, dpEnum.EndOffset);
+
+            dpEnum.NextPosition();
+            Assert.AreEqual(5, dpEnum.StartOffset);
+            Assert.AreEqual(9, dpEnum.EndOffset);
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dpEnum.NextDoc());
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-1448
+        [Test]
+        public virtual void TestEndOffsetPositionCharAnalyzer()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+            Field f = NewField("field", "abcd   ", customType);
+            doc.Add(f);
+            doc.Add(f);
+            w.AddDocument(doc);
+            w.Dispose();
+
+            IndexReader r = DirectoryReader.Open(dir);
+            TermsEnum termsEnum = r.GetTermVectors(0).GetTerms("field").GetIterator(null);
+            Assert.IsNotNull(termsEnum.Next());
+            DocsAndPositionsEnum dpEnum = termsEnum.DocsAndPositions(null, null);
+            Assert.AreEqual(2, termsEnum.TotalTermFreq);
+
+            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            dpEnum.NextPosition();
+            Assert.AreEqual(0, dpEnum.StartOffset);
+            Assert.AreEqual(4, dpEnum.EndOffset);
+
+            dpEnum.NextPosition();
+            Assert.AreEqual(8, dpEnum.StartOffset);
+            Assert.AreEqual(12, dpEnum.EndOffset);
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dpEnum.NextDoc());
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-1448
+        [Test]
+        public virtual void TestEndOffsetPositionWithCachingTokenFilter()
+        {
+            Directory dir = NewDirectory();
+            Analyzer analyzer = new MockAnalyzer(Random());
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
+            Document doc = new Document();
+            IOException priorException = null;
+            TokenStream stream = analyzer.TokenStream("field", new StringReader("abcd   "));
+            try
+            {
+                stream.Reset(); // TODO: weird to reset before wrapping with CachingTokenFilter... correct?
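+                // CachingTokenFilter consumes its input on first use and replays the
+                // cached tokens afterwards, so the underlying stream is reset before
+                // it is wrapped and consumed.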
+                TokenStream cachedStream = new CachingTokenFilter(stream);
+                FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+                customType.StoreTermVectors = true;
+                customType.StoreTermVectorPositions = true;
+                customType.StoreTermVectorOffsets = true;
+                Field f = new Field("field", cachedStream, customType);
+                doc.Add(f);
+                doc.Add(f);
+                w.AddDocument(doc);
+            }
+            catch (IOException e)
+            {
+                priorException = e;
+            }
+            finally
+            {
+                IOUtils.CloseWhileHandlingException(priorException, stream);
+            }
+            w.Dispose();
+
+            IndexReader r = DirectoryReader.Open(dir);
+            TermsEnum termsEnum = r.GetTermVectors(0).GetTerms("field").GetIterator(null);
+            Assert.IsNotNull(termsEnum.Next());
+            DocsAndPositionsEnum dpEnum = termsEnum.DocsAndPositions(null, null);
+            Assert.AreEqual(2, termsEnum.TotalTermFreq);
+
+            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            dpEnum.NextPosition();
+            Assert.AreEqual(0, dpEnum.StartOffset);
+            Assert.AreEqual(4, dpEnum.EndOffset);
+
+            dpEnum.NextPosition();
+            Assert.AreEqual(8, dpEnum.StartOffset);
+            Assert.AreEqual(12, dpEnum.EndOffset);
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dpEnum.NextDoc());
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-1448
+        [Test]
+        public virtual void TestEndOffsetPositionStopFilter()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET)));
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+            Field f = NewField("field", "abcd the", customType);
+            doc.Add(f);
+            doc.Add(f);
+            w.AddDocument(doc);
+            w.Dispose();
+
+            IndexReader r = DirectoryReader.Open(dir);
+            TermsEnum termsEnum = r.GetTermVectors(0).GetTerms("field").GetIterator(null);
+            Assert.IsNotNull(termsEnum.Next());
+            DocsAndPositionsEnum dpEnum = termsEnum.DocsAndPositions(null, null);
+            Assert.AreEqual(2, termsEnum.TotalTermFreq);
+
+            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            dpEnum.NextPosition();
+            Assert.AreEqual(0, dpEnum.StartOffset);
+            Assert.AreEqual(4, dpEnum.EndOffset);
+
+            dpEnum.NextPosition();
+            Assert.AreEqual(9, dpEnum.StartOffset);
+            Assert.AreEqual(13, dpEnum.EndOffset);
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dpEnum.NextDoc());
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-1448
+        [Test]
+        public virtual void TestEndOffsetPositionStandard()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+            Field f = NewField("field", "abcd the  ", customType);
+            Field f2 = NewField("field", "crunch man", customType);
+            doc.Add(f);
+            doc.Add(f2);
+            w.AddDocument(doc);
+            w.Dispose();
+
+            IndexReader r = DirectoryReader.Open(dir);
+            TermsEnum termsEnum = r.GetTermVectors(0).GetTerms("field").GetIterator(null);
+            Assert.IsNotNull(termsEnum.Next());
+            DocsAndPositionsEnum dpEnum = termsEnum.DocsAndPositions(null, null);
+
+            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            dpEnum.NextPosition();
+            Assert.AreEqual(0, dpEnum.StartOffset);
+            Assert.AreEqual(4, dpEnum.EndOffset);
+
+            Assert.IsNotNull(termsEnum.Next());
+            dpEnum = termsEnum.DocsAndPositions(null, dpEnum);
+            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            dpEnum.NextPosition();
+            Assert.AreEqual(11, dpEnum.StartOffset);
+            Assert.AreEqual(17, dpEnum.EndOffset);
+
+            Assert.IsNotNull(termsEnum.Next());
+            dpEnum = termsEnum.DocsAndPositions(null, dpEnum);
+            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            dpEnum.NextPosition();
+            Assert.AreEqual(18, dpEnum.StartOffset);
+            Assert.AreEqual(21, dpEnum.EndOffset);
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-1448
+        [Test]
+        public virtual void TestEndOffsetPositionStandardEmptyField()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+            Field f = NewField("field", "", customType);
+            Field f2 = NewField("field", "crunch man", customType);
+            doc.Add(f);
+            doc.Add(f2);
+            w.AddDocument(doc);
+            w.Dispose();
+
+            IndexReader r = DirectoryReader.Open(dir);
+            TermsEnum termsEnum = r.GetTermVectors(0).GetTerms("field").GetIterator(null);
+            Assert.IsNotNull(termsEnum.Next());
+            DocsAndPositionsEnum dpEnum = termsEnum.DocsAndPositions(null, null);
+
+            Assert.AreEqual(1, (int)termsEnum.TotalTermFreq);
+            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            dpEnum.NextPosition();
+            Assert.AreEqual(1, dpEnum.StartOffset);
+            Assert.AreEqual(7, dpEnum.EndOffset);
+
+            Assert.IsNotNull(termsEnum.Next());
+            dpEnum = termsEnum.DocsAndPositions(null, dpEnum);
+            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            dpEnum.NextPosition();
+            Assert.AreEqual(8, dpEnum.StartOffset);
+            Assert.AreEqual(11, dpEnum.EndOffset);
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-1448
+        [Test]
+        public virtual void TestEndOffsetPositionStandardEmptyField2()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+
+            Field f = NewField("field", "abcd", customType);
+            doc.Add(f);
+            doc.Add(NewField("field", "", customType));
+
+            Field f2 = NewField("field", "crunch", customType);
+            doc.Add(f2);
+
+            w.AddDocument(doc);
+            w.Dispose();
+
+            IndexReader r = DirectoryReader.Open(dir);
+            TermsEnum termsEnum = r.GetTermVectors(0).GetTerms("field").GetIterator(null);
+            Assert.IsNotNull(termsEnum.Next());
+            DocsAndPositionsEnum dpEnum = termsEnum.DocsAndPositions(null, null);
+
+            Assert.AreEqual(1, (int)termsEnum.TotalTermFreq);
+            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            dpEnum.NextPosition();
+            Assert.AreEqual(0, dpEnum.StartOffset);
+            Assert.AreEqual(4, dpEnum.EndOffset);
+
+            Assert.IsNotNull(termsEnum.Next());
+            dpEnum = termsEnum.DocsAndPositions(null, dpEnum);
+            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            dpEnum.NextPosition();
+            Assert.AreEqual(6, dpEnum.StartOffset);
+            Assert.AreEqual(12, dpEnum.EndOffset);
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-1168
+        [Test]
+        public virtual void TestTermVectorCorruption()
+        {
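+            // LUCENE-1168: mixing documents with and without term vectors across
+            // small flushed segments and merges used to corrupt the term vector
+            // files; reading every doc back verifies the index stays intact.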
+            Directory dir = NewDirectory();
+            for (int iter = 0; iter < 2; iter++)
+            {
+                IndexWriter writer = new IndexWriter(dir, ((IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(new LogDocMergePolicy()));
+
+                Document document = new Document();
+                FieldType customType = new FieldType();
+                customType.IsStored = true;
+
+                Field storedField = NewField("stored", "stored", customType);
+                document.Add(storedField);
+                writer.AddDocument(document);
+                writer.AddDocument(document);
+
+                document = new Document();
+                document.Add(storedField);
+                FieldType customType2 = new FieldType(StringField.TYPE_NOT_STORED);
+                customType2.StoreTermVectors = true;
+                customType2.StoreTermVectorPositions = true;
+                customType2.StoreTermVectorOffsets = true;
+                Field termVectorField = NewField("termVector", "termVector", customType2);
+
+                document.Add(termVectorField);
+                writer.AddDocument(document);
+                writer.ForceMerge(1);
+                writer.Dispose();
+
+                IndexReader reader = DirectoryReader.Open(dir);
+                for (int i = 0; i < reader.NumDocs; i++)
+                {
+                    reader.Document(i);
+                    reader.GetTermVectors(i);
+                }
+                reader.Dispose();
+
+                writer = new IndexWriter(dir, ((IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(new LogDocMergePolicy()));
+
+                Directory[] indexDirs = new Directory[] { new MockDirectoryWrapper(Random(), new RAMDirectory(dir, NewIOContext(Random()))) };
+                writer.AddIndexes(indexDirs);
+                writer.ForceMerge(1);
+                writer.Dispose();
+            }
+            dir.Dispose();
+        }
+
+        // LUCENE-1168
+        [Test]
+        public virtual void TestTermVectorCorruption2()
+        {
+            Directory dir = NewDirectory();
+            for (int iter = 0; iter < 2; iter++)
+            {
+                IndexWriter writer = new IndexWriter(dir, ((IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(new LogDocMergePolicy()));
+
+                Document document = new Document();
+
+                FieldType customType = new FieldType();
+                customType.IsStored = true;
+
+                Field storedField = NewField("stored", "stored", customType);
+                document.Add(storedField);
+                writer.AddDocument(document);
+                writer.AddDocument(document);
+
+                document = new Document();
+                document.Add(storedField);
+                FieldType customType2 = new FieldType(StringField.TYPE_NOT_STORED);
+                customType2.StoreTermVectors = true;
+                customType2.StoreTermVectorPositions = true;
+                customType2.StoreTermVectorOffsets = true;
+                Field termVectorField = NewField("termVector", "termVector", customType2);
+                document.Add(termVectorField);
+                writer.AddDocument(document);
+                writer.ForceMerge(1);
+                writer.Dispose();
+
+                IndexReader reader = DirectoryReader.Open(dir);
+                Assert.IsNull(reader.GetTermVectors(0));
+                Assert.IsNull(reader.GetTermVectors(1));
+                Assert.IsNotNull(reader.GetTermVectors(2));
+                reader.Dispose();
+            }
+            dir.Dispose();
+        }
+
+        // LUCENE-1168
+        [Test]
+        public virtual void TestTermVectorCorruption3()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, ((IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(new LogDocMergePolicy()));
+
+            Document document = new Document();
+            FieldType customType = new FieldType();
+            customType.IsStored = true;
+
+            Field storedField = NewField("stored", "stored", customType);
+            document.Add(storedField);
+            FieldType customType2 = new FieldType(StringField.TYPE_NOT_STORED);
+            customType2.StoreTermVectors = true;
+            customType2.StoreTermVectorPositions = true;
+            customType2.StoreTermVectorOffsets = true;
+            Field termVectorField = NewField("termVector", "termVector", customType2);
+            document.Add(termVectorField);
+            for (int i = 0; i < 10; i++)
+            {
+                writer.AddDocument(document);
+            }
+            writer.Dispose();
+
+            writer = new IndexWriter(dir, ((IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(new LogDocMergePolicy()));
+            for (int i = 0; i < 6; i++)
+            {
+                writer.AddDocument(document);
+            }
+
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            for (int i = 0; i < 10; i++)
+            {
+                reader.GetTermVectors(i);
+                reader.Document(i);
+            }
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-1008
+        [Test]
+        public virtual void TestNoTermVectorAfterTermVector()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter iw = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document document = new Document();
+            FieldType customType2 = new FieldType(StringField.TYPE_NOT_STORED);
+            customType2.StoreTermVectors = true;
+            customType2.StoreTermVectorPositions = true;
+            customType2.StoreTermVectorOffsets = true;
+            document.Add(NewField("tvtest", "a b c", customType2));
+            iw.AddDocument(document);
+            document = new Document();
+            document.Add(NewTextField("tvtest", "x y z", Field.Store.NO));
+            iw.AddDocument(document);
+            // Make first segment
+            iw.Commit();
+
+            FieldType customType = new FieldType(StringField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            document.Add(NewField("tvtest", "a b c", customType));
+            iw.AddDocument(document);
+            // Make 2nd segment
+            iw.Commit();
+
+            iw.ForceMerge(1);
+            iw.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-1010
+        [Test]
+        public virtual void TestNoTermVectorAfterTermVectorMerge()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter iw = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document document = new Document();
+            FieldType customType = new FieldType(StringField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            document.Add(NewField("tvtest", "a b c", customType));
+            iw.AddDocument(document);
+            iw.Commit();
+
+            document = new Document();
+            document.Add(NewTextField("tvtest", "x y z", Field.Store.NO));
+            iw.AddDocument(document);
+            // Make first segment
+            iw.Commit();
+
+            iw.ForceMerge(1);
+
+            FieldType customType2 = new FieldType(StringField.TYPE_NOT_STORED);
+            customType2.StoreTermVectors = true;
+            document.Add(NewField("tvtest", "a b c", customType2));
+            iw.AddDocument(document);
+            // Make 2nd segment
+            iw.Commit();
+            iw.ForceMerge(1);
+
+            iw.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs b/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs
new file mode 100644
index 0000000..c76b4ee
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs
@@ -0,0 +1,176 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Attributes;
+using Lucene.Net.Documents;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.Index
+{
+    using System.IO;
+
+    /*
+     * Copyright 2006 The Apache Software Foundation
+     *
+     * Licensed under the Apache License, Version 2.0 (the "License");
+     * you may not use this file except in compliance with the License.
+     * You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CharTermAttribute = Lucene.Net.Analysis.TokenAttributes.CharTermAttribute;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using Tokenizer = Lucene.Net.Analysis.Tokenizer;
+
+    internal class RepeatingTokenizer : Tokenizer
+    {
+        private readonly Random Random;
+        private readonly float PercentDocs;
+        private readonly int MaxTF;
+        private int Num;
+        internal ICharTermAttribute TermAtt;
+        internal string Value;
+
+        public RepeatingTokenizer(TextReader reader, string val, Random random, float percentDocs, int maxTF)
+            : base(reader)
+        {
+            this.Value = val;
+            this.Random = random;
+            this.PercentDocs = percentDocs;
+            this.MaxTF = maxTF;
+            this.TermAtt = AddAttribute<ICharTermAttribute>();
+        }
+
+        public sealed override bool IncrementToken()
+        {
+            Num--;
+            if (Num >= 0)
+            {
+                ClearAttributes();
+                TermAtt.Append(Value);
+                return true;
+            }
+            return false;
+        }
+
+        public override void Reset()
+        {
+            base.Reset();
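+            // Decide per document whether the term appears at all (percentDocs)
+            // and, if it does, how many times (1..maxTF).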
+            if (Random.NextDouble() < PercentDocs)
+            {
+                Num = Random.Next(MaxTF) + 1;
+            }
+            else
+            {
+                Num = 0;
+            }
+        }
+    }
+
+    [TestFixture]
+    public class TestTermdocPerf : LuceneTestCase
+    {
+        internal virtual void AddDocs(Random random, Directory dir, int ndocs, string field, string val, int maxTF, float percentDocs)
+        {
+            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper(random, val, maxTF, percentDocs);
+
+            Document doc = new Document();
+
+            doc.Add(NewStringField(field, val, Field.Store.NO));
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(100).SetMergePolicy(NewLogMergePolicy(100)));
+
+            for (int i = 0; i < ndocs; i++)
+            {
+                writer.AddDocument(doc);
+            }
+
+            writer.ForceMerge(1);
+            writer.Dispose();
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private Random Random;
+            private string Val;
+            private int MaxTF;
+            private float PercentDocs;
+
+            public AnalyzerAnonymousInnerClassHelper(Random random, string val, int maxTF, float percentDocs)
+            {
+                this.Random = random;
+                this.Val = val;
+                this.MaxTF = maxTF;
+                this.PercentDocs = percentDocs;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                return new TokenStreamComponents(new RepeatingTokenizer(reader, Val, Random, PercentDocs, MaxTF));
+            }
+        }
+
+        public virtual int DoTest(int iter, int ndocs, int maxTF, float percentDocs)
+        {
+            Directory dir = NewDirectory();
+
+            long start = Environment.TickCount;
+            AddDocs(Random(), dir, ndocs, "foo", "val", maxTF, percentDocs);
+            long end = Environment.TickCount;
+            if (VERBOSE)
+            {
+                Console.WriteLine("milliseconds for creation of " + ndocs + " docs = " + (end - start));
+            }
+
+            IndexReader reader = DirectoryReader.Open(dir);
+
+            TermsEnum tenum = MultiFields.GetTerms(reader, "foo").GetIterator(null);
+
+            start = Environment.TickCount;
+
+            int ret = 0;
+            DocsEnum tdocs = null;
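+            // Summing doc IDs gives the loop an observable result so it cannot be
+            // optimized away; the DocsEnum is reused across iterations.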
+            Random random = new Random(Random().Next());
+            for (int i = 0; i < iter; i++)
+            {
+                tenum.SeekCeil(new BytesRef("val"));
+                tdocs = TestUtil.Docs(random, tenum, MultiFields.GetLiveDocs(reader), tdocs, DocsEnum.FLAG_NONE);
+                while (tdocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+                {
+                    ret += tdocs.DocID;
+                }
+            }
+
+            end = Environment.TickCount;
+            if (VERBOSE)
+            {
+                Console.WriteLine("milliseconds for " + iter + " TermDocs iteration: " + (end - start));
+            }
+
+            return ret;
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit does not support the Timeout attribute on .NET Core.
+        [Timeout(120000)]
+#endif
+        [Test, LongRunningTest, HasTimeout]
+        public virtual void TestTermDocPerf()
+        {
+            // performance test for 10% of documents containing a term
+            DoTest(100000, 10000, 3, .1f);
+        }
+    }
+}
\ No newline at end of file


[48/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xStoredFieldsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xStoredFieldsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xStoredFieldsFormat.cs
new file mode 100644
index 0000000..e78db47
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xStoredFieldsFormat.cs
@@ -0,0 +1,145 @@
+using Lucene.Net.Attributes;
+using NUnit.Framework;
+
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using BaseStoredFieldsFormatTestCase = Lucene.Net.Index.BaseStoredFieldsFormatTestCase;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    [TestFixture]
+    public class TestLucene3xStoredFieldsFormat : BaseStoredFieldsFormatTestCase
+    {
+        /// <summary>
+        /// LUCENENET specific:
+        /// non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
+        }
+
+        protected override Codec Codec
+        {
+            get
+            {
+                Assert.IsTrue(OLD_FORMAT_IMPERSONATION_IS_ACTIVE, "This should have been set up in the test fixture");
+                return new PreFlexRWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+            }
+        }
+
+        [Test]
+        public override void TestWriteReadMerge()
+        {
+            AssumeFalse("impersonation isn't good enough", true);
+            // this test tries to switch up between the codec and another codec.
+            // for 3.x: we currently cannot take an index with existing 4.x segments
+            // and merge into newly formed 3.x segments.
+        }
+
+
+
+        #region BaseStoredFieldsFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestRandomStoredFields()
+        {
+            base.TestRandomStoredFields();
+        }
+
+        [Test]
+        // LUCENE-1727: make sure doc fields are stored in order
+        public override void TestStoredFieldsOrder()
+        {
+            base.TestStoredFieldsOrder();
+        }
+
+        [Test]
+        // LUCENE-1219
+        public override void TestBinaryFieldOffsetLength()
+        {
+            base.TestBinaryFieldOffsetLength();
+        }
+
+        [Test]
+        public override void TestNumericField()
+        {
+            base.TestNumericField();
+        }
+
+        [Test]
+        public override void TestIndexedBit()
+        {
+            base.TestIndexedBit();
+        }
+
+        [Test]
+        public override void TestReadSkip()
+        {
+            base.TestReadSkip();
+        }
+
+        [Test]
+        public override void TestEmptyDocs()
+        {
+            base.TestEmptyDocs();
+        }
+
+        [Test]
+        public override void TestConcurrentReads()
+        {
+            base.TestConcurrentReads();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit does not support the Timeout attribute on .NET Core.
+        [Timeout(120000)]
+#endif
+        [Test, HasTimeout]
+        public override void TestBigDocuments()
+        {
+            base.TestBigDocuments();
+        }
+
+        [Test]
+        public override void TestBulkMergeWithDeletes()
+        {
+            base.TestBulkMergeWithDeletes();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xTermVectorsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xTermVectorsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xTermVectorsFormat.cs
new file mode 100644
index 0000000..cbd59ed
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestLucene3xTermVectorsFormat.cs
@@ -0,0 +1,122 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using System.Collections.Generic;
+    using Attributes;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using BaseTermVectorsFormatTestCase = Lucene.Net.Index.BaseTermVectorsFormatTestCase;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    public class TestLucene3xTermVectorsFormat : BaseTermVectorsFormatTestCase
+    {
+        [SetUp]
+        public override void SetUp()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
+            base.SetUp();
+        }
+
+        protected override Codec Codec
+        {
+            get
+            {
+                Assert.IsTrue(OLD_FORMAT_IMPERSONATION_IS_ACTIVE, "This should have been set up in the test fixture");
+                return new PreFlexRWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+            }
+        }
+
+        protected override IEnumerable<Options> ValidOptions()
+        {
+            return ValidOptions(Options.NONE, Options.POSITIONS_AND_OFFSETS);
+        }
+
+
+        #region BaseTermVectorsFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        // only one doc with vectors
+        public override void TestRareVectors()
+        {
+            base.TestRareVectors();
+        }
+
+        [Test]
+        public override void TestHighFreqs()
+        {
+            base.TestHighFreqs();
+        }
+
+        [Test]
+        public override void TestLotsOfFields()
+        {
+            base.TestLotsOfFields();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit does not support the Timeout attribute on .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        // different options for the same field
+        public override void TestMixedOptions()
+        {
+            base.TestMixedOptions();
+        }
+
+        [Test]
+        public override void TestRandom()
+        {
+            base.TestRandom();
+        }
+
+        [Test]
+        public override void TestMerge()
+        {
+            base.TestMerge();
+        }
+
+        [Test]
+        // run random tests from different threads to make sure the per-thread clones
+        // don't share mutable data
+        public override void TestClone()
+        {
+            base.TestClone();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs
new file mode 100644
index 0000000..d4b1b63
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestSurrogates.cs
@@ -0,0 +1,424 @@
+using System;
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using Attributes;
+    using Lucene.Net.Analysis;
+    using Lucene.Net.Index;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Store;
+    using Lucene.Net.Util;
+    using NUnit.Framework;
+
+    [TestFixture]
+    public class TestSurrogates : LuceneTestCase
+    {
+        /// <summary>
+        /// We will manually instantiate PreFlex-RW here.
+        ///
+        /// LUCENENET specific:
+        /// non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
+        }
+
+        private static string MakeDifficultRandomUnicodeString(Random r)
+        {
+            int end = r.Next(20);
+            if (end == 0)
+            {
+                // allow 0 length
+                return "";
+            }
+            char[] buffer = new char[end];
+            for (int i = 0; i < end; i++)
+            {
+                int t = r.Next(5);
+
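+                // Mix valid surrogate pairs, a tiny ASCII alphabet ('a'/'b'), and
+                // U+E000/U+E001: code units in these ranges sort differently under
+                // UTF-16 versus UTF-8 order, which is what the surrogate tests stress.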
+                if (0 == t && i < end - 1)
+                {
+                    // hi
+                    buffer[i++] = (char)(0xd800 + r.Next(2));
+                    // lo
+                    buffer[i] = (char)(0xdc00 + r.Next(2));
+                }
+                else if (t <= 3)
+                {
+                    buffer[i] = (char)('a' + r.Next(2));
+                }
+                else if (4 == t)
+                {
+                    buffer[i] = (char)(0xe000 + r.Next(2));
+                }
+            }
+
+            return new string(buffer, 0, end);
+        }
+
+        private static string ToHexString(Term t)
+        {
+            return t.Field + ":" + UnicodeUtil.ToHexString(t.Text());
+        }
+
+        private string GetRandomString(Random r)
+        {
+            string s;
+            if (r.Next(5) == 1)
+            {
+                if (r.Next(3) == 1)
+                {
+                    s = MakeDifficultRandomUnicodeString(r);
+                }
+                else
+                {
+                    s = TestUtil.RandomUnicodeString(r);
+                }
+            }
+            else
+            {
+                s = TestUtil.RandomRealisticUnicodeString(r);
+            }
+            return s;
+        }
+
+        private sealed class SortTermAsUTF16Comparer : IComparer<Term>
+        {
+#pragma warning disable 612, 618
+            private static readonly IComparer<BytesRef> LegacyComparer = BytesRef.UTF8SortedAsUTF16Comparer;
+#pragma warning restore 612, 618
+
+            public int Compare(Term term1, Term term2)
+            {
+                if (term1.Field.Equals(term2.Field))
+                {
+                    return LegacyComparer.Compare(term1.Bytes, term2.Bytes);
+                }
+                else
+                {
+                    return System.String.Compare(term1.Field, term2.Field, System.StringComparison.Ordinal);
+                }
+            }
+        }
+
+        private static readonly SortTermAsUTF16Comparer TermAsUTF16Comparer = new SortTermAsUTF16Comparer();
+
+        // single straight enum
+        private void DoTestStraightEnum(IList<Term> fieldTerms, IndexReader reader, int uniqueTermCount)
+        {
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: top now enum reader=" + reader);
+            }
+            Fields fields = MultiFields.GetFields(reader);
+            {
+                // Test straight enum:
+                int termCount = 0;
+                foreach (string field in fields)
+                {
+                    Terms terms = fields.GetTerms(field);
+                    Assert.IsNotNull(terms);
+                    TermsEnum termsEnum = terms.GetIterator(null);
+                    BytesRef text;
+                    BytesRef lastText = null;
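+                    // terms must come back in increasing code point (UTF-8 byte) order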
+                    while ((text = termsEnum.Next()) != null)
+                    {
+                        Term exp = fieldTerms[termCount];
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("  got term=" + field + ":" + UnicodeUtil.ToHexString(text.Utf8ToString()));
+                            Console.WriteLine("       exp=" + exp.Field + ":" + UnicodeUtil.ToHexString(exp.Text()));
+                            Console.WriteLine();
+                        }
+                        if (lastText == null)
+                        {
+                            lastText = BytesRef.DeepCopyOf(text);
+                        }
+                        else
+                        {
+                            Assert.IsTrue(lastText.CompareTo(text) < 0);
+                            lastText.CopyBytes(text);
+                        }
+                        Assert.AreEqual(exp.Field, field);
+                        Assert.AreEqual(exp.Bytes, text);
+                        termCount++;
+                    }
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("  no more terms for field=" + field);
+                    }
+                }
+                Assert.AreEqual(uniqueTermCount, termCount);
+            }
+        }
+
+        // randomly seeks to a term that we know exists, then calls Next()
+        // repeatedly from there
+        private void DoTestSeekExists(Random r, IList<Term> fieldTerms, IndexReader reader)
+        {
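+            // cache one TermsEnum per field so repeated seeks reuse the same enum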
+            IDictionary<string, TermsEnum> tes = new Dictionary<string, TermsEnum>();
+
+            // Test random seek to existing term, then enum:
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: top now seek");
+            }
+
+            int num = AtLeast(100);
+            for (int iter = 0; iter < num; iter++)
+            {
+                // pick random field+term
+                int spot = r.Next(fieldTerms.Count);
+                Term term = fieldTerms[spot];
+                string field = term.Field;
+
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: exist seek field=" + field + " term=" + UnicodeUtil.ToHexString(term.Text()));
+                }
+
+                // seek to it
+                TermsEnum te;
+                if (!tes.TryGetValue(field, out te))
+                {
+                    te = MultiFields.GetTerms(reader, field).GetIterator(null);
+                    tes[field] = te;
+                }
+
+                if (VERBOSE)
+                {
+                    Console.WriteLine("  done get enum");
+                }
+
+                // seek should find the term
+                Assert.AreEqual(TermsEnum.SeekStatus.FOUND, te.SeekCeil(term.Bytes));
+
+                // now .next() this many times:
+                int ct = TestUtil.NextInt(r, 5, 100);
+                for (int i = 0; i < ct; i++)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: now next()");
+                    }
+                    if (1 + spot + i >= fieldTerms.Count)
+                    {
+                        break;
+                    }
+                    term = fieldTerms[1 + spot + i];
+                    if (!term.Field.Equals(field))
+                    {
+                        Assert.IsNull(te.Next());
+                        break;
+                    }
+                    else
+                    {
+                        BytesRef t = te.Next();
+
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("  got term=" + (t == null ? null : UnicodeUtil.ToHexString(t.Utf8ToString())));
+                            Console.WriteLine("       exp=" + UnicodeUtil.ToHexString(term.Text()));
+                        }
+
+                        Assert.AreEqual(term.Bytes, t);
+                    }
+                }
+            }
+        }
+
+        private void DoTestSeekDoesNotExist(Random r, int numField, IList<Term> fieldTerms, Term[] fieldTermsArray, IndexReader reader)
+        {
+            IDictionary<string, TermsEnum> tes = new Dictionary<string, TermsEnum>();
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: top random seeks");
+            }
+
+            {
+                int num = AtLeast(100);
+                for (int iter = 0; iter < num; iter++)
+                {
+                    // seek to random spot
+                    string field = StringHelper.Intern("f" + r.Next(numField));
+                    Term tx = new Term(field, GetRandomString(r));
+
+                    int spot = Array.BinarySearch(fieldTermsArray, tx);
+
+                    if (spot < 0)
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: non-exist seek to " + field + ":" + UnicodeUtil.ToHexString(tx.Text()));
+                        }
+
+                        // term does not exist:
+                        TermsEnum te;
+                        if (!tes.TryGetValue(field, out te))
+                        {
+                            te = MultiFields.GetTerms(reader, field).GetIterator(null);
+                            tes[field] = te;
+                        }
+
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("  got enum");
+                        }
+
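+                        // Array.BinarySearch returns the bitwise complement of the
+                        // insertion point when the key is absent; recover it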
+                        spot = -spot - 1;
+
+                        if (spot == fieldTerms.Count || !fieldTerms[spot].Field.Equals(field))
+                        {
+                            Assert.AreEqual(TermsEnum.SeekStatus.END, te.SeekCeil(tx.Bytes));
+                        }
+                        else
+                        {
+                            Assert.AreEqual(TermsEnum.SeekStatus.NOT_FOUND, te.SeekCeil(tx.Bytes));
+
+                            if (VERBOSE)
+                            {
+                                Console.WriteLine("  got term=" + UnicodeUtil.ToHexString(te.Term.Utf8ToString()));
+                                Console.WriteLine("  exp term=" + UnicodeUtil.ToHexString(fieldTerms[spot].Text()));
+                            }
+
+                            Assert.AreEqual(fieldTerms[spot].Bytes, te.Term);
+
+                            // now .next() this many times:
+                            int ct = TestUtil.NextInt(r, 5, 100);
+                            for (int i = 0; i < ct; i++)
+                            {
+                                if (VERBOSE)
+                                {
+                                    Console.WriteLine("TEST: now next()");
+                                }
+                                if (1 + spot + i >= fieldTerms.Count)
+                                {
+                                    break;
+                                }
+                                Term term = fieldTerms[1 + spot + i];
+                                if (!term.Field.Equals(field))
+                                {
+                                    Assert.IsNull(te.Next());
+                                    break;
+                                }
+                                else
+                                {
+                                    BytesRef t = te.Next();
+
+                                    if (VERBOSE)
+                                    {
+                                        Console.WriteLine("  got term=" + (t == null ? null : UnicodeUtil.ToHexString(t.Utf8ToString())));
+                                    Console.WriteLine("       exp=" + UnicodeUtil.ToHexString(term.Text()));
+                                    }
+
+                                    Assert.AreEqual(term.Bytes, t);
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit's TimeoutAttribute is not supported on .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public virtual void TestSurrogatesOrder()
+        {
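+            // Index random terms (including the difficult surrogate strings above)
+            // with the preflex-rw codec, then verify that enumeration and seeking
+            // observe code point order even though 3.x stores terms in UTF-16 order.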
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                .SetCodec(new PreFlexRWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE)));
+
+            int numField = TestUtil.NextInt(Random(), 2, 5);
+
+            int uniqueTermCount = 0;
+
+            int tc = 0;
+
+            var fieldTerms = new List<Term>();
+
+            for (int f = 0; f < numField; f++)
+            {
+                string field = "f" + f;
+                int numTerms = AtLeast(200);
+
+                ISet<string> uniqueTerms = new HashSet<string>();
+
+                for (int i = 0; i < numTerms; i++)
+                {
+                    string term = GetRandomString(Random()) + "_ " + (tc++);
+                    uniqueTerms.Add(term);
+                    fieldTerms.Add(new Term(field, term));
+                    Documents.Document doc = new Documents.Document();
+                    doc.Add(NewStringField(field, term, Field.Store.NO));
+                    w.AddDocument(doc);
+                }
+                uniqueTermCount += uniqueTerms.Count;
+            }
+
+            IndexReader reader = w.Reader;
+
+            if (VERBOSE)
+            {
+                fieldTerms.Sort(TermAsUTF16Comparer);
+
+                Console.WriteLine("\nTEST: UTF16 order");
+                foreach (Term t in fieldTerms)
+                {
+                    Console.WriteLine("  " + ToHexString(t));
+                }
+            }
+
+            // sorts in code point order:
+            fieldTerms.Sort();
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: codepoint order");
+                foreach (Term t in fieldTerms)
+                {
+                    Console.WriteLine("  " + ToHexString(t));
+                }
+            }
+
+            Term[] fieldTermsArray = fieldTerms.ToArray();
+
+            //SegmentInfo si = makePreFlexSegment(r, "_0", dir, fieldInfos, codec, fieldTerms);
+
+            //FieldsProducer fields = codec.fieldsProducer(new SegmentReadState(dir, si, fieldInfos, 1024, 1));
+            //Assert.IsNotNull(fields);
+
+            DoTestStraightEnum(fieldTerms, reader, uniqueTermCount);
+            DoTestSeekExists(Random(), fieldTerms, reader);
+            DoTestSeekDoesNotExist(Random(), numField, fieldTerms, fieldTermsArray, reader);
+
+            reader.Dispose();
+            w.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene3x/TestTermInfosReaderIndex.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene3x/TestTermInfosReaderIndex.cs b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestTermInfosReaderIndex.cs
new file mode 100644
index 0000000..2d9dd05
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene3x/TestTermInfosReaderIndex.cs
@@ -0,0 +1,235 @@
+using System;
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldInfos = Lucene.Net.Index.FieldInfos;
+    using Fields = Lucene.Net.Index.Fields;
+    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
+    using IndexInput = Lucene.Net.Store.IndexInput;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using LogMergePolicy = Lucene.Net.Index.LogMergePolicy;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using MultiFields = Lucene.Net.Index.MultiFields;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using SegmentReader = Lucene.Net.Index.SegmentReader;
+    using Term = Lucene.Net.Index.Term;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using Terms = Lucene.Net.Index.Terms;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TopDocs = Lucene.Net.Search.TopDocs;
+
+#pragma warning disable 612, 618
+    [TestFixture]
+    public class TestTermInfosReaderIndex : LuceneTestCase
+    {
+        private static int NUMBER_OF_DOCUMENTS;
+        private static int NUMBER_OF_FIELDS;
+        private static TermInfosReaderIndex Index;
+        private static Directory Directory;
+        private static SegmentTermEnum TermEnum;
+        private static int IndexDivisor;
+        private static int TermIndexInterval;
+        private static IndexReader Reader;
+        private static IList<Term> SampleTerms;
+
+        /// <summary>
+        /// We will manually instantiate preflex-rw here.
+        /// 
+        /// LUCENENET specific:
+        /// This method is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            // NOTE: turn off compound file; this test will open some index files directly.
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
+            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false)).SetUseCompoundFile(false);
+
+            TermIndexInterval = config.TermIndexInterval;
+            IndexDivisor = TestUtil.NextInt(Random(), 1, 10);
+            NUMBER_OF_DOCUMENTS = AtLeast(100);
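+            // make sure enough terms get indexed that the terms index (.tii) ends
+            // up with at least a few entries at the chosen index divisor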
+            NUMBER_OF_FIELDS = AtLeast(Math.Max(10, 3 * TermIndexInterval * IndexDivisor / NUMBER_OF_DOCUMENTS));
+
+            Directory = NewDirectory();
+
+            config.SetCodec(new PreFlexRWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE));
+            LogMergePolicy mp = NewLogMergePolicy();
+            // NOTE: turn off compound file; this test will open some index files directly.
+            mp.NoCFSRatio = 0.0;
+            config.SetMergePolicy(mp);
+
+            Populate(Directory, config);
+
+            DirectoryReader r0 = IndexReader.Open(Directory);
+            SegmentReader r = LuceneTestCase.GetOnlySegmentReader(r0);
+            string segment = r.SegmentName;
+            r.Dispose();
+
+            FieldInfosReader infosReader = (new PreFlexRWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE)).FieldInfosFormat.FieldInfosReader;
+            FieldInfos fieldInfos = infosReader.Read(Directory, segment, "", IOContext.READ_ONCE);
+            string segmentFileName = IndexFileNames.SegmentFileName(segment, "", Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION);
+            long tiiFileLength = Directory.FileLength(segmentFileName);
+            IndexInput input = Directory.OpenInput(segmentFileName, NewIOContext(Random()));
+            TermEnum = new SegmentTermEnum(Directory.OpenInput(IndexFileNames.SegmentFileName(segment, "", Lucene3xPostingsFormat.TERMS_EXTENSION), NewIOContext(Random())), fieldInfos, false);
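+            // with an index divisor, only every (indexInterval * IndexDivisor)-th
+            // term is actually kept in the in-memory terms index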
+            int totalIndexInterval = TermEnum.indexInterval * IndexDivisor;
+
+            SegmentTermEnum indexEnum = new SegmentTermEnum(input, fieldInfos, true);
+            Index = new TermInfosReaderIndex(indexEnum, IndexDivisor, tiiFileLength, totalIndexInterval);
+            indexEnum.Dispose();
+            input.Dispose();
+
+            Reader = IndexReader.Open(Directory);
+            SampleTerms = Sample(Random(), Reader, 1000);
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            TermEnum.Dispose();
+            Reader.Dispose();
+            Directory.Dispose();
+            TermEnum = null;
+            Reader = null;
+            Directory = null;
+            Index = null;
+            SampleTerms = null;
+        }
+
+        [Test]
+        public virtual void TestSeekEnum()
+        {
+            int indexPosition = 3;
+            SegmentTermEnum clone = (SegmentTermEnum)TermEnum.Clone();
+            Term term = FindTermThatWouldBeAtIndex(clone, indexPosition);
+            SegmentTermEnum enumerator = clone;
+            Index.SeekEnum(enumerator, indexPosition);
+            Assert.AreEqual(term, enumerator.Term());
+            clone.Dispose();
+        }
+
+        [Test]
+        public virtual void TestCompareTo()
+        {
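+            // the in-memory terms index must order terms exactly like Term.CompareTo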
+            Term term = new Term("field" + Random().Next(NUMBER_OF_FIELDS), Text);
+            for (int i = 0; i < Index.Length; i++)
+            {
+                Term t = Index.GetTerm(i);
+                int compareTo = term.CompareTo(t);
+                Assert.AreEqual(compareTo, Index.CompareTo(term, i));
+            }
+        }
+
+        [Test]
+        public virtual void TestRandomSearchPerformance()
+        {
+            IndexSearcher searcher = new IndexSearcher(Reader);
+            foreach (Term t in SampleTerms)
+            {
+                TermQuery query = new TermQuery(t);
+                TopDocs topDocs = searcher.Search(query, 10);
+                Assert.IsTrue(topDocs.TotalHits > 0);
+            }
+        }
+
+        private static IList<Term> Sample(Random random, IndexReader reader, int size)
+        {
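+            // fill up to 'size' terms, then overwrite random slots so later terms
+            // can still appear (an approximate, not exact, reservoir sample)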
+            IList<Term> sample = new List<Term>();
+            Fields fields = MultiFields.GetFields(reader);
+            foreach (string field in fields)
+            {
+                Terms terms = fields.GetTerms(field);
+                Assert.IsNotNull(terms);
+                TermsEnum termsEnum = terms.GetIterator(null);
+                while (termsEnum.Next() != null)
+                {
+                    if (sample.Count >= size)
+                    {
+                        int pos = random.Next(size);
+                        sample[pos] = new Term(field, termsEnum.Term);
+                    }
+                    else
+                    {
+                        sample.Add(new Term(field, termsEnum.Term));
+                    }
+                }
+            }
+            Collections.Shuffle(sample);
+            return sample;
+        }
+
+        private Term FindTermThatWouldBeAtIndex(SegmentTermEnum termEnum, int index)
+        {
+            int termPosition = index * TermIndexInterval * IndexDivisor;
+            for (int i = 0; i < termPosition; i++)
+            {
+                // TODO: this test just uses random terms, so this is always possible
+                AssumeTrue("ran out of terms", termEnum.Next());
+            }
+            Term term = termEnum.Term();
+            // An indexed term is only written when the term after
+            // it exists, so, if the number of terms is 0 mod
+            // termIndexInterval, the last index term will not be
+            // written; so we require a term after this term
+            // as well:
+            AssumeTrue("ran out of terms", termEnum.Next());
+            return term;
+        }
+
+        private void Populate(Directory directory, IndexWriterConfig config)
+        {
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, config);
+            for (int i = 0; i < NUMBER_OF_DOCUMENTS; i++)
+            {
+                Document document = new Document();
+                for (int f = 0; f < NUMBER_OF_FIELDS; f++)
+                {
+                    document.Add(NewStringField("field" + f, Text, Field.Store.NO));
+                }
+                writer.AddDocument(document);
+            }
+            writer.ForceMerge(1);
+            writer.Dispose();
+        }
+
+        private static string Text
+        {
+            get
+            {
+                return Convert.ToString(Random().Next());
+            }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene40/TestBitVector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene40/TestBitVector.cs b/src/Lucene.Net.Tests/Codecs/Lucene40/TestBitVector.cs
new file mode 100644
index 0000000..3103129
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene40/TestBitVector.cs
@@ -0,0 +1,325 @@
+namespace Lucene.Net.Codecs.Lucene40
+{
+    using NUnit.Framework;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Directory = Lucene.Net.Store.Directory;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// <code>TestBitVector</code> tests the <code>BitVector</code>, obviously.
+    /// </summary>
+    [TestFixture]
+    public class TestBitVector : LuceneTestCase
+    {
+        /// <summary>
+        /// Test the default constructor on BitVectors of various sizes.
+        /// </summary>
+        [Test]
+        public virtual void TestConstructSize()
+        {
+            DoTestConstructOfSize(8);
+            DoTestConstructOfSize(20);
+            DoTestConstructOfSize(100);
+            DoTestConstructOfSize(1000);
+        }
+
+        private void DoTestConstructOfSize(int n)
+        {
+            BitVector bv = new BitVector(n);
+            Assert.AreEqual(n, bv.Length); // LUCENENET NOTE: Length is the equivalent of size()
+        }
+
+        /// <summary>
+        /// Test the get() and set() methods on BitVectors of various sizes.
+        /// </summary>
+        [Test]
+        public virtual void TestGetSet()
+        {
+            DoTestGetSetVectorOfSize(8);
+            DoTestGetSetVectorOfSize(20);
+            DoTestGetSetVectorOfSize(100);
+            DoTestGetSetVectorOfSize(1000);
+        }
+
+        private void DoTestGetSetVectorOfSize(int n)
+        {
+            BitVector bv = new BitVector(n);
+            for (int i = 0; i < bv.Length; i++) // LUCENENET NOTE: Length is the equivalent of size()
+            {
+                // ensure a set bit can be retrieved via Get()
+                Assert.IsFalse(bv.Get(i));
+                bv.Set(i);
+                Assert.IsTrue(bv.Get(i));
+            }
+        }
+
+        /// <summary>
+        /// Test the clear() method on BitVectors of various sizes.
+        /// </summary>
+        [Test]
+        public virtual void TestClear()
+        {
+            DoTestClearVectorOfSize(8);
+            DoTestClearVectorOfSize(20);
+            DoTestClearVectorOfSize(100);
+            DoTestClearVectorOfSize(1000);
+        }
+
+        private void DoTestClearVectorOfSize(int n)
+        {
+            BitVector bv = new BitVector(n);
+            for (int i = 0; i < bv.Length; i++) // LUCENENET NOTE: Length is the equivalent of size()
+            {
+                // ensure a set bit is cleared
+                Assert.IsFalse(bv.Get(i));
+                bv.Set(i);
+                Assert.IsTrue(bv.Get(i));
+                bv.Clear(i);
+                Assert.IsFalse(bv.Get(i));
+            }
+        }
+
+        /// <summary>
+        /// Test the count() method on BitVectors of various sizes.
+        /// </summary>
+        [Test]
+        public virtual void TestCount()
+        {
+            DoTestCountVectorOfSize(8);
+            DoTestCountVectorOfSize(20);
+            DoTestCountVectorOfSize(100);
+            DoTestCountVectorOfSize(1000);
+        }
+
+        private void DoTestCountVectorOfSize(int n)
+        {
+            BitVector bv = new BitVector(n);
+            // test count when incrementally setting bits
+            for (int i = 0; i < bv.Length; i++) // LUCENENET NOTE: Length is the equivalent of size()
+            {
+                Assert.IsFalse(bv.Get(i));
+                Assert.AreEqual(i, bv.Count());
+                bv.Set(i);
+                Assert.IsTrue(bv.Get(i));
+                Assert.AreEqual(i + 1, bv.Count());
+            }
+
+            bv = new BitVector(n);
+            // test count when setting then clearing bits
+            for (int i = 0; i < bv.Length; i++) // LUCENENET NOTE: Length is the equivalent of size()
+            {
+                Assert.IsFalse(bv.Get(i));
+                Assert.AreEqual(0, bv.Count());
+                bv.Set(i);
+                Assert.IsTrue(bv.Get(i));
+                Assert.AreEqual(1, bv.Count());
+                bv.Clear(i);
+                Assert.IsFalse(bv.Get(i));
+                Assert.AreEqual(0, bv.Count());
+            }
+        }
+
+        /// <summary>
+        /// Test writing and construction to/from Directory.
+        /// </summary>
+        [Test]
+        public virtual void TestWriteRead()
+        {
+            DoTestWriteRead(8);
+            DoTestWriteRead(20);
+            DoTestWriteRead(100);
+            DoTestWriteRead(1000);
+        }
+
+        private void DoTestWriteRead(int n)
+        {
+            MockDirectoryWrapper d = new MockDirectoryWrapper(Random(), new RAMDirectory());
+            d.PreventDoubleWrite = false;
+            BitVector bv = new BitVector(n);
+            // test count when incrementally setting bits
+            for (int i = 0; i < bv.Length; i++) // LUCENENET NOTE: Length is the equivalent of size()
+            {
+                Assert.IsFalse(bv.Get(i));
+                Assert.AreEqual(i, bv.Count());
+                bv.Set(i);
+                Assert.IsTrue(bv.Get(i));
+                Assert.AreEqual(i + 1, bv.Count());
+                bv.Write(d, "TESTBV", NewIOContext(Random()));
+                BitVector compare = new BitVector(d, "TESTBV", NewIOContext(Random()));
+                // compare bit vectors with bits set incrementally
+                Assert.IsTrue(DoCompare(bv, compare));
+            }
+        }
+
+        /// <summary>
+        /// Test r/w when size/count cause switching between bit-set and d-gaps file formats.
+        /// </summary>
+        [Test]
+        public virtual void TestDgaps()
+        {
+            DoTestDgaps(1, 0, 1);
+            DoTestDgaps(10, 0, 1);
+            DoTestDgaps(100, 0, 1);
+            DoTestDgaps(1000, 4, 7);
+            DoTestDgaps(10000, 40, 43);
+            DoTestDgaps(100000, 415, 418);
+            DoTestDgaps(1000000, 3123, 3126);
+            // now exercise skipping of fully populated bytes in the bitset (they are omitted if the bitset is sparse)
+            MockDirectoryWrapper d = new MockDirectoryWrapper(Random(), new RAMDirectory());
+            d.PreventDoubleWrite = false;
+            BitVector bv = new BitVector(10000);
+            bv.Set(0);
+            for (int i = 8; i < 16; i++)
+            {
+                bv.Set(i);
+            } // make sure we have one byte full of set bits
+            for (int i = 32; i < 40; i++)
+            {
+                bv.Set(i);
+            } // get a second byte full of set bits
+            // add some more bits here
+            for (int i = 40; i < 10000; i++)
+            {
+                if (Random().Next(1000) == 0)
+                {
+                    bv.Set(i);
+                }
+            }
+            bv.Write(d, "TESTBV", NewIOContext(Random()));
+            BitVector compare = new BitVector(d, "TESTBV", NewIOContext(Random()));
+            Assert.IsTrue(DoCompare(bv, compare));
+        }
+
+        private void DoTestDgaps(int size, int count1, int count2)
+        {
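+            // count1..count2 brackets the count at which writing switches between
+            // the dense bit-set format and the sparse d-gaps format for this size,
+            // so the round trips below cross the format boundary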
+            MockDirectoryWrapper d = new MockDirectoryWrapper(Random(), new RAMDirectory());
+            d.PreventDoubleWrite = false;
+            BitVector bv = new BitVector(size);
+            bv.InvertAll();
+            for (int i = 0; i < count1; i++)
+            {
+                bv.Clear(i);
+                Assert.AreEqual(i + 1, size - bv.Count());
+            }
+            bv.Write(d, "TESTBV", NewIOContext(Random()));
+            // gradually increase number of set bits
+            for (int i = count1; i < count2; i++)
+            {
+                BitVector bv2 = new BitVector(d, "TESTBV", NewIOContext(Random()));
+                Assert.IsTrue(DoCompare(bv, bv2));
+                bv = bv2;
+                bv.Clear(i);
+                Assert.AreEqual(i + 1, size - bv.Count());
+                bv.Write(d, "TESTBV", NewIOContext(Random()));
+            }
+            // now start decreasing number of set bits
+            for (int i = count2 - 1; i >= count1; i--)
+            {
+                BitVector bv2 = new BitVector(d, "TESTBV", NewIOContext(Random()));
+                Assert.IsTrue(DoCompare(bv, bv2));
+                bv = bv2;
+                bv.Set(i);
+                Assert.AreEqual(i, size - bv.Count());
+                bv.Write(d, "TESTBV", NewIOContext(Random()));
+            }
+        }
+
+        [Test]
+        public virtual void TestSparseWrite()
+        {
+            Directory d = NewDirectory();
+            const int numBits = 10240;
+            BitVector bv = new BitVector(numBits);
+            bv.InvertAll();
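+            // with at most 4 of 10240 bits cleared, the d-gaps format only has to
+            // encode a handful of gaps, so the written file should stay tiny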
+            int numToClear = Random().Next(5);
+            for (int i = 0; i < numToClear; i++)
+            {
+                bv.Clear(Random().Next(numBits));
+            }
+            bv.Write(d, "test", NewIOContext(Random()));
+            long size = d.FileLength("test");
+            Assert.IsTrue(size < 100, "size=" + size);
+            d.Dispose();
+        }
+
+        [Test]
+        public virtual void TestClearedBitNearEnd()
+        {
+            Directory d = NewDirectory();
+            int numBits = TestUtil.NextInt(Random(), 7, 1000);
+            BitVector bv = new BitVector(numBits);
+            bv.InvertAll();
+            bv.Clear(numBits - TestUtil.NextInt(Random(), 1, 7));
+            bv.Write(d, "test", NewIOContext(Random()));
+            Assert.AreEqual(numBits - 1, bv.Count());
+            d.Dispose();
+        }
+
+        [Test]
+        public virtual void TestMostlySet()
+        {
+            Directory d = NewDirectory();
+            int numBits = TestUtil.NextInt(Random(), 30, 1000);
+            for (int numClear = 0; numClear < 20; numClear++)
+            {
+                BitVector bv = new BitVector(numBits);
+                bv.InvertAll();
+                int count = 0;
+                while (count < numClear)
+                {
+                    int bit = Random().Next(numBits);
+                    // Don't use getAndClear, so that count is recomputed
+                    if (bv.Get(bit))
+                    {
+                        bv.Clear(bit);
+                        count++;
+                        Assert.AreEqual(numBits - count, bv.Count());
+                    }
+                }
+            }
+
+            d.Dispose();
+        }
+
+        /// <summary>
+        /// Compare two BitVectors.
+        /// This should really be an Equals method on BitVector itself. </summary>
+        /// <param name="bv"> One bit vector </param>
+        /// <param name="compare"> The second to compare </param>
+        private bool DoCompare(BitVector bv, BitVector compare)
+        {
+            bool equal = true;
+            for (int i = 0; i < bv.Length; i++) // LUCENENET NOTE: Length is the equivalent of size()
+            {
+                // bits must be equal
+                if (bv.Get(i) != compare.Get(i))
+                {
+                    equal = false;
+                    break;
+                }
+            }
+            Assert.AreEqual(bv.Count(), compare.Count());
+            return equal;
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40DocValuesFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40DocValuesFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40DocValuesFormat.cs
new file mode 100644
index 0000000..d63a6b3
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40DocValuesFormat.cs
@@ -0,0 +1,555 @@
+namespace Lucene.Net.Codecs.Lucene40
+{
+    using NUnit.Framework;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using BaseDocValuesFormatTestCase = Lucene.Net.Index.BaseDocValuesFormatTestCase;
+
+    /// <summary>
+    /// Tests Lucene40DocValuesFormat
+    /// </summary>
+    public class TestLucene40DocValuesFormat : BaseDocValuesFormatTestCase
+    {
+        /// <summary>
+        /// LUCENENET specific:
+        /// This method is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
+        }
+
+        protected override Codec Codec
+        {
+            get
+            {
+                Assert.True(OLD_FORMAT_IMPERSONATION_IS_ACTIVE, "Expecting OLD_FORMAT_IMPERSONATION_IS_ACTIVE to be set before creating the codec");
+                return new Lucene40RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+            }
+        }
+
+        // LUCENE-4583: this codec should throw IAE on huge binary values:
+        protected internal override bool CodecAcceptsHugeBinaryValues(string field)
+        {
+            return false;
+        }
+
+
+        #region BaseDocValuesFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestOneNumber()
+        {
+            base.TestOneNumber();
+        }
+
+        [Test]
+        public override void TestOneFloat()
+        {
+            base.TestOneFloat();
+        }
+
+        [Test]
+        public override void TestTwoNumbers()
+        {
+            base.TestTwoNumbers();
+        }
+
+        [Test]
+        public override void TestTwoBinaryValues()
+        {
+            base.TestTwoBinaryValues();
+        }
+
+        [Test]
+        public override void TestTwoFieldsMixed()
+        {
+            base.TestTwoFieldsMixed();
+        }
+
+        [Test]
+        public override void TestThreeFieldsMixed()
+        {
+            base.TestThreeFieldsMixed();
+        }
+
+        [Test]
+        public override void TestThreeFieldsMixed2()
+        {
+            base.TestThreeFieldsMixed2();
+        }
+
+        [Test]
+        public override void TestTwoDocumentsNumeric()
+        {
+            base.TestTwoDocumentsNumeric();
+        }
+
+        [Test]
+        public override void TestTwoDocumentsMerged()
+        {
+            base.TestTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestBigNumericRange()
+        {
+            base.TestBigNumericRange();
+        }
+
+        [Test]
+        public override void TestBigNumericRange2()
+        {
+            base.TestBigNumericRange2();
+        }
+
+        [Test]
+        public override void TestBytes()
+        {
+            base.TestBytes();
+        }
+
+        [Test]
+        public override void TestBytesTwoDocumentsMerged()
+        {
+            base.TestBytesTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestSortedBytes()
+        {
+            base.TestSortedBytes();
+        }
+
+        [Test]
+        public override void TestSortedBytesTwoDocuments()
+        {
+            base.TestSortedBytesTwoDocuments();
+        }
+
+        [Test]
+        public override void TestSortedBytesThreeDocuments()
+        {
+            base.TestSortedBytesThreeDocuments();
+        }
+
+        [Test]
+        public override void TestSortedBytesTwoDocumentsMerged()
+        {
+            base.TestSortedBytesTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestSortedMergeAwayAllValues()
+        {
+            base.TestSortedMergeAwayAllValues();
+        }
+
+        [Test]
+        public override void TestBytesWithNewline()
+        {
+            base.TestBytesWithNewline();
+        }
+
+        [Test]
+        public override void TestMissingSortedBytes()
+        {
+            base.TestMissingSortedBytes();
+        }
+
+        [Test]
+        public override void TestSortedTermsEnum()
+        {
+            base.TestSortedTermsEnum();
+        }
+
+        [Test]
+        public override void TestEmptySortedBytes()
+        {
+            base.TestEmptySortedBytes();
+        }
+
+        [Test]
+        public override void TestEmptyBytes()
+        {
+            base.TestEmptyBytes();
+        }
+
+        [Test]
+        public override void TestVeryLargeButLegalBytes()
+        {
+            base.TestVeryLargeButLegalBytes();
+        }
+
+        [Test]
+        public override void TestVeryLargeButLegalSortedBytes()
+        {
+            base.TestVeryLargeButLegalSortedBytes();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnBytes()
+        {
+            base.TestCodecUsesOwnBytes();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnSortedBytes()
+        {
+            base.TestCodecUsesOwnSortedBytes();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnBytesEachTime()
+        {
+            base.TestCodecUsesOwnBytesEachTime();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnSortedBytesEachTime()
+        {
+            base.TestCodecUsesOwnSortedBytesEachTime();
+        }
+
+        /*
+         * Simple test case to show how to use the API
+         */
+        [Test]
+        public override void TestDocValuesSimple()
+        {
+            base.TestDocValuesSimple();
+        }
+
+        [Test]
+        public override void TestRandomSortedBytes()
+        {
+            base.TestRandomSortedBytes();
+        }
+
+        [Test]
+        public override void TestBooleanNumericsVsStoredFields()
+        {
+            base.TestBooleanNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestByteNumericsVsStoredFields()
+        {
+            base.TestByteNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestByteMissingVsFieldCache()
+        {
+            base.TestByteMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestShortNumericsVsStoredFields()
+        {
+            base.TestShortNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestShortMissingVsFieldCache()
+        {
+            base.TestShortMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestIntNumericsVsStoredFields()
+        {
+            base.TestIntNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestIntMissingVsFieldCache()
+        {
+            base.TestIntMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestLongNumericsVsStoredFields()
+        {
+            base.TestLongNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestLongMissingVsFieldCache()
+        {
+            base.TestLongMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestBinaryFixedLengthVsStoredFields()
+        {
+            base.TestBinaryFixedLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestBinaryVariableLengthVsStoredFields()
+        {
+            base.TestBinaryVariableLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedFixedLengthVsStoredFields()
+        {
+            base.TestSortedFixedLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedFixedLengthVsFieldCache()
+        {
+            base.TestSortedFixedLengthVsFieldCache();
+        }
+
+        [Test]
+        public override void TestSortedVariableLengthVsFieldCache()
+        {
+            base.TestSortedVariableLengthVsFieldCache();
+        }
+
+        [Test]
+        public override void TestSortedVariableLengthVsStoredFields()
+        {
+            base.TestSortedVariableLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetOneValue()
+        {
+            base.TestSortedSetOneValue();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoFields()
+        {
+            base.TestSortedSetTwoFields();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsMerged()
+        {
+            base.TestSortedSetTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoValues()
+        {
+            base.TestSortedSetTwoValues();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoValuesUnordered()
+        {
+            base.TestSortedSetTwoValuesUnordered();
+        }
+
+        [Test]
+        public override void TestSortedSetThreeValuesTwoDocs()
+        {
+            base.TestSortedSetThreeValuesTwoDocs();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsLastMissing()
+        {
+            base.TestSortedSetTwoDocumentsLastMissing();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsLastMissingMerge()
+        {
+            base.TestSortedSetTwoDocumentsLastMissingMerge();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsFirstMissing()
+        {
+            base.TestSortedSetTwoDocumentsFirstMissing();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsFirstMissingMerge()
+        {
+            base.TestSortedSetTwoDocumentsFirstMissingMerge();
+        }
+
+        [Test]
+        public override void TestSortedSetMergeAwayAllValues()
+        {
+            base.TestSortedSetMergeAwayAllValues();
+        }
+
+        [Test]
+        public override void TestSortedSetTermsEnum()
+        {
+            base.TestSortedSetTermsEnum();
+        }
+
+        [Test]
+        public override void TestSortedSetFixedLengthVsStoredFields()
+        {
+            base.TestSortedSetFixedLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetVariableLengthVsStoredFields()
+        {
+            base.TestSortedSetVariableLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetFixedLengthSingleValuedVsStoredFields()
+        {
+            base.TestSortedSetFixedLengthSingleValuedVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetVariableLengthSingleValuedVsStoredFields()
+        {
+            base.TestSortedSetVariableLengthSingleValuedVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetFixedLengthVsUninvertedField()
+        {
+            base.TestSortedSetFixedLengthVsUninvertedField();
+        }
+
+        [Test]
+        public override void TestSortedSetVariableLengthVsUninvertedField()
+        {
+            base.TestSortedSetVariableLengthVsUninvertedField();
+        }
+
+        [Test]
+        public override void TestGCDCompression()
+        {
+            base.TestGCDCompression();
+        }
+
+        [Test]
+        public override void TestZeros()
+        {
+            base.TestZeros();
+        }
+
+        [Test]
+        public override void TestZeroOrMin()
+        {
+            base.TestZeroOrMin();
+        }
+
+        [Test]
+        public override void TestTwoNumbersOneMissing()
+        {
+            base.TestTwoNumbersOneMissing();
+        }
+
+        [Test]
+        public override void TestTwoNumbersOneMissingWithMerging()
+        {
+            base.TestTwoNumbersOneMissingWithMerging();
+        }
+
+        [Test]
+        public override void TestThreeNumbersOneMissingWithMerging()
+        {
+            base.TestThreeNumbersOneMissingWithMerging();
+        }
+
+        [Test]
+        public override void TestTwoBytesOneMissing()
+        {
+            base.TestTwoBytesOneMissing();
+        }
+
+        [Test]
+        public override void TestTwoBytesOneMissingWithMerging()
+        {
+            base.TestTwoBytesOneMissingWithMerging();
+        }
+
+        [Test]
+        public override void TestThreeBytesOneMissingWithMerging()
+        {
+            base.TestThreeBytesOneMissingWithMerging();
+        }
+
+        // LUCENE-4853
+        [Test]
+        public override void TestHugeBinaryValues()
+        {
+            base.TestHugeBinaryValues();
+        }
+
+        // TODO: get this out of here and into the deprecated codecs (4.0, 4.2)
+        [Test]
+        public override void TestHugeBinaryValueLimit()
+        {
+            base.TestHugeBinaryValueLimit();
+        }
+
+        /// <summary>
+        /// Tests dv against stored fields with threads (binary/numeric/sorted, no missing)
+        /// </summary>
+        [Test]
+        public override void TestThreads()
+        {
+            base.TestThreads();
+        }
+
+        /// <summary>
+        /// Tests dv against stored fields with threads (all types + missing)
+        /// </summary>
+        [Test]
+        public override void TestThreads2()
+        {
+            base.TestThreads2();
+        }
+
+        // LUCENE-5218
+        [Test]
+        public override void TestEmptyBinaryValueOnPageSizes()
+        {
+            base.TestEmptyBinaryValueOnPageSizes();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsFormat.cs
new file mode 100644
index 0000000..2d2b5f1
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsFormat.cs
@@ -0,0 +1,111 @@
+namespace Lucene.Net.Codecs.Lucene40
+{
+    using NUnit.Framework;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using BasePostingsFormatTestCase = Lucene.Net.Index.BasePostingsFormatTestCase;
+
+    /// <summary>
+    /// Tests Lucene40PostingsFormat
+    /// </summary>
+    public class TestLucene40PostingsFormat : BasePostingsFormatTestCase
+    {
+        /// <summary>
+        /// LUCENENET specific:
+        /// This method is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
+        }
+
+        protected override Codec Codec
+        {
+            get
+            {
+                Assert.True(OLD_FORMAT_IMPERSONATION_IS_ACTIVE, "Expecting this to be set already before creating codec");
+                return new Lucene40RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+            }
+        }
+
+
+        #region BasePostingsFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestDocsOnly()
+        {
+            base.TestDocsOnly();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqs()
+        {
+            base.TestDocsAndFreqs();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositions()
+        {
+            base.TestDocsAndFreqsAndPositions();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndPayloads()
+        {
+            base.TestDocsAndFreqsAndPositionsAndPayloads();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndOffsets()
+        {
+            base.TestDocsAndFreqsAndPositionsAndOffsets();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndOffsetsAndPayloads()
+        {
+            base.TestDocsAndFreqsAndPositionsAndOffsetsAndPayloads();
+        }
+
+        [Test]
+        public override void TestRandom()
+        {
+            base.TestRandom();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsReader.cs b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsReader.cs
new file mode 100644
index 0000000..aea5ce0
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40PostingsReader.cs
@@ -0,0 +1,166 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.Support;
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Lucene.Net.Codecs.Lucene40
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using StringField = StringField;
+    using Term = Lucene.Net.Index.Term;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestLucene40PostingsReader : LuceneTestCase
+    {
+        internal static readonly string[] Terms = new string[100];
+
+        static TestLucene40PostingsReader()
+        {
+            for (int i = 0; i < Terms.Length; i++)
+            {
+                Terms[i] = Convert.ToString(i + 1);
+            }
+        }
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
+        }
+
+        /// <summary>
+        /// Tests terms with different probabilities of being in the document.
+        /// Depends heavily on the term vectors cross-check in CheckIndex.
+        /// </summary>
+        [Test]
+        public virtual void TestPostings()
+        {
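+            // Indexes at least 1000 docs whose title/body fields use a randomly
+            // chosen postings granularity; disposing the directory at the end
+            // triggers CheckIndex (see the trailing comment), whose term vectors
+            // cross-check validates the postings written by the Lucene40 codec.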
+            Directory dir = NewFSDirectory(CreateTempDir("postings"));
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetCodec(Codec.ForName("Lucene40"));
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);
+
+            Document doc = new Document();
+
+            // id field
+            FieldType idType = new FieldType(StringField.TYPE_NOT_STORED);
+            idType.StoreTermVectors = true;
+            Field idField = new Field("id", "", idType);
+            doc.Add(idField);
+
+            // title field: short text field
+            FieldType titleType = new FieldType(TextField.TYPE_NOT_STORED);
+            titleType.StoreTermVectors = true;
+            titleType.StoreTermVectorPositions = true;
+            titleType.StoreTermVectorOffsets = true;
+            titleType.IndexOptions = IndexOptions();
+            Field titleField = new Field("title", "", titleType);
+            doc.Add(titleField);
+
+            // body field: long text field
+            FieldType bodyType = new FieldType(TextField.TYPE_NOT_STORED);
+            bodyType.StoreTermVectors = true;
+            bodyType.StoreTermVectorPositions = true;
+            bodyType.StoreTermVectorOffsets = true;
+            bodyType.IndexOptions = IndexOptions();
+            Field bodyField = new Field("body", "", bodyType);
+            doc.Add(bodyField);
+
+            int numDocs = AtLeast(1000);
+            for (int i = 0; i < numDocs; i++)
+            {
+                idField.SetStringValue(Convert.ToString(i));
+                titleField.SetStringValue(FieldValue(1));
+                bodyField.SetStringValue(FieldValue(3));
+                iw.AddDocument(doc);
+                if (Random().Next(20) == 0)
+                {
+                    iw.DeleteDocuments(new Term("id", Convert.ToString(i)));
+                }
+            }
+            if (Random().NextBoolean())
+            {
+                // delete 1-100% of docs
+                iw.DeleteDocuments(new Term("title", Terms[Random().Next(Terms.Length)]));
+            }
+            iw.Dispose();
+            dir.Dispose(); // checkindex
+        }
+
+        internal virtual IndexOptions IndexOptions()
+        {
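+            // Randomly pick one of the four postings granularities, from
+            // DOCS_ONLY up to positions with offsets.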
+            switch (Random().Next(4))
+            {
+                case 0:
+                    return Index.IndexOptions.DOCS_ONLY;
+
+                case 1:
+                    return Index.IndexOptions.DOCS_AND_FREQS;
+
+                case 2:
+                    return Index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
+
+                default:
+                    return Index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
+            }
+        }
+
+        internal virtual string FieldValue(int maxTF)
+        {
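+            // Start at a random term index so lower-numbered terms are omitted
+            // more often, giving each term a different probability of appearing;
+            // every included term is repeated 1..maxTF times, then the whole
+            // list is shuffled before being joined into the field value.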
+            IList<string> shuffled = new List<string>();
+            StringBuilder sb = new StringBuilder();
+            int i = Random().Next(Terms.Length);
+            while (i < Terms.Length)
+            {
+                int tf = TestUtil.NextInt(Random(), 1, maxTF);
+                for (int j = 0; j < tf; j++)
+                {
+                    shuffled.Add(Terms[i]);
+                }
+                i++;
+            }
+            Collections.Shuffle(shuffled);
+            foreach (string term in shuffled)
+            {
+                sb.Append(term);
+                sb.Append(' ');
+            }
+            return sb.ToString();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40StoredFieldsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40StoredFieldsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40StoredFieldsFormat.cs
new file mode 100644
index 0000000..c78b1b4
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40StoredFieldsFormat.cs
@@ -0,0 +1,147 @@
+namespace Lucene.Net.Codecs.Lucene40
+{
+    using Attributes;
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using BaseStoredFieldsFormatTestCase = Lucene.Net.Index.BaseStoredFieldsFormatTestCase;
+
+    public class TestLucene40StoredFieldsFormat : BaseStoredFieldsFormatTestCase
+    {
+        /// <summary>
+        /// LUCENENET specific
+        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
+        }
+
+        protected override Codec Codec
+        {
+            get
+            {
+                Assert.True(OLD_FORMAT_IMPERSONATION_IS_ACTIVE, "Expecting this to be set already");
+                return new Lucene40RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+            }
+        }
+
+
+        #region BaseStoredFieldsFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestRandomStoredFields()
+        {
+            base.TestRandomStoredFields();
+        }
+
+        [Test]
+        // LUCENE-1727: make sure doc fields are stored in order
+        public override void TestStoredFieldsOrder()
+        {
+            base.TestStoredFieldsOrder();
+        }
+
+        [Test]
+        // LUCENE-1219
+        public override void TestBinaryFieldOffsetLength()
+        {
+            base.TestBinaryFieldOffsetLength();
+        }
+
+        [Test]
+        public override void TestNumericField()
+        {
+            base.TestNumericField();
+        }
+
+        [Test]
+        public override void TestIndexedBit()
+        {
+            base.TestIndexedBit();
+        }
+
+        [Test]
+        public override void TestReadSkip()
+        {
+            base.TestReadSkip();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout on NUnit for .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public override void TestEmptyDocs()
+        {
+            base.TestEmptyDocs();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout on NUnit for .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public override void TestConcurrentReads()
+        {
+            base.TestConcurrentReads();
+        }
+
+        [Test]
+        public override void TestWriteReadMerge()
+        {
+            base.TestWriteReadMerge();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout on NUnit for .NET Core.
+        [Timeout(80000)]
+#endif
+        [Test, HasTimeout]
+        public override void TestBigDocuments()
+        {
+            base.TestBigDocuments();
+        }
+
+        [Test]
+        public override void TestBulkMergeWithDeletes()
+        {
+            base.TestBulkMergeWithDeletes();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40TermVectorsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40TermVectorsFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40TermVectorsFormat.cs
new file mode 100644
index 0000000..d7541eb
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene40/TestLucene40TermVectorsFormat.cs
@@ -0,0 +1,117 @@
+namespace Lucene.Net.Codecs.Lucene40
+{
+    using Attributes;
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using BaseTermVectorsFormatTestCase = Lucene.Net.Index.BaseTermVectorsFormatTestCase;
+
+    public class TestLucene40TermVectorsFormat : BaseTermVectorsFormatTestCase
+    {
+        /// <summary>
+        /// LUCENENET specific
+        /// Is non-static because OLD_FORMAT_IMPERSONATION_IS_ACTIVE is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; // explicitly instantiates ancient codec
+        }
+
+        protected override Codec Codec
+        {
+            get
+            {
+                Assert.True(OLD_FORMAT_IMPERSONATION_IS_ACTIVE, "Expecting this to be set already");
+                return new Lucene40RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE);
+            }
+        }
+
+
+        #region BaseTermVectorsFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        // only one doc with vectors
+        public override void TestRareVectors()
+        {
+            base.TestRareVectors();
+        }
+
+        [Test]
+        public override void TestHighFreqs()
+        {
+            base.TestHighFreqs();
+        }
+
+        [Test]
+        public override void TestLotsOfFields()
+        {
+            base.TestLotsOfFields();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout on NUnit for .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        // different options for the same field
+        public override void TestMixedOptions()
+        {
+            base.TestMixedOptions();
+        }
+
+        [Test]
+        public override void TestRandom()
+        {
+            base.TestRandom();
+        }
+
+        [Test]
+        public override void TestMerge()
+        {
+            base.TestMerge();
+        }
+
+        [Test]
+        // run random tests from different threads to make sure the per-thread clones
+        // don't share mutable data
+        public override void TestClone()
+        {
+            base.TestClone();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestCustomSearcherSort.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestCustomSearcherSort.cs b/src/Lucene.Net.Tests/Search/TestCustomSearcherSort.cs
new file mode 100644
index 0000000..0511e75
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestCustomSearcherSort.cs
@@ -0,0 +1,262 @@
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.Text;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+
+    /// <summary>
+    /// Copyright 2005 The Apache Software Foundation
+    ///
+    /// Licensed under the Apache License, Version 2.0 (the "License");
+    /// you may not use this file except in compliance with the License.
+    /// You may obtain a copy of the License at
+    ///
+    ///     http://www.apache.org/licenses/LICENSE-2.0
+    ///
+    /// Unless required by applicable law or agreed to in writing, software
+    /// distributed under the License is distributed on an "AS IS" BASIS,
+    /// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    /// See the License for the specific language governing permissions and
+    /// limitations under the License.
+    /// </summary>
+
+    using DateTools = DateTools;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Unit test for sorting code. </summary>
+    [TestFixture]
+    public class TestCustomSearcherSort : LuceneTestCase
+    {
+        private Directory Index = null;
+        private IndexReader Reader;
+        private Query Query = null;
+
+        // reduced from 20000 to 2000 to speed up test...
+        private int INDEX_SIZE;
+
+        /// <summary>
+        /// Create index and query for test cases.
+        /// </summary>
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            INDEX_SIZE = AtLeast(2000);
+            Index = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Index, Similarity, TimeZone);
+            RandomGen random = new RandomGen(this, Random());
+            for (int i = 0; i < INDEX_SIZE; ++i) // don't decrease; if too low, the problem doesn't show up
+            {
+                Document doc = new Document();
+                if ((i % 5) != 0) // some documents must not have an entry in the first sort field
+                {
+                    doc.Add(NewStringField("publicationDate_", random.LuceneDate, Field.Store.YES));
+                }
+                if ((i % 7) == 0) // some documents to match the query (see below)
+                {
+                    doc.Add(NewTextField("content", "test", Field.Store.YES));
+                }
+                // every document has a defined 'mandant' field
+                doc.Add(NewStringField("mandant", Convert.ToString(i % 3), Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            Reader = writer.Reader;
+            writer.Dispose();
+            Query = new TermQuery(new Term("content", "test"));
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Index.Dispose();
+            base.TearDown();
+        }
+
+        /// <summary>
+        /// Run the test using a CustomSearcher instance.
+        /// </summary>
+        [Test]
+        public virtual void TestFieldSortCustomSearcher()
+        {
+            // log("Run testFieldSortCustomSearcher");
+            // define the sort criteria
+            Sort custSort = new Sort(new SortField("publicationDate_", SortFieldType.STRING), SortField.FIELD_SCORE);
+            IndexSearcher searcher = new CustomSearcher(this, Reader, 2);
+            // search and check hits
+            MatchHits(searcher, custSort);
+        }
+
+        /// <summary>
+        /// Run the test using a single CustomSearcher.
+        /// </summary>
+        [Test]
+        public virtual void TestFieldSortSingleSearcher()
+        {
+            // log("Run testFieldSortSingleSearcher");
+            // define the sort criteria
+            Sort custSort = new Sort(new SortField("publicationDate_", SortFieldType.STRING), SortField.FIELD_SCORE);
+            IndexSearcher searcher = new CustomSearcher(this, Reader, 2);
+            // search and check hits
+            MatchHits(searcher, custSort);
+        }
+
+        // make sure the documents returned by the search match the expected list
+        private void MatchHits(IndexSearcher searcher, Sort sort)
+        {
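+            // Runs the query once unsorted and once with the given sort and
+            // verifies that, ordering aside, both searches return exactly the
+            // same set of documents.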
+            // make a query without sorting first
+            ScoreDoc[] hitsByRank = searcher.Search(Query, null, int.MaxValue).ScoreDocs;
+            CheckHits(hitsByRank, "Sort by rank: "); // check for duplicates
+            IDictionary<int?, int?> resultMap = new SortedDictionary<int?, int?>();
+            // store hits in TreeMap - TreeMap does not allow duplicates; existing
+            // entries are silently overwritten
+            for (int hitid = 0; hitid < hitsByRank.Length; ++hitid)
+            {
+                resultMap[Convert.ToInt32(hitsByRank[hitid].Doc)] = Convert.ToInt32(hitid); // Key: Lucene document ID - Value: hit index
+            }
+
+            // now make a query using the sort criteria
+            ScoreDoc[] resultSort = searcher.Search(Query, null, int.MaxValue, sort).ScoreDocs;
+            CheckHits(resultSort, "Sort by custom criteria: "); // check for duplicates
+
+            // besides the sorting both sets of hits must be identical
+            for (int hitid = 0; hitid < resultSort.Length; ++hitid)
+            {
+                int? idHitDate = Convert.ToInt32(resultSort[hitid].Doc); // document ID from the sorted search
+                if (!resultMap.ContainsKey(idHitDate))
+                {
+                    Log("ID " + idHitDate + " not found. Possibliy a duplicate.");
+                }
+                Assert.IsTrue(resultMap.ContainsKey(idHitDate)); // the same ID must be in the map from the rank-sorted search
+                // Every hit must appear once in both result sets --> remove it from the map.
+                // At the end the map must be empty!
+                resultMap.Remove(idHitDate);
+            }
+            if (resultMap.Count == 0)
+            {
+                // log("All hits matched");
+            }
+            else
+            {
+                Log("Couldn't match " + resultMap.Count + " hits.");
+            }
+            Assert.AreEqual(resultMap.Count, 0);
+        }
+
+        /// <summary>
+        /// Check the hits for duplicates.
+        /// </summary>
+        private void CheckHits(ScoreDoc[] hits, string prefix)
+        {
+            if (hits != null)
+            {
+                IDictionary<int?, int?> idMap = new SortedDictionary<int?, int?>();
+                for (int docnum = 0; docnum < hits.Length; ++docnum)
+                {
+                    int? luceneId = null;
+
+                    luceneId = Convert.ToInt32(hits[docnum].Doc);
+                    if (idMap.ContainsKey(luceneId))
+                    {
+                        StringBuilder message = new StringBuilder(prefix);
+                        message.Append("Duplicate key for hit index = ");
+                        message.Append(docnum);
+                        message.Append(", previous index = ");
+                        message.Append((idMap[luceneId]).ToString());
+                        message.Append(", Lucene ID = ");
+                        message.Append(luceneId);
+                        Log(message.ToString());
+                    }
+                    else
+                    {
+                        idMap[luceneId] = Convert.ToInt32(docnum);
+                    }
+                }
+            }
+        }
+
+        // Simply write to console - chosen to be independent of log4j etc.
+        private void Log(string message)
+        {
+            if (VERBOSE)
+            {
+                Console.WriteLine(message);
+            }
+        }
+
+        public class CustomSearcher : IndexSearcher
+        {
+            private readonly TestCustomSearcherSort OuterInstance;
+
+            internal int Switcher;
+
+            public CustomSearcher(TestCustomSearcherSort outerInstance, IndexReader r, int switcher)
+                : base(r)
+            {
+                this.OuterInstance = outerInstance;
+                this.Switcher = switcher;
+            }
+
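+            // Both overrides narrow the incoming query to documents whose
+            // 'mandant' field equals the switcher value by AND-ing in an
+            // extra required TermQuery.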
+            public override TopFieldDocs Search(Query query, Filter filter, int nDocs, Sort sort)
+            {
+                BooleanQuery bq = new BooleanQuery();
+                bq.Add(query, Occur.MUST);
+                bq.Add(new TermQuery(new Term("mandant", Convert.ToString(Switcher))), Occur.MUST);
+                return base.Search(bq, filter, nDocs, sort);
+            }
+
+            public override TopDocs Search(Query query, Filter filter, int nDocs)
+            {
+                BooleanQuery bq = new BooleanQuery();
+                bq.Add(query, Occur.MUST);
+                bq.Add(new TermQuery(new Term("mandant", Convert.ToString(Switcher))), Occur.MUST);
+                return base.Search(bq, filter, nDocs);
+            }
+        }
+
+        private class RandomGen
+        {
+            private readonly TestCustomSearcherSort OuterInstance;
+
+            internal RandomGen(TestCustomSearcherSort outerInstance, Random random)
+            {
+                this.OuterInstance = outerInstance;
+                this.Random = random;
+                @base = new DateTime(1980, 1, 1);
+            }
+
+            internal Random Random;
+
+            // we use the default Locale/TZ since LuceneTestCase randomizes it
+            internal DateTime @base;
+
+            // Just to generate some different Lucene Date strings
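+            // Random.Next() is non-negative, so subtracting int.MinValue adds
+            // 2^31 and always yields a positive millisecond offset past 'now',
+            // producing varying DAY-resolution date strings.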
+            internal virtual string LuceneDate
+            {
+                get
+                {
+                    return DateTools.TimeToString((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) + Random.Next() - int.MinValue, DateTools.Resolution.DAY);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestDateFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestDateFilter.cs b/src/Lucene.Net.Tests/Search/TestDateFilter.cs
new file mode 100644
index 0000000..11e62e1
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestDateFilter.cs
@@ -0,0 +1,165 @@
+using System;
+using Lucene.Net.Attributes;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using DateTools = DateTools;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// DateFilter JUnit tests.
+    /// </summary>
+    [TestFixture]
+    public class TestDateFilter : LuceneTestCase
+    {
+        [OneTimeSetUp]
+        public virtual void TestBefore()
+        {
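+            // Indexes one document dated one second in the past, then checks
+            // that a range filter covering that moment preserves the match
+            // while a range ending two seconds ago discards it.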
+            // create an index
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+
+            long now = DateTime.UtcNow.Ticks / TimeSpan.TicksPerMillisecond;
+
+            Document doc = new Document();
+            // add time that is in the past
+            doc.Add(NewStringField("datefield", DateTools.TimeToString(now - 1000, DateTools.Resolution.MILLISECOND), Field.Store.YES));
+            doc.Add(NewTextField("body", "Today is a very sunny day in New York City", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+            IndexSearcher searcher = NewSearcher(reader);
+
+            // filter that should preserve matches
+            // DateFilter df1 = DateFilter.Before("datefield", now);
+            TermRangeFilter df1 = TermRangeFilter.NewStringRange("datefield", DateTools.TimeToString(now - 2000, DateTools.Resolution.MILLISECOND), DateTools.TimeToString(now, DateTools.Resolution.MILLISECOND), false, true);
+            // filter that should discard matches
+            // DateFilter df2 = DateFilter.Before("datefield", now - 999999);
+            TermRangeFilter df2 = TermRangeFilter.NewStringRange("datefield", DateTools.TimeToString(0, DateTools.Resolution.MILLISECOND), DateTools.TimeToString(now - 2000, DateTools.Resolution.MILLISECOND), true, false);
+
+            // search for something that doesn't exist with the DateFilter
+            Query query1 = new TermQuery(new Term("body", "NoMatchForthis"));
+
+            // search for something that does exist
+            Query query2 = new TermQuery(new Term("body", "sunny"));
+
+            ScoreDoc[] result;
+
+            // ensure that queries return expected results without DateFilter first
+            result = searcher.Search(query1, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, result.Length);
+
+            result = searcher.Search(query2, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, result.Length);
+
+            // run queries with DateFilter
+            result = searcher.Search(query1, df1, 1000).ScoreDocs;
+            Assert.AreEqual(0, result.Length);
+
+            result = searcher.Search(query1, df2, 1000).ScoreDocs;
+            Assert.AreEqual(0, result.Length);
+
+            result = searcher.Search(query2, df1, 1000).ScoreDocs;
+            Assert.AreEqual(1, result.Length);
+
+            result = searcher.Search(query2, df2, 1000).ScoreDocs;
+            Assert.AreEqual(0, result.Length);
+            reader.Dispose();
+            indexStore.Dispose();
+        }
+
+        [Test, LuceneNetSpecific]
+        public void Test()
+        {
+            // noop, required for the before and after tests to run
+        }
+
+        [OneTimeTearDown]
+        public virtual void TestAfter()
+        {
+            // create an index
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+
+            long now = DateTime.UtcNow.Ticks / TimeSpan.TicksPerMillisecond;
+
+            Document doc = new Document();
+            // add time that is in the future
+            doc.Add(NewStringField("datefield", DateTools.TimeToString(now + 888888, DateTools.Resolution.MILLISECOND), Field.Store.YES));
+            doc.Add(NewTextField("body", "Today is a very sunny day in New York City", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+            IndexSearcher searcher = NewSearcher(reader);
+
+            // filter that should preserve matches
+            // DateFilter df1 = DateFilter.After("datefield", now);
+            TermRangeFilter df1 = TermRangeFilter.NewStringRange("datefield", DateTools.TimeToString(now, DateTools.Resolution.MILLISECOND), DateTools.TimeToString(now + 999999, DateTools.Resolution.MILLISECOND), true, false);
+            // filter that should discard matches
+            // DateFilter df2 = DateFilter.After("datefield", now + 999999);
+            TermRangeFilter df2 = TermRangeFilter.NewStringRange("datefield", DateTools.TimeToString(now + 999999, DateTools.Resolution.MILLISECOND), DateTools.TimeToString(now + 999999999, DateTools.Resolution.MILLISECOND), false, true);
+
+            // search for something that doesn't exist with the DateFilter
+            Query query1 = new TermQuery(new Term("body", "NoMatchForthis"));
+
+            // search for something that does exist
+            Query query2 = new TermQuery(new Term("body", "sunny"));
+
+            ScoreDoc[] result;
+
+            // ensure that queries return expected results without DateFilter first
+            result = searcher.Search(query1, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, result.Length);
+
+            result = searcher.Search(query2, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, result.Length);
+
+            // run queries with DateFilter
+            result = searcher.Search(query1, df1, 1000).ScoreDocs;
+            Assert.AreEqual(0, result.Length);
+
+            result = searcher.Search(query1, df2, 1000).ScoreDocs;
+            Assert.AreEqual(0, result.Length);
+
+            result = searcher.Search(query2, df1, 1000).ScoreDocs;
+            Assert.AreEqual(1, result.Length);
+
+            result = searcher.Search(query2, df2, 1000).ScoreDocs;
+            Assert.AreEqual(0, result.Length);
+            reader.Dispose();
+            indexStore.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestDateSort.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestDateSort.cs b/src/Lucene.Net.Tests/Search/TestDateSort.cs
new file mode 100644
index 0000000..3c45c9b
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestDateSort.cs
@@ -0,0 +1,125 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using DateTools = DateTools;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Test date sorting, i.e. auto-sorting of fields with type "long".
+    /// See http://issues.apache.org/jira/browse/LUCENE-1045
+    /// </summary>
+    [TestFixture]
+    public class TestDateSort : LuceneTestCase
+    {
+        private const string TEXT_FIELD = "text";
+        private const string DATE_TIME_FIELD = "dateTime";
+
+        private Directory Directory;
+        private IndexReader Reader;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            // Create an index writer.
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, Similarity, TimeZone);
+
+            // oldest doc:
+            // Add the first document.  text = "Document 1"  dateTime = Oct 10 03:25:22 EDT 2007
+            writer.AddDocument(CreateDocument("Document 1", 1192001122000L));
+            // Add the second document.  text = "Document 2"  dateTime = Oct 10 03:25:26 EDT 2007
+            writer.AddDocument(CreateDocument("Document 2", 1192001126000L));
+            // Add the third document.  text = "Document 3"  dateTime = Oct 11 07:12:13 EDT 2007
+            writer.AddDocument(CreateDocument("Document 3", 1192101133000L));
+            // Add the fourth document.  text = "Document 4"  dateTime = Oct 11 08:02:09 EDT 2007
+            writer.AddDocument(CreateDocument("Document 4", 1192104129000L));
+            // latest doc:
+            // Add the fifth document.  text = "Document 5"  dateTime = Oct 12 13:25:43 EDT 2007
+            writer.AddDocument(CreateDocument("Document 5", 1192209943000L));
+
+            Reader = writer.Reader;
+            writer.Dispose();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Directory.Dispose();
+            base.TearDown();
+        }
+
+        [Test]
+        public virtual void TestReverseDateSort()
+        {
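+            // DateTools strings at SECOND resolution sort lexicographically in
+            // chronological order, so a reversed STRING sort yields newest-first.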
+            IndexSearcher searcher = NewSearcher(Reader);
+
+            Sort sort = new Sort(new SortField(DATE_TIME_FIELD, SortFieldType.STRING, true));
+            Query query = new TermQuery(new Term(TEXT_FIELD, "document"));
+
+            // Execute the search and process the search results.
+            string[] actualOrder = new string[5];
+            ScoreDoc[] hits = searcher.Search(query, null, 1000, sort).ScoreDocs;
+            for (int i = 0; i < hits.Length; i++)
+            {
+                Document document = searcher.Doc(hits[i].Doc);
+                string text = document.Get(TEXT_FIELD);
+                actualOrder[i] = text;
+            }
+
+            // Set up the expected order (i.e. Document 5, 4, 3, 2, 1).
+            string[] expectedOrder = new string[5];
+            expectedOrder[0] = "Document 5";
+            expectedOrder[1] = "Document 4";
+            expectedOrder[2] = "Document 3";
+            expectedOrder[3] = "Document 2";
+            expectedOrder[4] = "Document 1";
+
+            Assert.AreEqual(Arrays.AsList(expectedOrder), Arrays.AsList(actualOrder));
+        }
+
+        private Document CreateDocument(string text, long time)
+        {
+            Document document = new Document();
+
+            // Add the text field.
+            Field textField = NewTextField(TEXT_FIELD, text, Field.Store.YES);
+            document.Add(textField);
+
+            // Add the date/time field.
+            string dateTimeString = DateTools.TimeToString(time, DateTools.Resolution.SECOND);
+            Field dateTimeField = NewStringField(DATE_TIME_FIELD, dateTimeString, Field.Store.YES);
+            document.Add(dateTimeField);
+
+            return document;
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestDisjunctionMaxQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestDisjunctionMaxQuery.cs b/src/Lucene.Net.Tests/Search/TestDisjunctionMaxQuery.cs
new file mode 100644
index 0000000..18db902
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestDisjunctionMaxQuery.cs
@@ -0,0 +1,570 @@
+using System;
+using System.Globalization;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Index;
+    using NUnit.Framework;
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Field = Field;
+    using FieldInvertState = Lucene.Net.Index.FieldInvertState;
+    using FieldType = FieldType;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Similarity = Lucene.Net.Search.Similarities.Similarity;
+    using SlowCompositeReaderWrapper = Lucene.Net.Index.SlowCompositeReaderWrapper;
+    using SpanQuery = Lucene.Net.Search.Spans.SpanQuery;
+    using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
+    using Term = Lucene.Net.Index.Term;
+    using TextField = TextField;
+
+    /// <summary>
+    /// Test of the DisjunctionMaxQuery.
+    /// </summary>
+    [TestFixture]
+    public class TestDisjunctionMaxQuery : LuceneTestCase
+    {
+        /// <summary>
+        /// threshold for comparing floats </summary>
+        public static readonly float SCORE_COMP_THRESH = 0.0000f;
+
+        /// <summary>
+        /// Similarity to eliminate tf, idf and lengthNorm effects to isolate test
+        /// case.
+        ///
+        /// <p>
+        /// same as TestRankingSimilarity in TestRanking.zip from
+        /// http://issues.apache.org/jira/browse/LUCENE-323
+        /// </p>
+        /// </summary>
+        private class TestSimilarity : DefaultSimilarity
+        {
+            public TestSimilarity()
+            {
+            }
+
+            public override float Tf(float freq)
+            {
+                if (freq > 0.0f)
+                {
+                    return 1.0f;
+                }
+                else
+                {
+                    return 0.0f;
+                }
+            }
+
+            public override float LengthNorm(FieldInvertState state)
+            {
+                // Disable length norm
+                return state.Boost;
+            }
+
+            public override float Idf(long docFreq, long numDocs)
+            {
+                return 1.0f;
+            }
+        }
+
+        public Similarity Sim = new TestSimilarity();
+        public Directory Index;
+        public IndexReader r;
+        public IndexSearcher s;
+
+        private static readonly FieldType NonAnalyzedType = new FieldType(TextField.TYPE_STORED);
+
+        static TestDisjunctionMaxQuery()
+        {
+            NonAnalyzedType.IsTokenized = false;
+        }
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+
+            Index = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Index, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetSimilarity(Sim).SetMergePolicy(NewLogMergePolicy()));
+
+            // hed is the most important field, dek is secondary
+
+            // d1 is an "ok" match for: albino elephant
+            {
+                Document d1 = new Document();
+                d1.Add(NewField("id", "d1", NonAnalyzedType)); // Field.Keyword("id", "d1"));
+                d1.Add(NewTextField("hed", "elephant", Field.Store.YES)); // Field.Text("hed", "elephant"));
+                d1.Add(NewTextField("dek", "elephant", Field.Store.YES)); // Field.Text("dek", "elephant"));
+                writer.AddDocument(d1);
+            }
+
+            // d2 is a "good" match for: albino elephant
+            {
+                Document d2 = new Document();
+                d2.Add(NewField("id", "d2", NonAnalyzedType)); // Field.Keyword("id", "d2"));
+                d2.Add(NewTextField("hed", "elephant", Field.Store.YES)); // Field.Text("hed", "elephant"));
+                d2.Add(NewTextField("dek", "albino", Field.Store.YES)); // Field.Text("dek", "albino"));
+                d2.Add(NewTextField("dek", "elephant", Field.Store.YES)); // Field.Text("dek", "elephant"));
+                writer.AddDocument(d2);
+            }
+
+            // d3 is a "better" match for: albino elephant
+            {
+                Document d3 = new Document();
+                d3.Add(NewField("id", "d3", NonAnalyzedType)); // Field.Keyword("id", "d3"));
+                d3.Add(NewTextField("hed", "albino", Field.Store.YES)); // Field.Text("hed", "albino"));
+                d3.Add(NewTextField("hed", "elephant", Field.Store.YES)); // Field.Text("hed", "elephant"));
+                writer.AddDocument(d3);
+            }
+
+            // d4 is the "best" match for: albino elephant
+            {
+                Document d4 = new Document();
+                d4.Add(NewField("id", "d4", NonAnalyzedType)); // Field.Keyword("id", "d4"));
+                d4.Add(NewTextField("hed", "albino", Field.Store.YES)); // Field.Text("hed", "albino"));
+                d4.Add(NewField("hed", "elephant", NonAnalyzedType)); // Field.Text("hed", "elephant"));
+                d4.Add(NewTextField("dek", "albino", Field.Store.YES)); // Field.Text("dek", "albino"));
+                writer.AddDocument(d4);
+            }
+
+            r = SlowCompositeReaderWrapper.Wrap(writer.Reader);
+            writer.Dispose();
+            s = NewSearcher(r);
+            s.Similarity = Sim;
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            r.Dispose();
+            Index.Dispose();
+            base.TearDown();
+        }
+
+        [Test]
+        public virtual void TestSkipToFirsttimeMiss()
+        {
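+            // Only d1 matches either clause, so the first Advance(3) on the
+            // scorer must run off the end and report NO_MORE_DOCS.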
+            DisjunctionMaxQuery dq = new DisjunctionMaxQuery(0.0f);
+            dq.Add(Tq("id", "d1"));
+            dq.Add(Tq("dek", "DOES_NOT_EXIST"));
+
+            QueryUtils.Check(Random(), dq, s, Similarity);
+            Assert.IsTrue(s.TopReaderContext is AtomicReaderContext);
+            Weight dw = s.CreateNormalizedWeight(dq);
+            AtomicReaderContext context = (AtomicReaderContext)s.TopReaderContext;
+            Scorer ds = dw.GetScorer(context, (context.AtomicReader).LiveDocs);
+            bool skipOk = ds.Advance(3) != DocIdSetIterator.NO_MORE_DOCS;
+            if (skipOk)
+            {
+                Assert.Fail("firsttime skipTo found a match? ... " + r.Document(ds.DocID).Get("id"));
+            }
+        }
+
+        [Test]
+        public virtual void TestSkipToFirsttimeHit()
+        {
+            DisjunctionMaxQuery dq = new DisjunctionMaxQuery(0.0f);
+            dq.Add(Tq("dek", "albino"));
+            dq.Add(Tq("dek", "DOES_NOT_EXIST"));
+            Assert.IsTrue(s.TopReaderContext is AtomicReaderContext);
+            QueryUtils.Check(Random(), dq, s, Similarity);
+            Weight dw = s.CreateNormalizedWeight(dq);
+            AtomicReaderContext context = (AtomicReaderContext)s.TopReaderContext;
+            Scorer ds = dw.GetScorer(context, (context.AtomicReader).LiveDocs);
+            Assert.IsTrue(ds.Advance(3) != DocIdSetIterator.NO_MORE_DOCS, "firsttime skipTo found no match");
+            Assert.AreEqual("d4", r.Document(ds.DocID).Get("id"), "found wrong docid");
+        }
+
+        [Test]
+        public virtual void TestSimpleEqualScores1()
+        {
+            DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.0f);
+            q.Add(Tq("hed", "albino"));
+            q.Add(Tq("hed", "elephant"));
+            QueryUtils.Check(Random(), q, s, Similarity);
+
+            ScoreDoc[] h = s.Search(q, null, 1000).ScoreDocs;
+
+            try
+            {
+                Assert.AreEqual(4, h.Length, "all docs should match " + q.ToString());
+
+                float score = h[0].Score;
+                for (int i = 1; i < h.Length; i++)
+                {
+                    Assert.AreEqual(score, h[i].Score, SCORE_COMP_THRESH, "score #" + i + " is not the same");
+                }
+            }
+            catch (Exception)
+            {
+                PrintHits("testSimpleEqualScores1", h, s);
+                throw; // rethrow without resetting the stack trace
+            }
+        }
+
+        [Test]
+        public virtual void TestSimpleEqualScores2()
+        {
+            DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.0f);
+            q.Add(Tq("dek", "albino"));
+            q.Add(Tq("dek", "elephant"));
+            QueryUtils.Check(Random(), q, s, Similarity);
+
+            ScoreDoc[] h = s.Search(q, null, 1000).ScoreDocs;
+
+            try
+            {
+                Assert.AreEqual(3, h.Length, "3 docs should match " + q.ToString());
+                float score = h[0].Score;
+                for (int i = 1; i < h.Length; i++)
+                {
+                    Assert.AreEqual(score, h[i].Score, SCORE_COMP_THRESH, "score #" + i + " is not the same");
+                }
+            }
+            catch (Exception)
+            {
+                PrintHits("testSimpleEqualScores2", h, s);
+                throw; // rethrow without resetting the stack trace
+            }
+        }
+
+        [Test]
+        public virtual void TestSimpleEqualScores3()
+        {
+            DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.0f);
+            q.Add(Tq("hed", "albino"));
+            q.Add(Tq("hed", "elephant"));
+            q.Add(Tq("dek", "albino"));
+            q.Add(Tq("dek", "elephant"));
+            QueryUtils.Check(Random(), q, s, Similarity);
+
+            ScoreDoc[] h = s.Search(q, null, 1000).ScoreDocs;
+
+            try
+            {
+                Assert.AreEqual(4, h.Length, "all docs should match " + q.ToString());
+                float score = h[0].Score;
+                for (int i = 1; i < h.Length; i++)
+                {
+                    Assert.AreEqual(score, h[i].Score, SCORE_COMP_THRESH, "score #" + i + " is not the same");
+                }
+            }
+            catch (Exception)
+            {
+                PrintHits("testSimpleEqualScores3", h, s);
+                throw; // rethrow without resetting the stack trace
+            }
+        }
+
+        [Test]
+        public virtual void TestSimpleTiebreaker()
+        {
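+            // With a 0.01 tiebreaker, d2 (whose dek field contains both terms)
+            // must edge out d4 and d1, which each match only one term and
+            // therefore tie with each other.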
+            DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.01f);
+            q.Add(Tq("dek", "albino"));
+            q.Add(Tq("dek", "elephant"));
+            QueryUtils.Check(Random(), q, s, Similarity);
+
+            ScoreDoc[] h = s.Search(q, null, 1000).ScoreDocs;
+
+            try
+            {
+                Assert.AreEqual(3, h.Length, "3 docs should match " + q.ToString());
+                Assert.AreEqual("d2", s.Doc(h[0].Doc).Get("id"), "wrong first");
+                float score0 = h[0].Score;
+                float score1 = h[1].Score;
+                float score2 = h[2].Score;
+                Assert.IsTrue(score0 > score1, "d2 does not have a better score than others: " + score0 + " >? " + score1);
+                Assert.AreEqual(score1, score2, SCORE_COMP_THRESH, "d4 and d1 don't have equal scores");
+            }
+            catch (Exception)
+            {
+                PrintHits("testSimpleTiebreaker", h, s);
+                throw; // rethrow without resetting the stack trace
+            }
+        }
+
+        [Test]
+        public virtual void TestBooleanRequiredEqualScores()
+        {
+            BooleanQuery q = new BooleanQuery();
+            {
+                DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(0.0f);
+                q1.Add(Tq("hed", "albino"));
+                q1.Add(Tq("dek", "albino"));
+                q.Add(q1, Occur.MUST); // true,false);
+                QueryUtils.Check(Random(), q1, s, Similarity);
+            }
+            {
+                DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(0.0f);
+                q2.Add(Tq("hed", "elephant"));
+                q2.Add(Tq("dek", "elephant"));
+                q.Add(q2, Occur.MUST); // true,false);
+                QueryUtils.Check(Random(), q2, s, Similarity);
+            }
+
+            QueryUtils.Check(Random(), q, s, Similarity);
+
+            ScoreDoc[] h = s.Search(q, null, 1000).ScoreDocs;
+
+            try
+            {
+                Assert.AreEqual(3, h.Length, "3 docs should match " + q.ToString());
+                float score = h[0].Score;
+                for (int i = 1; i < h.Length; i++)
+                {
+                    Assert.AreEqual(score, h[i].Score, SCORE_COMP_THRESH, "score #" + i + " is not the same");
+                }
+            }
+            catch (Exception)
+            {
+                PrintHits("testBooleanRequiredEqualScores1", h, s);
+                throw; // rethrow without resetting the stack trace
+            }
+        }
+
+        [Test]
+        public virtual void TestBooleanOptionalNoTiebreaker()
+        {
+            BooleanQuery q = new BooleanQuery();
+            {
+                DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(0.0f);
+                q1.Add(Tq("hed", "albino"));
+                q1.Add(Tq("dek", "albino"));
+                q.Add(q1, Occur.SHOULD); // false,false);
+            }
+            {
+                DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(0.0f);
+                q2.Add(Tq("hed", "elephant"));
+                q2.Add(Tq("dek", "elephant"));
+                q.Add(q2, Occur.SHOULD); // false,false);
+            }
+            QueryUtils.Check(Random(), q, s, Similarity);
+
+            ScoreDoc[] h = s.Search(q, null, 1000).ScoreDocs;
+
+            try
+            {
+                Assert.AreEqual(4, h.Length, "4 docs should match " + q.ToString());
+                float score = h[0].Score;
+                for (int i = 1; i < h.Length - 1; i++) // note: -1 excludes the last hit (d1), which scores lower
+                {
+                    Assert.AreEqual(score, h[i].Score, SCORE_COMP_THRESH, "score #" + i + " is not the same");
+                }
+                Assert.AreEqual("d1", s.Doc(h[h.Length - 1].Doc).Get("id"), "wrong last");
+                float score1 = h[h.Length - 1].Score;
+                Assert.IsTrue(score > score1, "d1 does not have a worse score than others: " + score + " >? " + score1);
+            }
+            catch (Exception)
+            {
+                PrintHits("testBooleanOptionalNoTiebreaker", h, s);
+                throw; // rethrow without resetting the stack trace
+            }
+        }
+
+        [Test]
+        public virtual void TestBooleanOptionalWithTiebreaker()
+        {
+            BooleanQuery q = new BooleanQuery();
+            {
+                DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(0.01f);
+                q1.Add(Tq("hed", "albino"));
+                q1.Add(Tq("dek", "albino"));
+                q.Add(q1, Occur.SHOULD); // false,false);
+            }
+            {
+                DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(0.01f);
+                q2.Add(Tq("hed", "elephant"));
+                q2.Add(Tq("dek", "elephant"));
+                q.Add(q2, Occur.SHOULD); // false,false);
+            }
+            QueryUtils.Check(Random(), q, s, Similarity);
+
+            ScoreDoc[] h = s.Search(q, null, 1000).ScoreDocs;
+
+            try
+            {
+                Assert.AreEqual(4, h.Length, "4 docs should match " + q.ToString());
+
+                float score0 = h[0].Score;
+                float score1 = h[1].Score;
+                float score2 = h[2].Score;
+                float score3 = h[3].Score;
+
+                string doc0 = s.Doc(h[0].Doc).Get("id");
+                string doc1 = s.Doc(h[1].Doc).Get("id");
+                string doc2 = s.Doc(h[2].Doc).Get("id");
+                string doc3 = s.Doc(h[3].Doc).Get("id");
+
+                Assert.IsTrue(doc0.Equals("d2") || doc0.Equals("d4"), "doc0 should be d2 or d4: " + doc0);
+                Assert.IsTrue(doc1.Equals("d2") || doc1.Equals("d4"), "doc1 should be d2 or d4: " + doc1);
+                Assert.AreEqual(score0, score1, SCORE_COMP_THRESH, "score0 and score1 should match");
+                Assert.AreEqual("d3", doc2, "wrong third");
+                Assert.IsTrue(score1 > score2, "d3 does not have a worse score than d2 and d4: " + score1 + " >? " + score2);
+
+                Assert.AreEqual("d1", doc3, "wrong fourth");
+                Assert.IsTrue(score2 > score3, "d1 does not have a worse score than d3: " + score2 + " >? " + score3);
+            }
+            catch (Exception)
+            {
+                PrintHits("testBooleanOptionalWithTiebreaker", h, s);
+                throw; // rethrow, preserving the original stack trace
+            }
+        }
+
+        [Test]
+        public virtual void TestBooleanOptionalWithTiebreakerAndBoost()
+        {
+            BooleanQuery q = new BooleanQuery();
+            {
+                DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(0.01f);
+                q1.Add(Tq("hed", "albino", 1.5f));
+                q1.Add(Tq("dek", "albino"));
+                q.Add(q1, Occur.SHOULD); // false,false);
+            }
+            {
+                DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(0.01f);
+                q2.Add(Tq("hed", "elephant", 1.5f));
+                q2.Add(Tq("dek", "elephant"));
+                q.Add(q2, Occur.SHOULD); // false,false);
+            }
+            QueryUtils.Check(Random(), q, s, Similarity);
+
+            ScoreDoc[] h = s.Search(q, null, 1000).ScoreDocs;
+
+            try
+            {
+                Assert.AreEqual(4, h.Length, "4 docs should match " + q.ToString());
+
+                float score0 = h[0].Score;
+                float score1 = h[1].Score;
+                float score2 = h[2].Score;
+                float score3 = h[3].Score;
+
+                string doc0 = s.Doc(h[0].Doc).Get("id");
+                string doc1 = s.Doc(h[1].Doc).Get("id");
+                string doc2 = s.Doc(h[2].Doc).Get("id");
+                string doc3 = s.Doc(h[3].Doc).Get("id");
+
+                Assert.AreEqual("d4", doc0, "doc0 should be d4: ");
+                Assert.AreEqual("d3", doc1, "doc1 should be d3: ");
+                Assert.AreEqual("d2", doc2, "doc2 should be d2: ");
+                Assert.AreEqual("d1", doc3, "doc3 should be d1: ");
+
+                Assert.IsTrue(score0 > score1, "d4 does not have a better score than d3: " + score0 + " >? " + score1);
+                Assert.IsTrue(score1 > score2, "d3 does not have a better score than d2: " + score1 + " >? " + score2);
+                Assert.IsTrue(score2 > score3, "d2 does not have a better score than d1: " + score2 + " >? " + score3);
+            }
+            catch (Exception)
+            {
+                PrintHits("TestBooleanOptionalWithTiebreakerAndBoost", h, s);
+                throw; // rethrow, preserving the original stack trace
+            }
+        }
+
+        // LUCENE-4477 / LUCENE-4401:
+        [Test]
+        public virtual void TestBooleanSpanQuery()
+        {
+            int hits = 0;
+            Directory directory = NewDirectory();
+            Analyzer indexerAnalyzer = new MockAnalyzer(Random());
+
+            IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, indexerAnalyzer);
+            IndexWriter writer = new IndexWriter(directory, config);
+            string FIELD = "content";
+            Document d = new Document();
+            d.Add(new TextField(FIELD, "clockwork orange", Field.Store.YES));
+            writer.AddDocument(d);
+            writer.Dispose();
+
+            IndexReader indexReader = DirectoryReader.Open(directory);
+            IndexSearcher searcher = NewSearcher(indexReader);
+
+            DisjunctionMaxQuery query = new DisjunctionMaxQuery(1.0f);
+            SpanQuery sq1 = new SpanTermQuery(new Term(FIELD, "clockwork"));
+            SpanQuery sq2 = new SpanTermQuery(new Term(FIELD, "clckwork"));
+            query.Add(sq1);
+            query.Add(sq2);
+            TopScoreDocCollector collector = TopScoreDocCollector.Create(1000, true);
+            searcher.Search(query, collector);
+            hits = collector.GetTopDocs().ScoreDocs.Length;
+            foreach (ScoreDoc scoreDoc in collector.GetTopDocs().ScoreDocs)
+            {
+                Console.WriteLine(scoreDoc.Doc);
+            }
+            indexReader.Dispose();
+            Assert.AreEqual(1, hits); // NUnit convention: expected value first, then actual
+            directory.Dispose();
+        }
+
+        /// <summary>
+        /// macro </summary>
+        protected internal virtual Query Tq(string f, string t)
+        {
+            return new TermQuery(new Term(f, t));
+        }
+
+        /// <summary>
+        /// macro </summary>
+        protected internal virtual Query Tq(string f, string t, float b)
+        {
+            Query q = Tq(f, t);
+            q.Boost = b;
+            return q;
+        }
+
+        protected internal virtual void PrintHits(string test, ScoreDoc[] h, IndexSearcher searcher)
+        {
+            Console.Error.WriteLine("------- " + test + " -------");
+
+            //DecimalFormat f = new DecimalFormat("0.000000000", DecimalFormatSymbols.getInstance(Locale.ROOT));
+
+            NumberFormatInfo f = new NumberFormatInfo();
+            f.NumberDecimalSeparator = ".";
+
+            for (int i = 0; i < h.Length; i++)
+            {
+                Document d = searcher.Doc(h[i].Doc);
+                decimal score = (decimal)h[i].Score;
+                Console.Error.WriteLine("#" + i + ": " + score.ToString(f) + " - " + d.Get("id"));
+            }
+        }
+    }
+}
\ No newline at end of file
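
For context on the three tests above: a DisjunctionMaxQuery tieBreakerMultiplier of 0.0f means only the best-scoring disjunct counts, while a small value such as 0.01f lets a document that matches in several fields edge ahead of an equally good single-field match. A minimal sketch of the construction, assuming the Lucene.Net 4.8 query API (the class name is illustrative, not part of the commit):

    using Lucene.Net.Index;
    using Lucene.Net.Search;

    public static class DisMaxSketch
    {
        // score = max(disjunct scores) + tieBreaker * (sum of the remaining disjunct scores)
        public static DisjunctionMaxQuery AlbinoQuery(float tieBreaker)
        {
            var dmq = new DisjunctionMaxQuery(tieBreaker);
            dmq.Add(new TermQuery(new Term("hed", "albino"))); // headline field
            dmq.Add(new TermQuery(new Term("dek", "albino"))); // subheading field
            return dmq;
        }
    }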

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestDocBoost.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestDocBoost.cs b/src/Lucene.Net.Tests/Search/TestDocBoost.cs
new file mode 100644
index 0000000..29fe7d6
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestDocBoost.cs
@@ -0,0 +1,122 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    
+    using NUnit.Framework;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using Directory = Lucene.Net.Store.Directory;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Document boost unit test.
+    /// </summary>
+    [TestFixture]
+    public class TestDocBoost : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestDocBoost_Mem()
+        {
+            Directory store = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), store, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+
+            Field f1 = NewTextField("field", "word", Field.Store.YES);
+            Field f2 = NewTextField("field", "word", Field.Store.YES);
+            f2.Boost = 2.0f;
+
+            Documents.Document d1 = new Documents.Document();
+            Documents.Document d2 = new Documents.Document();
+
+            d1.Add(f1); // boost = 1
+            d2.Add(f2); // boost = 2
+
+            writer.AddDocument(d1);
+            writer.AddDocument(d2);
+
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            float[] scores = new float[4];
+
+            IndexSearcher searcher = NewSearcher(reader);
+            searcher.Search(new TermQuery(new Term("field", "word")), new CollectorAnonymousInnerClassHelper(this, scores));
+
+            float lastScore = 0.0f;
+
+            for (int i = 0; i < 2; i++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine(searcher.Explain(new TermQuery(new Term("field", "word")), i));
+                }
+                Assert.IsTrue(scores[i] > lastScore, "score: " + scores[i] + " should be > lastScore: " + lastScore);
+                lastScore = scores[i];
+            }
+
+            reader.Dispose();
+            store.Dispose();
+        }
+
+        private class CollectorAnonymousInnerClassHelper : ICollector
+        {
+            private readonly TestDocBoost OuterInstance;
+
+            private float[] Scores;
+
+            public CollectorAnonymousInnerClassHelper(TestDocBoost outerInstance, float[] scores)
+            {
+                this.OuterInstance = outerInstance;
+                this.Scores = scores;
+                @base = 0;
+            }
+
+            private int @base;
+            private Scorer scorer;
+
+            public virtual void SetScorer(Scorer scorer)
+            {
+                this.scorer = scorer;
+            }
+
+            public virtual void Collect(int doc)
+            {
+                Scores[doc + @base] = scorer.GetScore();
+            }
+
+            public virtual void SetNextReader(AtomicReaderContext context)
+            {
+                @base = context.DocBase;
+            }
+
+            public virtual bool AcceptsDocsOutOfOrder
+            {
+                get { return true; }
+            }
+        }
+    }
+}
\ No newline at end of file
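
For context: TestDocBoost_Mem depends on index-time field boosts being folded into the field norm, which is why the second document outscores the first. A short sketch of that setup, assuming only the stock Lucene.Net 4.8 document API (class name illustrative):

    using Lucene.Net.Documents;

    public static class BoostSketch
    {
        public static Document BoostedDoc(float boost)
        {
            var doc = new Document();
            var field = new TextField("field", "word", Field.Store.YES);
            field.Boost = boost; // multiplied into the field's norm at index time
            doc.Add(field);
            return doc;
        }
    }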

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestDocIdSet.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestDocIdSet.cs b/src/Lucene.Net.Tests/Search/TestDocIdSet.cs
new file mode 100644
index 0000000..440fe7d
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestDocIdSet.cs
@@ -0,0 +1,254 @@
+using System;
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using IBits = Lucene.Net.Util.IBits;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+
+    [TestFixture]
+    public class TestDocIdSet : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestFilteredDocIdSet()
+        {
+            const int maxdoc = 10;
+            DocIdSet innerSet = new DocIdSetAnonymousInnerClassHelper(this, maxdoc);
+
+            DocIdSet filteredSet = new FilteredDocIdSetAnonymousInnerClassHelper(this, innerSet);
+
+            DocIdSetIterator iter = filteredSet.GetIterator();
+            List<int?> list = new List<int?>();
+            int doc = iter.Advance(3);
+            if (doc != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                list.Add(Convert.ToInt32(doc));
+                while ((doc = iter.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
+                {
+                    list.Add(Convert.ToInt32(doc));
+                }
+            }
+
+            int[] docs = new int[list.Count];
+            int c = 0;
+            IEnumerator<int?> intIter = list.GetEnumerator();
+            while (intIter.MoveNext())
+            {
+                docs[c++] = (int)intIter.Current;
+            }
+            int[] answer = new int[] { 4, 6, 8 };
+            bool same = Arrays.Equals(answer, docs);
+            if (!same)
+            {
+                Console.WriteLine("answer: " + Arrays.ToString(answer));
+                Console.WriteLine("gotten: " + Arrays.ToString(docs));
+                Assert.Fail();
+            }
+        }
+
+        private class DocIdSetAnonymousInnerClassHelper : DocIdSet
+        {
+            private readonly TestDocIdSet OuterInstance;
+
+            private int Maxdoc;
+
+            public DocIdSetAnonymousInnerClassHelper(TestDocIdSet outerInstance, int maxdoc)
+            {
+                this.OuterInstance = outerInstance;
+                this.Maxdoc = maxdoc;
+            }
+
+            public override DocIdSetIterator GetIterator()
+            {
+                return new DocIdSetIteratorAnonymousInnerClassHelper(this);
+            }
+
+            private class DocIdSetIteratorAnonymousInnerClassHelper : DocIdSetIterator
+            {
+                private readonly DocIdSetAnonymousInnerClassHelper OuterInstance;
+
+                public DocIdSetIteratorAnonymousInnerClassHelper(DocIdSetAnonymousInnerClassHelper outerInstance)
+                {
+                    this.OuterInstance = outerInstance;
+                    docid = -1;
+                }
+
+                internal int docid;
+
+                public override int DocID
+                {
+                    get { return docid; }
+                }
+
+                public override int NextDoc()
+                {
+                    docid++;
+                    return docid < OuterInstance.Maxdoc ? docid : (docid = NO_MORE_DOCS);
+                }
+
+                public override int Advance(int target)
+                {
+                    return SlowAdvance(target);
+                }
+
+                public override long GetCost()
+                {
+                    return 1;
+                }
+            }
+        }
+
+        private class FilteredDocIdSetAnonymousInnerClassHelper : FilteredDocIdSet
+        {
+            private readonly TestDocIdSet OuterInstance;
+
+            public FilteredDocIdSetAnonymousInnerClassHelper(TestDocIdSet outerInstance, DocIdSet innerSet)
+                : base(innerSet)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected override bool Match(int docid)
+            {
+                return docid % 2 == 0; //validate only even docids
+            }
+        }
+
+        [Test]
+        public virtual void TestNullDocIdSet()
+        {
+            // Tests that if a Filter produces a null DocIdSet, which is given to
+            // IndexSearcher, everything works fine. This came up in LUCENE-1754.
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewStringField("c", "val", Field.Store.NO));
+            writer.AddDocument(doc);
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            // First verify the document is searchable.
+            IndexSearcher searcher = NewSearcher(reader);
+            Assert.AreEqual(1, searcher.Search(new MatchAllDocsQuery(), 10).TotalHits);
+
+            // Now search w/ a Filter which returns a null DocIdSet
+            Filter f = new FilterAnonymousInnerClassHelper(this);
+
+            Assert.AreEqual(0, searcher.Search(new MatchAllDocsQuery(), f, 10).TotalHits);
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        private class FilterAnonymousInnerClassHelper : Filter
+        {
+            private readonly TestDocIdSet OuterInstance;
+
+            public FilterAnonymousInnerClassHelper(TestDocIdSet outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                return null;
+            }
+        }
+
+        [Test]
+        public virtual void TestNullIteratorFilteredDocIdSet()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewStringField("c", "val", Field.Store.NO));
+            writer.AddDocument(doc);
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            // First verify the document is searchable.
+            IndexSearcher searcher = NewSearcher(reader);
+            Assert.AreEqual(1, searcher.Search(new MatchAllDocsQuery(), 10).TotalHits);
+
+            // Now search w/ a Filter which returns a null DocIdSet
+            Filter f = new FilterAnonymousInnerClassHelper2(this);
+
+            Assert.AreEqual(0, searcher.Search(new MatchAllDocsQuery(), f, 10).TotalHits);
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        private class FilterAnonymousInnerClassHelper2 : Filter
+        {
+            private readonly TestDocIdSet OuterInstance;
+
+            public FilterAnonymousInnerClassHelper2(TestDocIdSet outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                DocIdSet innerNullIteratorSet = new DocIdSetAnonymousInnerClassHelper2(this);
+                return new FilteredDocIdSetAnonymousInnerClassHelper2(this, innerNullIteratorSet);
+            }
+
+            private class DocIdSetAnonymousInnerClassHelper2 : DocIdSet
+            {
+                private readonly FilterAnonymousInnerClassHelper2 OuterInstance;
+
+                public DocIdSetAnonymousInnerClassHelper2(FilterAnonymousInnerClassHelper2 outerInstance)
+                {
+                    this.OuterInstance = outerInstance;
+                }
+
+                public override DocIdSetIterator GetIterator()
+                {
+                    return null;
+                }
+            }
+
+            private class FilteredDocIdSetAnonymousInnerClassHelper2 : FilteredDocIdSet
+            {
+                private readonly FilterAnonymousInnerClassHelper2 OuterInstance;
+
+                public FilteredDocIdSetAnonymousInnerClassHelper2(FilterAnonymousInnerClassHelper2 outerInstance, DocIdSet innerNullIteratorSet)
+                    : base(innerNullIteratorSet)
+                {
+                    this.OuterInstance = outerInstance;
+                }
+
+                protected override bool Match(int docid)
+                {
+                    return true;
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
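
The FilteredDocIdSet pattern exercised above is simple but easy to miss in the anonymous-class scaffolding: the wrapper delegates iteration to the inner set and keeps only the docids for which Match returns true. The same even-docid filter as a standalone sketch (class name illustrative):

    using Lucene.Net.Search;

    internal sealed class EvenDocIdSet : FilteredDocIdSet
    {
        public EvenDocIdSet(DocIdSet inner)
            : base(inner)
        {
        }

        protected override bool Match(int docid)
        {
            return docid % 2 == 0; // validate only even docids, as in the test
        }
    }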

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestDocTermOrdsRangeFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestDocTermOrdsRangeFilter.cs b/src/Lucene.Net.Tests/Search/TestDocTermOrdsRangeFilter.cs
new file mode 100644
index 0000000..f6b2706
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestDocTermOrdsRangeFilter.cs
@@ -0,0 +1,149 @@
+using Lucene.Net.Documents;
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Randomized.Generators;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using SortedSetDocValuesField = SortedSetDocValuesField;
+    using Term = Lucene.Net.Index.Term;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using UnicodeUtil = Lucene.Net.Util.UnicodeUtil;
+
+    /// <summary>
+    /// Tests the DocTermOrdsRangeFilter
+    /// </summary>
+    [TestFixture]
+    public class TestDocTermOrdsRangeFilter : LuceneTestCase
+    {
+        protected internal IndexSearcher Searcher1;
+        protected internal IndexSearcher Searcher2;
+        private IndexReader Reader;
+        private Directory Dir;
+        protected internal string FieldName;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Dir = NewDirectory();
+            FieldName = Random().NextBoolean() ? "field" : ""; // sometimes use an empty string as field name
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false)).SetMaxBufferedDocs(TestUtil.NextInt(Random(), 50, 1000)));
+            List<string> terms = new List<string>();
+            int num = AtLeast(200);
+            for (int i = 0; i < num; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("id", Convert.ToString(i), Field.Store.NO));
+                int numTerms = Random().Next(4);
+                for (int j = 0; j < numTerms; j++)
+                {
+                    string s = TestUtil.RandomUnicodeString(Random());
+                    doc.Add(NewStringField(FieldName, s, Field.Store.NO));
+                    // if the default codec doesn't support sortedset, we will uninvert at search time
+                    if (DefaultCodecSupportsSortedSet())
+                    {
+                        doc.Add(new SortedSetDocValuesField(FieldName, new BytesRef(s)));
+                    }
+                    terms.Add(s);
+                }
+                writer.AddDocument(doc);
+            }
+
+            if (VERBOSE)
+            {
+                // utf16 order
+                terms.Sort();
+                Console.WriteLine("UTF16 order:");
+                foreach (string s in terms)
+                {
+                    Console.WriteLine("  " + UnicodeUtil.ToHexString(s));
+                }
+            }
+
+            int numDeletions = Random().Next(num / 10);
+            for (int i = 0; i < numDeletions; i++)
+            {
+                writer.DeleteDocuments(new Term("id", Convert.ToString(Random().Next(num))));
+            }
+
+            Reader = writer.Reader;
+            Searcher1 = NewSearcher(Reader);
+            Searcher2 = NewSearcher(Reader);
+            writer.Dispose();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        /// <summary>
+        /// test a bunch of random ranges </summary>
+        [Test]
+        public virtual void TestRanges()
+        {
+            int num = AtLeast(1000);
+            for (int i = 0; i < num; i++)
+            {
+                BytesRef lowerVal = new BytesRef(TestUtil.RandomUnicodeString(Random()));
+                BytesRef upperVal = new BytesRef(TestUtil.RandomUnicodeString(Random()));
+                if (upperVal.CompareTo(lowerVal) < 0)
+                {
+                    AssertSame(upperVal, lowerVal, Random().NextBoolean(), Random().NextBoolean());
+                }
+                else
+                {
+                    AssertSame(lowerVal, upperVal, Random().NextBoolean(), Random().NextBoolean());
+                }
+            }
+        }
+
+        /// <summary>
+        /// check that the # of hits is the same as if the query
+        /// is run against the inverted index
+        /// </summary>
+        protected internal virtual void AssertSame(BytesRef lowerVal, BytesRef upperVal, bool includeLower, bool includeUpper)
+        {
+            Query docValues = new ConstantScoreQuery(DocTermOrdsRangeFilter.NewBytesRefRange(FieldName, lowerVal, upperVal, includeLower, includeUpper));
+            MultiTermQuery inverted = new TermRangeQuery(FieldName, lowerVal, upperVal, includeLower, includeUpper);
+            inverted.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
+
+            TopDocs invertedDocs = Searcher1.Search(inverted, 25);
+            TopDocs docValuesDocs = Searcher2.Search(docValues, 25);
+
+            CheckHits.CheckEqual(inverted, invertedDocs.ScoreDocs, docValuesDocs.ScoreDocs);
+        }
+    }
+}
\ No newline at end of file
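
AssertSame above compares two ways of answering one range: a docvalues-backed DocTermOrdsRangeFilter wrapped in a ConstantScoreQuery versus a TermRangeQuery over the inverted index rewritten to a constant-score filter. A sketch of the pair, with a hypothetical field name and bounds supplied by the caller:

    using Lucene.Net.Search;
    using Lucene.Net.Util;

    public static class RangeSketch
    {
        public static Query DocValuesSide(string field, BytesRef lower, BytesRef upper)
        {
            // resolved against the SortedSetDocValues (or uninverted) field
            return new ConstantScoreQuery(
                DocTermOrdsRangeFilter.NewBytesRefRange(field, lower, upper, true, false));
        }

        public static Query InvertedSide(string field, BytesRef lower, BytesRef upper)
        {
            // resolved against the inverted index, constant-score like the filter
            MultiTermQuery q = new TermRangeQuery(field, lower, upper, true, false);
            q.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
            return q;
        }
    }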

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestDocTermOrdsRewriteMethod.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestDocTermOrdsRewriteMethod.cs b/src/Lucene.Net.Tests/Search/TestDocTermOrdsRewriteMethod.cs
new file mode 100644
index 0000000..7389e9e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestDocTermOrdsRewriteMethod.cs
@@ -0,0 +1,164 @@
+using System;
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using AutomatonTestUtil = Lucene.Net.Util.Automaton.AutomatonTestUtil;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using RegExp = Lucene.Net.Util.Automaton.RegExp;
+    using SortedSetDocValuesField = SortedSetDocValuesField;
+    using Term = Lucene.Net.Index.Term;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using UnicodeUtil = Lucene.Net.Util.UnicodeUtil;
+
+    /// <summary>
+    /// Tests the DocTermOrdsRewriteMethod
+    /// </summary>
+    [TestFixture]
+    public class TestDocTermOrdsRewriteMethod : LuceneTestCase
+    {
+        protected internal IndexSearcher Searcher1;
+        protected internal IndexSearcher Searcher2;
+        private IndexReader Reader;
+        private Directory Dir;
+        protected internal string FieldName;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Dir = NewDirectory();
+            FieldName = Random().NextBoolean() ? "field" : ""; // sometimes use an empty string as field name
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false)).SetMaxBufferedDocs(TestUtil.NextInt(Random(), 50, 1000)));
+            List<string> terms = new List<string>();
+            int num = AtLeast(200);
+            for (int i = 0; i < num; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("id", Convert.ToString(i), Field.Store.NO));
+                int numTerms = Random().Next(4);
+                for (int j = 0; j < numTerms; j++)
+                {
+                    string s = TestUtil.RandomUnicodeString(Random());
+                    doc.Add(NewStringField(FieldName, s, Field.Store.NO));
+                    // if the default codec doesn't support sortedset, we will uninvert at search time
+                    if (DefaultCodecSupportsSortedSet())
+                    {
+                        doc.Add(new SortedSetDocValuesField(FieldName, new BytesRef(s)));
+                    }
+                    terms.Add(s);
+                }
+                writer.AddDocument(doc);
+            }
+
+            if (VERBOSE)
+            {
+                // utf16 order
+                terms.Sort();
+                Console.WriteLine("UTF16 order:");
+                foreach (string s in terms)
+                {
+                    Console.WriteLine("  " + UnicodeUtil.ToHexString(s));
+                }
+            }
+
+            int numDeletions = Random().Next(num / 10);
+            for (int i = 0; i < numDeletions; i++)
+            {
+                writer.DeleteDocuments(new Term("id", Convert.ToString(Random().Next(num))));
+            }
+
+            Reader = writer.Reader;
+            Searcher1 = NewSearcher(Reader);
+            Searcher2 = NewSearcher(Reader);
+            writer.Dispose();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        /// <summary>
+        /// test a bunch of random regular expressions </summary>
+        [Test]
+        public virtual void TestRegexps()
+        {
+            int num = AtLeast(1000);
+            for (int i = 0; i < num; i++)
+            {
+                string reg = AutomatonTestUtil.RandomRegexp(Random());
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: regexp=" + reg);
+                }
+                AssertSame(reg);
+            }
+        }
+
+        /// <summary>
+        /// check that the # of hits is the same as if the query
+        /// is run against the inverted index
+        /// </summary>
+        protected internal virtual void AssertSame(string regexp)
+        {
+            RegexpQuery docValues = new RegexpQuery(new Term(FieldName, regexp), RegExp.NONE);
+            docValues.MultiTermRewriteMethod = new DocTermOrdsRewriteMethod();
+            RegexpQuery inverted = new RegexpQuery(new Term(FieldName, regexp), RegExp.NONE);
+
+            TopDocs invertedDocs = Searcher1.Search(inverted, 25);
+            TopDocs docValuesDocs = Searcher2.Search(docValues, 25);
+
+            CheckHits.CheckEqual(inverted, invertedDocs.ScoreDocs, docValuesDocs.ScoreDocs);
+        }
+
+        [Test]
+        public virtual void TestEquals()
+        {
+            RegexpQuery a1 = new RegexpQuery(new Term(FieldName, "[aA]"), RegExp.NONE);
+            RegexpQuery a2 = new RegexpQuery(new Term(FieldName, "[aA]"), RegExp.NONE);
+            RegexpQuery b = new RegexpQuery(new Term(FieldName, "[bB]"), RegExp.NONE);
+            Assert.AreEqual(a1, a2);
+            Assert.IsFalse(a1.Equals(b));
+
+            a1.MultiTermRewriteMethod = new DocTermOrdsRewriteMethod();
+            a2.MultiTermRewriteMethod = new DocTermOrdsRewriteMethod();
+            b.MultiTermRewriteMethod = new DocTermOrdsRewriteMethod();
+            Assert.AreEqual(a1, a2);
+            Assert.IsFalse(a1.Equals(b));
+            QueryUtils.Check(a1);
+        }
+    }
+}
\ No newline at end of file
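
The rewrite-method tests hinge on a single assignment: giving a MultiTermQuery a DocTermOrdsRewriteMethod makes it resolve terms against docvalues ordinals instead of the inverted index, and (as TestEquals checks) the rewrite method participates in query equality. A sketch of that assignment, reusing the API calls from the test (helper name illustrative):

    using Lucene.Net.Index;
    using Lucene.Net.Search;
    using RegExp = Lucene.Net.Util.Automaton.RegExp;

    public static class RewriteSketch
    {
        public static RegexpQuery DocValuesRegexp(string field, string regexp)
        {
            var q = new RegexpQuery(new Term(field, regexp), RegExp.NONE);
            q.MultiTermRewriteMethod = new DocTermOrdsRewriteMethod(); // resolve via docvalues ordinals
            return q;
        }
    }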

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestDocValuesScoring.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestDocValuesScoring.cs b/src/Lucene.Net.Tests/Search/TestDocValuesScoring.cs
new file mode 100644
index 0000000..ebd3e72
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestDocValuesScoring.cs
@@ -0,0 +1,233 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Index;
+    using NUnit.Framework;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldInvertState = Lucene.Net.Index.FieldInvertState;
+    using SingleDocValuesField = SingleDocValuesField;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using PerFieldSimilarityWrapper = Lucene.Net.Search.Similarities.PerFieldSimilarityWrapper;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Similarity = Lucene.Net.Search.Similarities.Similarity;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Tests the use of index docvalues in scoring.
+    ///
+    /// In the example, a docvalues field is used as a per-document boost (separate from the norm).
+    /// @lucene.experimental
+    /// </summary>
+    [SuppressCodecs("Lucene3x")]
+    [TestFixture]
+    public class TestDocValuesScoring : LuceneTestCase
+    {
+        private const float SCORE_EPSILON = 0.001f; // for comparing floats
+
+        [Test]
+        public virtual void TestSimple()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            Field field = NewTextField("foo", "", Field.Store.NO);
+            doc.Add(field);
+            Field dvField = new SingleDocValuesField("foo_boost", 0.0F);
+            doc.Add(dvField);
+            Field field2 = NewTextField("bar", "", Field.Store.NO);
+            doc.Add(field2);
+
+            field.SetStringValue("quick brown fox");
+            field2.SetStringValue("quick brown fox");
+            dvField.SetSingleValue(2f); // boost x2
+            iw.AddDocument(doc);
+            field.SetStringValue("jumps over lazy brown dog");
+            field2.SetStringValue("jumps over lazy brown dog");
+            dvField.SetSingleValue(4f); // boost x4
+            iw.AddDocument(doc);
+            IndexReader ir = iw.Reader;
+            iw.Dispose();
+
+            // no boosting
+            IndexSearcher searcher1 = NewSearcher(ir, false, Similarity);
+            Similarity @base = searcher1.Similarity;
+            // boosting
+            IndexSearcher searcher2 = NewSearcher(ir, false, Similarity);
+            searcher2.Similarity = new PerFieldSimilarityWrapperAnonymousInnerClassHelper(this, field, @base);
+
+            // in this case, we searched on field "foo". first document should have 2x the score.
+            TermQuery tq = new TermQuery(new Term("foo", "quick"));
+            QueryUtils.Check(Random(), tq, searcher1, Similarity);
+            QueryUtils.Check(Random(), tq, searcher2, Similarity);
+
+            TopDocs noboost = searcher1.Search(tq, 10);
+            TopDocs boost = searcher2.Search(tq, 10);
+            Assert.AreEqual(1, noboost.TotalHits);
+            Assert.AreEqual(1, boost.TotalHits);
+
+            //System.out.println(searcher2.Explain(tq, boost.ScoreDocs[0].Doc));
+            Assert.AreEqual(boost.ScoreDocs[0].Score, noboost.ScoreDocs[0].Score * 2f, SCORE_EPSILON);
+
+            // this query matches only the second document, which should have 4x the score.
+            tq = new TermQuery(new Term("foo", "jumps"));
+            QueryUtils.Check(Random(), tq, searcher1, Similarity);
+            QueryUtils.Check(Random(), tq, searcher2, Similarity);
+
+            noboost = searcher1.Search(tq, 10);
+            boost = searcher2.Search(tq, 10);
+            Assert.AreEqual(1, noboost.TotalHits);
+            Assert.AreEqual(1, boost.TotalHits);
+
+            Assert.AreEqual(boost.ScoreDocs[0].Score, noboost.ScoreDocs[0].Score * 4f, SCORE_EPSILON);
+
+            // search on field bar just for kicks; nothing should happen, since we set up
+            // our sim provider to only use foo_boost for field foo.
+            tq = new TermQuery(new Term("bar", "quick"));
+            QueryUtils.Check(Random(), tq, searcher1, Similarity);
+            QueryUtils.Check(Random(), tq, searcher2, Similarity);
+
+            noboost = searcher1.Search(tq, 10);
+            boost = searcher2.Search(tq, 10);
+            Assert.AreEqual(1, noboost.TotalHits);
+            Assert.AreEqual(1, boost.TotalHits);
+
+            Assert.AreEqual(boost.ScoreDocs[0].Score, noboost.ScoreDocs[0].Score, SCORE_EPSILON);
+
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        private class PerFieldSimilarityWrapperAnonymousInnerClassHelper : PerFieldSimilarityWrapper
+        {
+            private readonly TestDocValuesScoring OuterInstance;
+
+            private Field Field;
+            private Similarity @base;
+
+            public PerFieldSimilarityWrapperAnonymousInnerClassHelper(TestDocValuesScoring outerInstance, Field field, Similarity @base)
+            {
+                this.OuterInstance = outerInstance;
+                this.Field = field;
+                this.@base = @base;
+                fooSim = new BoostingSimilarity(@base, "foo_boost");
+            }
+
+            internal readonly Similarity fooSim;
+
+            public override Similarity Get(string field)
+            {
+                return "foo".Equals(field) ? fooSim : @base;
+            }
+
+            public override float Coord(int overlap, int maxOverlap)
+            {
+                return @base.Coord(overlap, maxOverlap);
+            }
+
+            public override float QueryNorm(float sumOfSquaredWeights)
+            {
+                return @base.QueryNorm(sumOfSquaredWeights);
+            }
+        }
+
+        /// <summary>
+        /// Similarity that wraps another similarity and boosts the final score
+        /// according to what's in a docvalues field.
+        ///
+        /// @lucene.experimental
+        /// </summary>
+        internal class BoostingSimilarity : Similarity
+        {
+            internal readonly Similarity Sim;
+            internal readonly string BoostField;
+
+            public BoostingSimilarity(Similarity sim, string boostField)
+            {
+                this.Sim = sim;
+                this.BoostField = boostField;
+            }
+
+            public override long ComputeNorm(FieldInvertState state)
+            {
+                return Sim.ComputeNorm(state);
+            }
+
+            public override SimWeight ComputeWeight(float queryBoost, CollectionStatistics collectionStats, params TermStatistics[] termStats)
+            {
+                return Sim.ComputeWeight(queryBoost, collectionStats, termStats);
+            }
+
+            public override SimScorer GetSimScorer(SimWeight stats, AtomicReaderContext context)
+            {
+                SimScorer sub = Sim.GetSimScorer(stats, context);
+                FieldCache.Singles values = FieldCache.DEFAULT.GetSingles(context.AtomicReader, BoostField, false);
+
+                return new SimScorerAnonymousInnerClassHelper(this, sub, values);
+            }
+
+            private class SimScorerAnonymousInnerClassHelper : SimScorer
+            {
+                private readonly BoostingSimilarity OuterInstance;
+
+                private SimScorer Sub;
+                private FieldCache.Singles Values;
+
+                public SimScorerAnonymousInnerClassHelper(BoostingSimilarity outerInstance, SimScorer sub, FieldCache.Singles values)
+                {
+                    this.OuterInstance = outerInstance;
+                    this.Sub = sub;
+                    this.Values = values;
+                }
+
+                public override float Score(int doc, float freq)
+                {
+                    return Values.Get(doc) * Sub.Score(doc, freq);
+                }
+
+                public override float ComputeSlopFactor(int distance)
+                {
+                    return Sub.ComputeSlopFactor(distance);
+                }
+
+                public override float ComputePayloadFactor(int doc, int start, int end, BytesRef payload)
+                {
+                    return Sub.ComputePayloadFactor(doc, start, end, payload);
+                }
+
+                public override Explanation Explain(int doc, Explanation freq)
+                {
+                    Explanation boostExplanation = new Explanation(Values.Get(doc), "indexDocValue(" + OuterInstance.BoostField + ")");
+                    Explanation simExplanation = Sub.Explain(doc, freq);
+                    Explanation expl = new Explanation(boostExplanation.Value * simExplanation.Value, "product of:");
+                    expl.AddDetail(boostExplanation);
+                    expl.AddDetail(simExplanation);
+                    return expl;
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
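
The searcher-level hook in this test is PerFieldSimilarityWrapper: route field "foo" to a boosting similarity and every other field to the default, which is why the "bar" query scores identically on both searchers. The same routing as a named class, with both similarities injected (class name illustrative):

    using Lucene.Net.Search.Similarities;

    internal sealed class FooRoutingSimilarity : PerFieldSimilarityWrapper
    {
        private readonly Similarity defaultSim;
        private readonly Similarity fooSim;

        public FooRoutingSimilarity(Similarity defaultSim, Similarity fooSim)
        {
            this.defaultSim = defaultSim;
            this.fooSim = fooSim;
        }

        public override Similarity Get(string field)
        {
            return "foo".Equals(field) ? fooSim : defaultSim; // boost only field "foo"
        }

        public override float Coord(int overlap, int maxOverlap)
        {
            return defaultSim.Coord(overlap, maxOverlap);
        }

        public override float QueryNorm(float sumOfSquaredWeights)
        {
            return defaultSim.QueryNorm(sumOfSquaredWeights);
        }
    }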


[43/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestAllFilesHaveCodecHeader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestAllFilesHaveCodecHeader.cs b/src/Lucene.Net.Tests/Index/TestAllFilesHaveCodecHeader.cs
new file mode 100644
index 0000000..2fbaa40
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestAllFilesHaveCodecHeader.cs
@@ -0,0 +1,119 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using CodecUtil = Lucene.Net.Codecs.CodecUtil;
+    using CompoundFileDirectory = Lucene.Net.Store.CompoundFileDirectory;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexInput = Lucene.Net.Store.IndexInput;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using Lucene46Codec = Lucene.Net.Codecs.Lucene46.Lucene46Codec;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using NumericDocValuesField = NumericDocValuesField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// Test that the plain default codec puts codec headers in all files.
+    /// </summary>
+    [TestFixture]
+    public class TestAllFilesHaveCodecHeader : LuceneTestCase
+    {
+        [Test]
+        public virtual void Test()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetCodec(new Lucene46Codec());
+            RandomIndexWriter riw = new RandomIndexWriter(Random(), dir, conf);
+            Document doc = new Document();
+            // these fields should sometimes get term vectors, etc
+            Field idField = NewStringField("id", "", Field.Store.NO);
+            Field bodyField = NewTextField("body", "", Field.Store.NO);
+            Field dvField = new NumericDocValuesField("dv", 5);
+            doc.Add(idField);
+            doc.Add(bodyField);
+            doc.Add(dvField);
+            for (int i = 0; i < 100; i++)
+            {
+                idField.SetStringValue(Convert.ToString(i));
+                bodyField.SetStringValue(TestUtil.RandomUnicodeString(Random()));
+                riw.AddDocument(doc);
+                if (Random().Next(7) == 0)
+                {
+                    riw.Commit();
+                }
+                // TODO: we should make a new format with a clean header...
+                // if (Random().nextInt(20) == 0) {
+                //  riw.DeleteDocuments(new Term("id", Integer.toString(i)));
+                // }
+            }
+            riw.Dispose();
+            CheckHeaders(dir);
+            dir.Dispose();
+        }
+
+        private void CheckHeaders(Directory dir)
+        {
+            foreach (string file in dir.ListAll())
+            {
+                if (file.Equals(IndexWriter.WRITE_LOCK_NAME))
+                {
+                    continue; // write.lock has no header, that's OK
+                }
+                if (file.Equals(IndexFileNames.SEGMENTS_GEN))
+                {
+                    continue; // segments.gen has no header, that's OK
+                }
+                if (file.EndsWith(IndexFileNames.COMPOUND_FILE_EXTENSION))
+                {
+                    CompoundFileDirectory cfsDir = new CompoundFileDirectory(dir, file, NewIOContext(Random()), false);
+                    CheckHeaders(cfsDir); // recurse into cfs
+                    cfsDir.Dispose();
+                }
+                IndexInput @in = null;
+                bool success = false;
+                try
+                {
+                    @in = dir.OpenInput(file, NewIOContext(Random()));
+                    int val = @in.ReadInt32();
+                    Assert.AreEqual(CodecUtil.CODEC_MAGIC, val, file + " has no codec header, instead found: " + val);
+                    success = true;
+                }
+                finally
+                {
+                    if (success)
+                    {
+                        IOUtils.Close(@in);
+                    }
+                    else
+                    {
+                        IOUtils.CloseWhileHandlingException(@in);
+                    }
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
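
CheckHeaders above reduces to one check per file: the first four bytes must equal CodecUtil.CODEC_MAGIC. That check in isolation, assuming the caller supplies the directory and IOContext (helper name illustrative):

    using Lucene.Net.Codecs;
    using Lucene.Net.Store;

    public static class HeaderSketch
    {
        public static bool HasCodecMagic(Directory dir, string file, IOContext context)
        {
            using (IndexInput input = dir.OpenInput(file, context))
            {
                return input.ReadInt32() == CodecUtil.CODEC_MAGIC; // first 4 bytes of a codec header
            }
        }
    }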

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestAtomicUpdate.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestAtomicUpdate.cs b/src/Lucene.Net.Tests/Index/TestAtomicUpdate.cs
new file mode 100644
index 0000000..10a43dc
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestAtomicUpdate.cs
@@ -0,0 +1,218 @@
+using System;
+using System.IO;
+using System.Threading;
+using Lucene.Net.Attributes;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    
+    using Lucene.Net.Store;
+    using Lucene.Net.Support;
+    using Lucene.Net.Util;
+    using NUnit.Framework;
+
+    /*
+     * Copyright 2004 The Apache Software Foundation
+     *
+     * Licensed under the Apache License, Version 2.0 (the "License");
+     * you may not use this file except in compliance with the License.
+     * You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+
+    [TestFixture]
+    public class TestAtomicUpdate : LuceneTestCase
+    {
+        private abstract class TimedThread : ThreadClass
+        {
+            internal volatile bool Failed;
+            internal int Count;
+            internal static float RUN_TIME_MSEC = AtLeast(500);
+            internal TimedThread[] AllThreads;
+
+            public abstract void DoWork();
+
+            internal TimedThread(TimedThread[] threads)
+            {
+                this.AllThreads = threads;
+            }
+
+            public override void Run()
+            {
+                long stopTime = Environment.TickCount + (long)RUN_TIME_MSEC;
+
+                Count = 0;
+
+                try
+                {
+                    do
+                    {
+                        if (AnyErrors())
+                        {
+                            break;
+                        }
+                        DoWork();
+                        Count++;
+                    } while (Environment.TickCount < stopTime);
+                }
+                catch (Exception e)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": exc");
+                    Console.WriteLine(e.StackTrace);
+                    Failed = true;
+                }
+            }
+
+            internal virtual bool AnyErrors()
+            {
+                for (int i = 0; i < AllThreads.Length; i++)
+                {
+                    if (AllThreads[i] != null && AllThreads[i].Failed)
+                    {
+                        return true;
+                    }
+                }
+                return false;
+            }
+        }
+
+        private class IndexerThread : TimedThread
+        {
+            internal IndexWriter Writer;
+
+            public IndexerThread(IndexWriter writer, TimedThread[] threads)
+                : base(threads)
+            {
+                this.Writer = writer;
+            }
+
+            public override void DoWork()
+            {
+                // Update all 100 docs...
+                for (int i = 0; i < 100; i++)
+                {
+                    Documents.Document d = new Documents.Document();
+                    d.Add(new StringField("id", Convert.ToString(i), Field.Store.YES));
+                    d.Add(new TextField("contents", English.IntToEnglish(i + 10 * Count), Field.Store.NO));
+                    Writer.UpdateDocument(new Term("id", Convert.ToString(i)), d);
+                }
+            }
+        }
+
+        private class SearcherThread : TimedThread
+        {
+            internal Directory Directory;
+
+            public SearcherThread(Directory directory, TimedThread[] threads)
+                : base(threads)
+            {
+                this.Directory = directory;
+            }
+
+            public override void DoWork()
+            {
+                IndexReader r = DirectoryReader.Open(Directory);
+                Assert.AreEqual(100, r.NumDocs);
+                r.Dispose();
+            }
+        }
+
+        /*
+          Run one indexer and two searchers against a single index as
+          a stress test.
+        */
+
+        public virtual void RunTest(Directory directory)
+        {
+            TimedThread[] threads = new TimedThread[4];
+
+            IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMaxBufferedDocs(7);
+            ((TieredMergePolicy)conf.MergePolicy).MaxMergeAtOnce = 3;
+            IndexWriter writer = RandomIndexWriter.MockIndexWriter(directory, conf, Random());
+
+            // Establish a base index of 100 docs:
+            for (int i = 0; i < 100; i++)
+            {
+                Documents.Document d = new Documents.Document();
+                d.Add(NewStringField("id", Convert.ToString(i), Field.Store.YES));
+                d.Add(NewTextField("contents", English.IntToEnglish(i), Field.Store.NO));
+                if ((i - 1) % 7 == 0)
+                {
+                    writer.Commit();
+                }
+                writer.AddDocument(d);
+            }
+            writer.Commit();
+
+            IndexReader r = DirectoryReader.Open(directory);
+            Assert.AreEqual(100, r.NumDocs);
+            r.Dispose();
+
+            IndexerThread indexerThread = new IndexerThread(writer, threads);
+            threads[0] = indexerThread;
+            indexerThread.Start();
+
+            IndexerThread indexerThread2 = new IndexerThread(writer, threads);
+            threads[1] = indexerThread2;
+            indexerThread2.Start();
+
+            SearcherThread searcherThread1 = new SearcherThread(directory, threads);
+            threads[2] = searcherThread1;
+            searcherThread1.Start();
+
+            SearcherThread searcherThread2 = new SearcherThread(directory, threads);
+            threads[3] = searcherThread2;
+            searcherThread2.Start();
+
+            indexerThread.Join();
+            indexerThread2.Join();
+            searcherThread1.Join();
+            searcherThread2.Join();
+
+            writer.Dispose();
+
+            Assert.IsTrue(!indexerThread.Failed, "hit unexpected exception in indexer");
+            Assert.IsTrue(!indexerThread2.Failed, "hit unexpected exception in indexer2");
+            Assert.IsTrue(!searcherThread1.Failed, "hit unexpected exception in search1");
+            Assert.IsTrue(!searcherThread2.Failed, "hit unexpected exception in search2");
+            //System.out.println("    Writer: " + indexerThread.count + " iterations");
+            //System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
+            //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
+        }
+
+        /*
+          Run the above stress test against a RAMDirectory and then
+          against an FSDirectory.
+        */
+
+        [Test]
+        public virtual void TestAtomicUpdates()
+        {
+            Directory directory;
+
+            // First in a RAM directory:
+            using (directory = new MockDirectoryWrapper(Random(), new RAMDirectory()))
+            {
+                RunTest(directory);
+            }
+
+            // Second in an FSDirectory:
+            DirectoryInfo dirPath = CreateTempDir("lucene.test.atomic");
+            using (directory = NewFSDirectory(dirPath))
+            {
+                RunTest(directory);
+            }
+            System.IO.Directory.Delete(dirPath.FullName, true);
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
new file mode 100644
index 0000000..2983ae5
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs
@@ -0,0 +1,1079 @@
+using Lucene.Net.Randomized.Generators;
+using Lucene.Net.Support;
+using NUnit.Framework;
+using System;
+using System.Diagnostics;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using FileInfo = System.IO.FileInfo;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using BinaryDocValuesField = Lucene.Net.Documents.BinaryDocValuesField;
+    using Document = Lucene.Net.Documents.Document;
+    using DoubleDocValuesField = Lucene.Net.Documents.DoubleDocValuesField;
+    using Field = Lucene.Net.Documents.Field;
+    using FieldType = Lucene.Net.Documents.FieldType;
+    using SingleDocValuesField = Lucene.Net.Documents.SingleDocValuesField;
+    using Int32Field = Lucene.Net.Documents.Int32Field;
+    using Int64Field = Lucene.Net.Documents.Int64Field;
+    using NumericDocValuesField = Lucene.Net.Documents.NumericDocValuesField;
+    using SortedDocValuesField = Lucene.Net.Documents.SortedDocValuesField;
+    using SortedSetDocValuesField = Lucene.Net.Documents.SortedSetDocValuesField;
+    using StringField = Lucene.Net.Documents.StringField;
+    using TextField = Lucene.Net.Documents.TextField;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using FieldCache = Lucene.Net.Search.FieldCache;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using NumericRangeQuery = Lucene.Net.Search.NumericRangeQuery;
+    using ScoreDoc = Lucene.Net.Search.ScoreDoc;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper;
+    using Directory = Lucene.Net.Store.Directory;
+    using FSDirectory = Lucene.Net.Store.FSDirectory;
+    using NIOFSDirectory = Lucene.Net.Store.NIOFSDirectory;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using SimpleFSDirectory = Lucene.Net.Store.SimpleFSDirectory;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Constants = Lucene.Net.Util.Constants;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using SuppressCodecs = Lucene.Net.Util.LuceneTestCase.SuppressCodecs;
+    using StringHelper = Lucene.Net.Util.StringHelper;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /*
+      Verify we can read the pre-5.0 file format, do searches
+      against it, and add documents to it.
+    */
+    // note: add this if we make a 4.x impersonator
+    // TODO: don't use 4.x codec, its unrealistic since it means
+    // we won't even be running the actual code, only the impostor
+    // @SuppressCodecs("Lucene4x")
+    // Sep codec cannot yet handle the offsets in our 4.x index!
+    [SuppressCodecs("Lucene3x", "MockFixedIntBlock", "MockVariableIntBlock", "MockSep", "MockRandom", "Lucene40", "Lucene41", "Appending", "Lucene42", "Lucene45")]
+    [TestFixture]
+    public class TestBackwardsCompatibility : LuceneTestCase
+    {
+
+        // Uncomment these cases & run them on an older Lucene version,
+        // to generate indexes to test backwards compatibility.  These
+        // indexes will be created under directory /tmp/idx/.
+        //
+        // However, you must first disable the Lucene TestSecurityManager,
+        // which will otherwise disallow writing outside of the build/
+        // directory - to do this, comment out the "java.security.manager"
+        // <sysproperty> under the "test-macro" <macrodef>.
+        //
+        // Be sure to create the indexes with the actual format:
+        //  ant test -Dtestcase=TestBackwardsCompatibility -Dversion=x.y.z
+        //      -Dtests.codec=LuceneXY -Dtests.postingsformat=LuceneXY -Dtests.docvaluesformat=LuceneXY
+        //
+        // Zip up the generated indexes:
+        //
+        //    cd /tmp/idx/index.cfs   ; zip index.<VERSION>.cfs.zip *
+        //    cd /tmp/idx/index.nocfs ; zip index.<VERSION>.nocfs.zip *
+        //
+        // Then move those 2 zip files to your trunk checkout and add them
+        // to the oldNames array.
+
+        /*
+        public void testCreateCFS() throws IOException {
+          createIndex("index.cfs", true, false);
+        }
+	
+        public void testCreateNoCFS() throws IOException {
+          createIndex("index.nocfs", false, false);
+        }
+        */
+
+        /*
+          // These are only needed for the special upgrade test to verify
+          // that single-segment indexes are also correctly upgraded by IndexUpgrader.
+          // You don't need them to be built for non-4.0 (the test is happy with just one
+          // "old" segment format; the version is unimportant):
+	  
+          public void testCreateSingleSegmentCFS() throws IOException {
+            createIndex("index.singlesegment.cfs", true, true);
+          }
+	
+          public void testCreateSingleSegmentNoCFS() throws IOException {
+            createIndex("index.singlesegment.nocfs", false, true);
+          }
+	
+        */
+
+        /*
+        public void testCreateMoreTermsIndex() throws Exception {
+          // we use a real directory name that is not cleaned up,
+          // because this method is only used to create backwards
+          // indexes:
+          File indexDir = new File("moreterms");
+          TestUtil.rmDir(indexDir);
+          Directory dir = NewFSDirectory(indexDir);
+	
+          LogByteSizeMergePolicy mp = new LogByteSizeMergePolicy();
+          mp.SetUseCompoundFile(false);
+          mp.setNoCFSRatio(1.0);
+          mp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY);
+          MockAnalyzer analyzer = new MockAnalyzer(Random());
+          analyzer.setMaxTokenLength(TestUtil.nextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH));
+	
+          // TODO: remove randomness
+          IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer)
+            .SetMergePolicy(mp);
+          conf.SetCodec(Codec.ForName("Lucene40"));
+          IndexWriter writer = new IndexWriter(dir, conf);
+          LineFileDocs docs = new LineFileDocs(null, true);
+          for(int i=0;i<50;i++) {
+            writer.AddDocument(docs.NextDoc());
+          }
+          writer.Dispose();
+          dir.Dispose();
+	
+          // Gives you time to copy the index out!: (there is also
+          // a test option to not remove temp dir...):
+          Thread.sleep(100000);
+        }
+        */
+
+        // LUCENENET specific to load resources for this type
+        internal const string CURRENT_RESOURCE_DIRECTORY = "Lucene.Net.Tests.Index.";
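+        // e.g. the OldNames entry "40.cfs" resolves to the embedded resource
+        // "Lucene.Net.Tests.Index.index.40.cfs.zip" (see BeforeClass below)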
+
+        internal static readonly string[] OldNames = new string[] {
+            "40.cfs", "40.nocfs", "41.cfs", "41.nocfs", "42.cfs",
+            "42.nocfs", "45.cfs", "45.nocfs", "461.cfs", "461.nocfs"
+        };
+
+        internal readonly string[] UnsupportedNames = new string[] {
+            "19.cfs", "19.nocfs", "20.cfs", "20.nocfs", "21.cfs",
+            "21.nocfs", "22.cfs", "22.nocfs", "23.cfs", "23.nocfs",
+            "24.cfs", "24.nocfs", "29.cfs", "29.nocfs"
+        };
+
+        internal static readonly string[] OldSingleSegmentNames = new string[] {
+            "40.optimized.cfs", "40.optimized.nocfs"
+        };
+
+        internal static IDictionary<string, Directory> OldIndexDirs;
+
+        /// <summary>
+        /// Randomizes the use of some of the constructor variations.
+        /// </summary>
+        private IndexUpgrader NewIndexUpgrader(Directory dir)
+        {
+            bool streamType = Random().NextBoolean();
+            int choice = TestUtil.NextInt(Random(), 0, 2);
+            switch (choice)
+            {
+                case 0:
+                    return new IndexUpgrader(dir, TEST_VERSION_CURRENT);
+                case 1:
+                    return new IndexUpgrader(dir, TEST_VERSION_CURRENT, streamType ? null : Console.Error, false);
+                case 2:
+                    return new IndexUpgrader(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null), false);
+                default:
+                    Assert.Fail("case statement didn't get updated when random bounds changed");
+                    break;
+            }
+            return null; // never get here
+        }
+
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            Assert.IsFalse(OLD_FORMAT_IMPERSONATION_IS_ACTIVE, "test infra is broken!");
+            IList<string> names = new List<string>(OldNames.Length + OldSingleSegmentNames.Length);
+            names.AddRange(Arrays.AsList(OldNames));
+            names.AddRange(Arrays.AsList(OldSingleSegmentNames));
+            OldIndexDirs = new Dictionary<string, Directory>();
+            foreach (string name in names)
+            {
+                DirectoryInfo dir = CreateTempDir(name);
+                using (Stream zipFileStream = this.GetType().Assembly.GetManifestResourceStream(CURRENT_RESOURCE_DIRECTORY + "index." + name + ".zip"))
+                {
+                    TestUtil.Unzip(zipFileStream, dir);
+                }
+                OldIndexDirs[name] = NewFSDirectory(dir);
+            }
+        }
+
+        [OneTimeTearDown]
+        public void AfterClass()
+        {
+            foreach (Directory d in OldIndexDirs.Values)
+            {
+                d.Dispose();
+            }
+            OldIndexDirs = null;
+            base.TearDown();
+        }
+
+        public override void TearDown()
+        {
+            // LUCENENET: We don't want our temp directory deleted until after
+            // all of the tests in the class run. So we need to override this and
+            // call base.TearDown() manually during TestFixtureTearDown
+        }
+
+        /// <summary>
+        /// This test checks that *only* IndexFormatTooOldExceptions are thrown when you open and operate on too-old indexes! </summary>
+        [Test]
+        public virtual void TestUnsupportedOldIndexes()
+        {
+            for (int i = 0; i < UnsupportedNames.Length; i++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: index " + UnsupportedNames[i]);
+                }
+                DirectoryInfo oldIndexDir = CreateTempDir(UnsupportedNames[i]);
+                using (Stream dataFile = this.GetType().Assembly.GetManifestResourceStream(CURRENT_RESOURCE_DIRECTORY + "unsupported." + UnsupportedNames[i] + ".zip"))
+                {
+                    TestUtil.Unzip(dataFile, oldIndexDir);
+                }
+                BaseDirectoryWrapper dir = NewFSDirectory(oldIndexDir);
+                // don't checkindex, these are intentionally not supported
+                dir.CheckIndexOnClose = false;
+
+                IndexReader reader = null;
+                IndexWriter writer = null;
+                try
+                {
+                    reader = DirectoryReader.Open(dir);
+                    Assert.Fail("DirectoryReader.open should not pass for " + UnsupportedNames[i]);
+                }
+#pragma warning disable 168
+                catch (IndexFormatTooOldException e)
+#pragma warning restore 168
+                {
+                    // pass
+                }
+                finally
+                {
+                    if (reader != null)
+                    {
+                        reader.Dispose();
+                    }
+                    reader = null;
+                }
+
+                try
+                {
+                    writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                    Assert.Fail("IndexWriter creation should not pass for " + UnsupportedNames[i]);
+                }
+                catch (IndexFormatTooOldException e)
+                {
+                    // pass
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: got expected exc:");
+                        Console.WriteLine(e.StackTrace);
+                    }
+                    // Make sure exc message includes a path=
+                    Assert.IsTrue(e.Message.IndexOf("path=\"") != -1, "got exc message: " + e.Message);
+                }
+                finally
+                {
+                    // we should fail to open IW, and so it should be null when we get here.
+                    // However, if the test fails (i.e., IW did not fail on open), we need
+                    // to close IW. However, if merges are run, IW may throw
+                    // IndexFormatTooOldException, and we don't want to mask the Assert.Fail()
+                    // above, so close without waiting for merges.
+                    if (writer != null)
+                    {
+                        writer.Dispose(false);
+                    }
+                    writer = null;
+                }
+
+                StringBuilder sb = new StringBuilder(1024);
+                CheckIndex checker = new CheckIndex(dir);
+                CheckIndex.Status indexStatus;
+                using (var infoStream = new StringWriter(sb))
+                {
+                    checker.InfoStream = infoStream;
+                    indexStatus = checker.DoCheckIndex();
+                }
+                Assert.IsFalse(indexStatus.Clean);
+                Assert.IsTrue(sb.ToString().Contains(typeof(IndexFormatTooOldException).Name));
+
+                dir.Dispose();
+                TestUtil.Rm(oldIndexDir);
+            }
+        }
+
+        [Test]
+        public virtual void TestFullyMergeOldIndex()
+        {
+            foreach (string name in OldNames)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: index=" + name);
+                }
+                Directory dir = NewDirectory(OldIndexDirs[name]);
+                IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                w.ForceMerge(1);
+                w.Dispose();
+
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestAddOldIndexes()
+        {
+            foreach (string name in OldNames)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: old index " + name);
+                }
+                Directory targetDir = NewDirectory();
+                IndexWriter w = new IndexWriter(targetDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                w.AddIndexes(OldIndexDirs[name]);
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: done adding indices; now close");
+                }
+                w.Dispose();
+
+                targetDir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestAddOldIndexesReader()
+        {
+            foreach (string name in OldNames)
+            {
+                IndexReader reader = DirectoryReader.Open(OldIndexDirs[name]);
+
+                Directory targetDir = NewDirectory();
+                IndexWriter w = new IndexWriter(targetDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                w.AddIndexes(reader);
+                w.Dispose();
+                reader.Dispose();
+
+                targetDir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestSearchOldIndex()
+        {
+            foreach (string name in OldNames)
+            {
+                SearchIndex(OldIndexDirs[name], name);
+            }
+        }
+
+        [Test]
+        public virtual void TestIndexOldIndexNoAdds()
+        {
+            foreach (string name in OldNames)
+            {
+                Directory dir = NewDirectory(OldIndexDirs[name]);
+                ChangeIndexNoAdds(Random(), dir);
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestIndexOldIndex()
+        {
+            foreach (string name in OldNames)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: oldName=" + name);
+                }
+                Directory dir = NewDirectory(OldIndexDirs[name]);
+                ChangeIndexWithAdds(Random(), dir, name);
+                dir.Dispose();
+            }
+        }
+
+        private void DoTestHits(ScoreDoc[] hits, int expectedCount, IndexReader reader)
+        {
+            int hitCount = hits.Length;
+            Assert.AreEqual(expectedCount, hitCount, "wrong number of hits");
+            for (int i = 0; i < hitCount; i++)
+            {
+                reader.Document(hits[i].Doc);
+                reader.GetTermVectors(hits[i].Doc);
+            }
+        }
+
+        public virtual void SearchIndex(Directory dir, string oldName)
+        {
+            //QueryParser parser = new QueryParser("contents", new MockAnalyzer(random));
+            //Query query = parser.parse("handle:1");
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            IndexSearcher searcher = NewSearcher(reader);
+
+            TestUtil.CheckIndex(dir);
+
+            // true if this is a 4.0+ index
+            bool is40Index = MultiFields.GetMergedFieldInfos(reader).FieldInfo("content5") != null;
+            // true if this is a 4.2+ index
+            bool is42Index = MultiFields.GetMergedFieldInfos(reader).FieldInfo("dvSortedSet") != null;
+
+            Debug.Assert(is40Index); // NOTE: currently we can only do this on trunk!
+
+            IBits liveDocs = MultiFields.GetLiveDocs(reader);
+
+            for (int i = 0; i < 35; i++)
+            {
+                if (liveDocs.Get(i))
+                {
+                    Document d = reader.Document(i);
+                    IList<IIndexableField> fields = d.Fields;
+                    bool isProxDoc = d.GetField("content3") == null;
+                    if (isProxDoc)
+                    {
+                        int numFields = is40Index ? 7 : 5;
+                        Assert.AreEqual(numFields, fields.Count);
+                        IIndexableField f = d.GetField("id");
+                        Assert.AreEqual("" + i, f.GetStringValue());
+
+                        f = d.GetField("utf8");
+                        Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.GetStringValue());
+
+                        f = d.GetField("autf8");
+                        Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.GetStringValue());
+
+                        f = d.GetField("content2");
+                        Assert.AreEqual("here is more content with aaa aaa aaa", f.GetStringValue());
+
+                        f = d.GetField("fie\u2C77ld");
+                        Assert.AreEqual("field with non-ascii name", f.GetStringValue());
+                    }
+
+                    Fields tfvFields = reader.GetTermVectors(i);
+                    Assert.IsNotNull(tfvFields, "i=" + i);
+                    Terms tfv = tfvFields.GetTerms("utf8");
+                    Assert.IsNotNull(tfv, "docID=" + i + " index=" + oldName);
+                }
+                else
+                {
+                    // Only ID 7 is deleted
+                    Assert.AreEqual(7, i);
+                }
+            }
+
+            if (is40Index)
+            {
+                // check docvalues fields
+                NumericDocValues dvByte = MultiDocValues.GetNumericValues(reader, "dvByte");
+                BinaryDocValues dvBytesDerefFixed = MultiDocValues.GetBinaryValues(reader, "dvBytesDerefFixed");
+                BinaryDocValues dvBytesDerefVar = MultiDocValues.GetBinaryValues(reader, "dvBytesDerefVar");
+                SortedDocValues dvBytesSortedFixed = MultiDocValues.GetSortedValues(reader, "dvBytesSortedFixed");
+                SortedDocValues dvBytesSortedVar = MultiDocValues.GetSortedValues(reader, "dvBytesSortedVar");
+                BinaryDocValues dvBytesStraightFixed = MultiDocValues.GetBinaryValues(reader, "dvBytesStraightFixed");
+                BinaryDocValues dvBytesStraightVar = MultiDocValues.GetBinaryValues(reader, "dvBytesStraightVar");
+                NumericDocValues dvDouble = MultiDocValues.GetNumericValues(reader, "dvDouble");
+                NumericDocValues dvFloat = MultiDocValues.GetNumericValues(reader, "dvFloat");
+                NumericDocValues dvInt = MultiDocValues.GetNumericValues(reader, "dvInt");
+                NumericDocValues dvLong = MultiDocValues.GetNumericValues(reader, "dvLong");
+                NumericDocValues dvPacked = MultiDocValues.GetNumericValues(reader, "dvPacked");
+                NumericDocValues dvShort = MultiDocValues.GetNumericValues(reader, "dvShort");
+                SortedSetDocValues dvSortedSet = null;
+                if (is42Index)
+                {
+                    dvSortedSet = MultiDocValues.GetSortedSetValues(reader, "dvSortedSet");
+                }
+
+                for (int i = 0; i < 35; i++)
+                {
+                    int id = Convert.ToInt32(reader.Document(i).Get("id"));
+                    Assert.AreEqual(id, dvByte.Get(i));
+
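+                    // the expected bytes are the big-endian encoding of id, mirroring AddDoc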
+                    sbyte[] bytes = new sbyte[] { (sbyte)((int)((uint)id >> 24)), (sbyte)((int)((uint)id >> 16)), (sbyte)((int)((uint)id >> 8)), (sbyte)id };
+                    BytesRef expectedRef = new BytesRef((byte[])(Array)bytes);
+                    BytesRef scratch = new BytesRef();
+
+                    dvBytesDerefFixed.Get(i, scratch);
+                    Assert.AreEqual(expectedRef, scratch);
+                    dvBytesDerefVar.Get(i, scratch);
+                    Assert.AreEqual(expectedRef, scratch);
+                    dvBytesSortedFixed.Get(i, scratch);
+                    Assert.AreEqual(expectedRef, scratch);
+                    dvBytesSortedVar.Get(i, scratch);
+                    Assert.AreEqual(expectedRef, scratch);
+                    dvBytesStraightFixed.Get(i, scratch);
+                    Assert.AreEqual(expectedRef, scratch);
+                    dvBytesStraightVar.Get(i, scratch);
+                    Assert.AreEqual(expectedRef, scratch);
+
+                    Assert.AreEqual((double)id, BitConverter.Int64BitsToDouble(dvDouble.Get(i)), 0D);
+                    Assert.AreEqual((float)id, Number.Int32BitsToSingle((int)dvFloat.Get(i)), 0F);
+                    Assert.AreEqual(id, dvInt.Get(i));
+                    Assert.AreEqual(id, dvLong.Get(i));
+                    Assert.AreEqual(id, dvPacked.Get(i));
+                    Assert.AreEqual(id, dvShort.Get(i));
+                    if (is42Index)
+                    {
+                        dvSortedSet.SetDocument(i);
+                        long ord = dvSortedSet.NextOrd();
+                        Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dvSortedSet.NextOrd());
+                        dvSortedSet.LookupOrd(ord, scratch);
+                        Assert.AreEqual(expectedRef, scratch);
+                    }
+                }
+            }
+
+            ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
+
+            // First document should be #0
+            Document doc = searcher.IndexReader.Document(hits[0].Doc);
+            assertEquals("didn't get the right document first", "0", doc.Get("id"));
+
+            DoTestHits(hits, 34, searcher.IndexReader);
+
+            if (is40Index)
+            {
+                hits = searcher.Search(new TermQuery(new Term("content5", "aaa")), null, 1000).ScoreDocs;
+
+                DoTestHits(hits, 34, searcher.IndexReader);
+
+                hits = searcher.Search(new TermQuery(new Term("content6", "aaa")), null, 1000).ScoreDocs;
+
+                DoTestHits(hits, 34, searcher.IndexReader);
+            }
+
+            hits = searcher.Search(new TermQuery(new Term("utf8", "\u0000")), null, 1000).ScoreDocs;
+            Assert.AreEqual(34, hits.Length);
+            hits = searcher.Search(new TermQuery(new Term("utf8", "lu\uD834\uDD1Ece\uD834\uDD60ne")), null, 1000).ScoreDocs;
+            Assert.AreEqual(34, hits.Length);
+            hits = searcher.Search(new TermQuery(new Term("utf8", "ab\ud917\udc17cd")), null, 1000).ScoreDocs;
+            Assert.AreEqual(34, hits.Length);
+
+            reader.Dispose();
+        }
+
+        private int Compare(string name, string v)
+        {
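+            // compares the two leading version digits of the index name against v,
+            // e.g. Compare("24.nocfs", "24") == 0 and Compare("40.cfs", "24") > 0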
+            int v0 = Convert.ToInt32(name.Substring(0, 2));
+            int v1 = Convert.ToInt32(v);
+            return v0 - v1;
+        }
+
+        public virtual void ChangeIndexWithAdds(Random random, Directory dir, string origOldName)
+        {
+            // open writer
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetOpenMode(OpenMode.APPEND).SetMergePolicy(NewLogMergePolicy()));
+            // add 10 docs
+            for (int i = 0; i < 10; i++)
+            {
+                AddDoc(writer, 35 + i);
+            }
+
+            // make sure writer sees right total -- writer seems not to know about deletes in .del?
+            int expected;
+            if (Compare(origOldName, "24") < 0)
+            {
+                expected = 44;
+            }
+            else
+            {
+                expected = 45;
+            }
+            Assert.AreEqual(expected, writer.NumDocs, "wrong doc count");
+            writer.Dispose();
+
+            // make sure searching sees right # hits
+            IndexReader reader = DirectoryReader.Open(dir);
+            IndexSearcher searcher = NewSearcher(reader);
+            ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
+            Document d = searcher.IndexReader.Document(hits[0].Doc);
+            assertEquals("wrong first document", "0", d.Get("id"));
+            DoTestHits(hits, 44, searcher.IndexReader);
+            reader.Dispose();
+
+            // fully merge
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetOpenMode(OpenMode.APPEND).SetMergePolicy(NewLogMergePolicy()));
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            reader = DirectoryReader.Open(dir);
+            searcher = NewSearcher(reader);
+            hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
+            Assert.AreEqual(44, hits.Length, "wrong number of hits");
+            d = searcher.Doc(hits[0].Doc);
+            DoTestHits(hits, 44, searcher.IndexReader);
+            assertEquals("wrong first document", "0", d.Get("id"));
+            reader.Dispose();
+        }
+
+        public virtual void ChangeIndexNoAdds(Random random, Directory dir)
+        {
+            // make sure searching sees right # hits
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            IndexSearcher searcher = NewSearcher(reader);
+            ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
+            Assert.AreEqual(34, hits.Length, "wrong number of hits");
+            Document d = searcher.Doc(hits[0].Doc);
+            assertEquals("wrong first document", "0", d.Get("id"));
+            reader.Dispose();
+
+            // fully merge
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetOpenMode(OpenMode.APPEND));
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            reader = DirectoryReader.Open(dir);
+            searcher = NewSearcher(reader);
+            hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
+            Assert.AreEqual(34, hits.Length, "wrong number of hits");
+            DoTestHits(hits, 34, searcher.IndexReader);
+            reader.Dispose();
+        }
+
+        public virtual DirectoryInfo CreateIndex(string dirName, bool doCFS, bool fullyMerged)
+        {
+            // we use a real directory name that is not cleaned up, because this method is only used to create backwards indexes:
+            DirectoryInfo indexDir = new DirectoryInfo(Path.Combine("/tmp/idx/", dirName));
+            TestUtil.Rm(indexDir);
+            Directory dir = NewFSDirectory(indexDir);
+            LogByteSizeMergePolicy mp = new LogByteSizeMergePolicy();
+            mp.NoCFSRatio = doCFS ? 1.0 : 0.0;
+            mp.MaxCFSSegmentSizeMB = double.PositiveInfinity;
+            // TODO: remove randomness
+            IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetUseCompoundFile(doCFS).SetMaxBufferedDocs(10).SetMergePolicy(mp);
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            for (int i = 0; i < 35; i++)
+            {
+                AddDoc(writer, i);
+            }
+            Assert.AreEqual(35, writer.MaxDoc, "wrong doc count");
+            if (fullyMerged)
+            {
+                writer.ForceMerge(1);
+            }
+            writer.Dispose();
+
+            if (!fullyMerged)
+            {
+                // open fresh writer so we get no prx file in the added segment
+                mp = new LogByteSizeMergePolicy();
+                mp.NoCFSRatio = doCFS ? 1.0 : 0.0;
+                // TODO: remove randomness
+                conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetUseCompoundFile(doCFS).SetMaxBufferedDocs(10).SetMergePolicy(mp);
+                writer = new IndexWriter(dir, conf);
+                AddNoProxDoc(writer);
+                writer.Dispose();
+
+                conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetUseCompoundFile(doCFS).SetMaxBufferedDocs(10).SetMergePolicy(doCFS ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES);
+                writer = new IndexWriter(dir, conf);
+                Term searchTerm = new Term("id", "7");
+                writer.DeleteDocuments(searchTerm);
+                writer.Dispose();
+            }
+
+            dir.Dispose();
+
+            return indexDir;
+        }
+
+        private void AddDoc(IndexWriter writer, int id)
+        {
+            Document doc = new Document();
+            doc.Add(new TextField("content", "aaa", Field.Store.NO));
+            doc.Add(new StringField("id", Convert.ToString(id), Field.Store.YES));
+            FieldType customType2 = new FieldType(TextField.TYPE_STORED);
+            customType2.StoreTermVectors = true;
+            customType2.StoreTermVectorPositions = true;
+            customType2.StoreTermVectorOffsets = true;
+            doc.Add(new Field("autf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", customType2));
+            doc.Add(new Field("utf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", customType2));
+            doc.Add(new Field("content2", "here is more content with aaa aaa aaa", customType2));
+            doc.Add(new Field("fie\u2C77ld", "field with non-ascii name", customType2));
+            // add numeric fields, to test if flex preserves encoding
+            doc.Add(new Int32Field("trieInt", id, Field.Store.NO));
+            doc.Add(new Int64Field("trieLong", (long)id, Field.Store.NO));
+            // add docvalues fields
+            doc.Add(new NumericDocValuesField("dvByte", (sbyte)id));
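+            // encode id as 4 big-endian bytes, shared by the binary/sorted docvalues fields below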
+            sbyte[] bytes = new sbyte[] { (sbyte)((int)((uint)id >> 24)), (sbyte)((int)((uint)id >> 16)), (sbyte)((int)((uint)id >> 8)), (sbyte)id };
+            BytesRef @ref = new BytesRef((byte[])(Array)bytes);
+            doc.Add(new BinaryDocValuesField("dvBytesDerefFixed", @ref));
+            doc.Add(new BinaryDocValuesField("dvBytesDerefVar", @ref));
+            doc.Add(new SortedDocValuesField("dvBytesSortedFixed", @ref));
+            doc.Add(new SortedDocValuesField("dvBytesSortedVar", @ref));
+            doc.Add(new BinaryDocValuesField("dvBytesStraightFixed", @ref));
+            doc.Add(new BinaryDocValuesField("dvBytesStraightVar", @ref));
+            doc.Add(new DoubleDocValuesField("dvDouble", (double)id));
+            doc.Add(new SingleDocValuesField("dvFloat", (float)id));
+            doc.Add(new NumericDocValuesField("dvInt", id));
+            doc.Add(new NumericDocValuesField("dvLong", id));
+            doc.Add(new NumericDocValuesField("dvPacked", id));
+            doc.Add(new NumericDocValuesField("dvShort", (short)id));
+            doc.Add(new SortedSetDocValuesField("dvSortedSet", @ref));
+            // a field with both offsets and term vectors for a cross-check
+            FieldType customType3 = new FieldType(TextField.TYPE_STORED);
+            customType3.StoreTermVectors = true;
+            customType3.StoreTermVectorPositions = true;
+            customType3.StoreTermVectorOffsets = true;
+            customType3.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
+            doc.Add(new Field("content5", "here is more content with aaa aaa aaa", customType3));
+            // a field that omits only positions
+            FieldType customType4 = new FieldType(TextField.TYPE_STORED);
+            customType4.StoreTermVectors = true;
+            customType4.StoreTermVectorPositions = false;
+            customType4.StoreTermVectorOffsets = true;
+            customType4.IndexOptions = IndexOptions.DOCS_AND_FREQS;
+            doc.Add(new Field("content6", "here is more content with aaa aaa aaa", customType4));
+            // TODO: 
+            //   index different norms types via similarity (we use a random one currently?!)
+            //   remove any analyzer randomness, explicitly add payloads for certain fields.
+            writer.AddDocument(doc);
+        }
+
+        private void AddNoProxDoc(IndexWriter writer)
+        {
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.IndexOptions = IndexOptions.DOCS_ONLY;
+            Field f = new Field("content3", "aaa", customType);
+            doc.Add(f);
+            FieldType customType2 = new FieldType();
+            customType2.IsStored = true;
+            customType2.IndexOptions = IndexOptions.DOCS_ONLY;
+            f = new Field("content4", "aaa", customType2);
+            doc.Add(f);
+            writer.AddDocument(doc);
+        }
+
+        private int CountDocs(DocsEnum docs)
+        {
+            int count = 0;
+            while ((docs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                count++;
+            }
+            return count;
+        }
+
+        // flex: test basics of TermsEnum api on non-flex index
+        [Test]
+        public virtual void TestNextIntoWrongField()
+        {
+            foreach (string name in OldNames)
+            {
+                Directory dir = OldIndexDirs[name];
+                IndexReader r = DirectoryReader.Open(dir);
+                TermsEnum terms = MultiFields.GetFields(r).GetTerms("content").GetIterator(null);
+                BytesRef t = terms.Next();
+                Assert.IsNotNull(t);
+
+                // content field only has term aaa:
+                Assert.AreEqual("aaa", t.Utf8ToString());
+                Assert.IsNull(terms.Next());
+
+                BytesRef aaaTerm = new BytesRef("aaa");
+
+                // should be found exactly
+                Assert.AreEqual(TermsEnum.SeekStatus.FOUND, terms.SeekCeil(aaaTerm));
+                Assert.AreEqual(35, CountDocs(TestUtil.Docs(Random(), terms, null, null, DocsEnum.FLAG_NONE)));
+                Assert.IsNull(terms.Next());
+
+                // should hit end of field
+                Assert.AreEqual(TermsEnum.SeekStatus.END, terms.SeekCeil(new BytesRef("bbb")));
+                Assert.IsNull(terms.Next());
+
+                // should seek to aaa
+                Assert.AreEqual(TermsEnum.SeekStatus.NOT_FOUND, terms.SeekCeil(new BytesRef("a")));
+                Assert.IsTrue(terms.Term.BytesEquals(aaaTerm));
+                Assert.AreEqual(35, CountDocs(TestUtil.Docs(Random(), terms, null, null, DocsEnum.FLAG_NONE)));
+                Assert.IsNull(terms.Next());
+
+                Assert.AreEqual(TermsEnum.SeekStatus.FOUND, terms.SeekCeil(aaaTerm));
+                Assert.AreEqual(35, CountDocs(TestUtil.Docs(Random(), terms, null, null, DocsEnum.FLAG_NONE)));
+                Assert.IsNull(terms.Next());
+
+                r.Dispose();
+            }
+        }
+
+        /// <summary>
+        /// Test that we didn't forget to bump the current Constants.LUCENE_MAIN_VERSION.
+        /// This is important so that we can determine which version of Lucene wrote the segment.
+        /// </summary>
+        [Test]
+        public virtual void TestOldVersions()
+        {
+            // first create a little index with the current code and get the version
+            Directory currentDir = NewDirectory();
+            RandomIndexWriter riw = new RandomIndexWriter(Random(), currentDir, Similarity, TimeZone);
+            riw.AddDocument(new Document());
+            riw.Dispose();
+            DirectoryReader ir = DirectoryReader.Open(currentDir);
+            SegmentReader air = (SegmentReader)ir.Leaves[0].Reader;
+            string currentVersion = air.SegmentInfo.Info.Version;
+            Assert.IsNotNull(currentVersion); // only 3.0 segments can have a null version
+            ir.Dispose();
+            currentDir.Dispose();
+
+            IComparer<string> comparer = StringHelper.VersionComparer;
+
+            // now check all the old indexes; their version should be < the current version
+            foreach (string name in OldNames)
+            {
+                Directory dir = OldIndexDirs[name];
+                DirectoryReader r = DirectoryReader.Open(dir);
+                foreach (AtomicReaderContext context in r.Leaves)
+                {
+                    air = (SegmentReader)context.Reader;
+                    string oldVersion = air.SegmentInfo.Info.Version;
+                    Assert.IsNotNull(oldVersion); // only 3.0 segments can have a null version
+                    Assert.IsTrue(comparer.Compare(oldVersion, currentVersion) < 0, "current Constants.LUCENE_MAIN_VERSION is <= an old index: did you forget to bump it?!");
+                }
+                r.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestNumericFields()
+        {
+            foreach (string name in OldNames)
+            {
+
+                Directory dir = OldIndexDirs[name];
+                IndexReader reader = DirectoryReader.Open(dir);
+                IndexSearcher searcher = NewSearcher(reader);
+
+                for (int id = 10; id < 15; id++)
+                {
+                    ScoreDoc[] hits = searcher.Search(NumericRangeQuery.NewInt32Range("trieInt", 4, Convert.ToInt32(id), Convert.ToInt32(id), true, true), 100).ScoreDocs;
+                    Assert.AreEqual(1, hits.Length, "wrong number of hits");
+                    Document d = searcher.Doc(hits[0].Doc);
+                    Assert.AreEqual(Convert.ToString(id), d.Get("id"));
+
+                    hits = searcher.Search(NumericRangeQuery.NewInt64Range("trieLong", 4, Convert.ToInt64(id), Convert.ToInt64(id), true, true), 100).ScoreDocs;
+                    Assert.AreEqual(1, hits.Length, "wrong number of hits");
+                    d = searcher.Doc(hits[0].Doc);
+                    Assert.AreEqual(Convert.ToString(id), d.Get("id"));
+                }
+
+                // check that lower-precision fields are also ok
+                ScoreDoc[] hits_ = searcher.Search(NumericRangeQuery.NewInt32Range("trieInt", 4, int.MinValue, int.MaxValue, false, false), 100).ScoreDocs;
+                Assert.AreEqual(34, hits_.Length, "wrong number of hits");
+
+                hits_ = searcher.Search(NumericRangeQuery.NewInt64Range("trieLong", 4, long.MinValue, long.MaxValue, false, false), 100).ScoreDocs;
+                Assert.AreEqual(34, hits_.Length, "wrong number of hits");
+
+                // check decoding into field cache
+                FieldCache.Int32s fci = FieldCache.DEFAULT.GetInt32s(SlowCompositeReaderWrapper.Wrap(searcher.IndexReader), "trieInt", false);
+                int maxDoc = searcher.IndexReader.MaxDoc;
+                for (int doc = 0; doc < maxDoc; doc++)
+                {
+                    int val = fci.Get(doc);
+                    Assert.IsTrue(val >= 0 && val < 35, "value in id bounds");
+                }
+
+                FieldCache.Int64s fcl = FieldCache.DEFAULT.GetInt64s(SlowCompositeReaderWrapper.Wrap(searcher.IndexReader), "trieLong", false);
+                for (int doc = 0; doc < maxDoc; doc++)
+                {
+                    long val = fcl.Get(doc);
+                    Assert.IsTrue(val >= 0L && val < 35L, "value in id bounds");
+                }
+
+                reader.Dispose();
+            }
+        }
+
+        private int CheckAllSegmentsUpgraded(Directory dir)
+        {
+            SegmentInfos infos = new SegmentInfos();
+            infos.Read(dir);
+            if (VERBOSE)
+            {
+                Console.WriteLine("checkAllSegmentsUpgraded: " + infos);
+            }
+            foreach (SegmentCommitInfo si in infos.Segments)
+            {
+                Assert.AreEqual(Constants.LUCENE_MAIN_VERSION, si.Info.Version);
+            }
+            return infos.Count;
+        }
+
+        private int GetNumberOfSegments(Directory dir)
+        {
+            SegmentInfos infos = new SegmentInfos();
+            infos.Read(dir);
+            return infos.Count;
+        }
+
+        [Test]
+        public virtual void TestUpgradeOldIndex()
+        {
+            IList<string> names = new List<string>(OldNames.Length + OldSingleSegmentNames.Length);
+            names.AddRange(Arrays.AsList(OldNames));
+            names.AddRange(Arrays.AsList(OldSingleSegmentNames));
+            foreach (string name in names)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("testUpgradeOldIndex: index=" + name);
+                }
+                Directory dir = NewDirectory(OldIndexDirs[name]);
+
+                NewIndexUpgrader(dir).Upgrade();
+
+                CheckAllSegmentsUpgraded(dir);
+
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestCommandLineArgs()
+        {
+
+            foreach (string name in OldIndexDirs.Keys)
+            {
+                DirectoryInfo dir = CreateTempDir(name);
+                using (Stream dataFile = this.GetType().Assembly.GetManifestResourceStream(CURRENT_RESOURCE_DIRECTORY + "index." + name + ".zip"))
+                {
+                    TestUtil.Unzip(dataFile, dir);
+                }
+
+                string path = dir.FullName;
+
+                IList<string> args = new List<string>();
+                if (Random().NextBoolean())
+                {
+                    args.Add("-verbose");
+                }
+                if (Random().NextBoolean())
+                {
+                    args.Add("-delete-prior-commits");
+                }
+                if (Random().NextBoolean())
+                {
+                    // TODO: need to better randomize this, but ...
+                    //  - LuceneTestCase.FS_DIRECTORIES is private
+                    //  - newFSDirectory returns BaseDirectoryWrapper
+                    //  - BaseDirectoryWrapper doesn't expose delegate
+                    Type dirImpl = Random().NextBoolean() ? typeof(SimpleFSDirectory) : typeof(NIOFSDirectory);
+
+                    args.Add("-dir-impl");
+                    args.Add(dirImpl.Name);
+                }
+                args.Add(path);
+
+                IndexUpgrader upgrader = null;
+                try
+                {
+                    upgrader = IndexUpgrader.ParseArgs(args.ToArray());
+                }
+                catch (Exception e)
+                {
+                    throw new Exception("unable to parse args: " + args, e);
+                }
+                upgrader.Upgrade();
+
+                Directory upgradedDir = NewFSDirectory(dir);
+                try
+                {
+                    CheckAllSegmentsUpgraded(upgradedDir);
+                }
+                finally
+                {
+                    upgradedDir.Dispose();
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestUpgradeOldSingleSegmentIndexWithAdditions()
+        {
+            foreach (string name in OldSingleSegmentNames)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("testUpgradeOldSingleSegmentIndexWithAdditions: index=" + name);
+                }
+                Directory dir = NewDirectory(OldIndexDirs[name]);
+
+                Assert.AreEqual(1, GetNumberOfSegments(dir), "Original index must be single segment");
+
+                // create a bunch of dummy segments
+                int id = 40;
+                RAMDirectory ramDir = new RAMDirectory();
+                for (int i = 0; i < 3; i++)
+                {
+                    // only use Log- or TieredMergePolicy, to make document addition predictable and avoid sudden merges:
+                    MergePolicy mp = Random().NextBoolean() ? (MergePolicy)NewLogMergePolicy() : NewTieredMergePolicy();
+                    IndexWriterConfig iwc = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(mp);
+                    IndexWriter w = new IndexWriter(ramDir, iwc);
+                    // add a few more docs:
+                    for (int j = 0; j < RANDOM_MULTIPLIER * Random().Next(30); j++)
+                    {
+                        AddDoc(w, id++);
+                    }
+                    w.Dispose(false);
+                }
+
+                // add dummy segments (which are all in the current
+                // version) to the single-segment index
+                MergePolicy mp_ = Random().NextBoolean() ? (MergePolicy)NewLogMergePolicy() : NewTieredMergePolicy();
+                IndexWriterConfig iwc_ = (new IndexWriterConfig(TEST_VERSION_CURRENT, null)).SetMergePolicy(mp_);
+                IndexWriter iw = new IndexWriter(dir, iwc_);
+                iw.AddIndexes(ramDir);
+                iw.Dispose(false);
+
+                // determine count of segments in modified index
+                int origSegCount = GetNumberOfSegments(dir);
+
+                NewIndexUpgrader(dir).Upgrade();
+
+                int segCount = CheckAllSegmentsUpgraded(dir);
+                Assert.AreEqual(origSegCount, segCount, "Index must still contain the same number of segments, as only one segment was upgraded and nothing else merged");
+
+                dir.Dispose();
+            }
+        }
+
+        public const string MoreTermsIndex = "moreterms.40.zip";
+
+        [Test]
+        public virtual void TestMoreTerms()
+        {
+            DirectoryInfo oldIndexDir = CreateTempDir("moreterms");
+            using (Stream dataFile = this.GetType().Assembly.GetManifestResourceStream(CURRENT_RESOURCE_DIRECTORY + MoreTermsIndex))
+            {
+                TestUtil.Unzip(dataFile, oldIndexDir);
+            }
+            Directory dir = NewFSDirectory(oldIndexDir);
+            // TODO: more tests
+            TestUtil.CheckIndex(dir);
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file


[41/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs b/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
new file mode 100644
index 0000000..72ea385
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
@@ -0,0 +1,1786 @@
+using System;
+using System.Collections.Generic;
+using System.Threading;
+using Lucene.Net.Attributes;
+using Lucene.Net.Codecs;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.IO;
+    using AssertingDocValuesFormat = Lucene.Net.Codecs.asserting.AssertingDocValuesFormat;
+    using BinaryDocValuesField = BinaryDocValuesField;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using DocValuesFormat = Lucene.Net.Codecs.DocValuesFormat;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using Lucene40RWCodec = Lucene.Net.Codecs.Lucene40.Lucene40RWCodec;
+    using Lucene41RWCodec = Lucene.Net.Codecs.Lucene41.Lucene41RWCodec;
+    using Lucene42RWCodec = Lucene.Net.Codecs.Lucene42.Lucene42RWCodec;
+    using Lucene45DocValuesFormat = Lucene.Net.Codecs.Lucene45.Lucene45DocValuesFormat;
+    using Lucene45RWCodec = Lucene.Net.Codecs.Lucene45.Lucene45RWCodec;
+    using Lucene46Codec = Lucene.Net.Codecs.Lucene46.Lucene46Codec;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using NumericDocValuesField = NumericDocValuesField;
+    using SortedDocValuesField = SortedDocValuesField;
+    using SortedSetDocValuesField = SortedSetDocValuesField;
+    using Store = Field.Store;
+    using StringField = StringField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [SuppressCodecs("Appending", "Lucene3x", "Lucene40", "Lucene41", "Lucene42", "Lucene45")]
+    [TestFixture]
+    public class TestBinaryDocValuesUpdates : LuceneTestCase
+    {
+        internal static long GetValue(BinaryDocValues bdv, int idx, BytesRef scratch)
+        {
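+            // decodes the VLong written by ToBytes below: 7 payload bits per byte,
+            // with the high bit set on every byte except the last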
+            bdv.Get(idx, scratch);
+            idx = scratch.Offset;
+            var b = scratch.Bytes[idx++];
+            long value = b & 0x7FL;
+            for (int shift = 7; (b & 0x80L) != 0; shift += 7)
+            {
+                b = scratch.Bytes[idx++];
+                value |= (b & 0x7FL) << shift;
+            }
+            return value;
+        }
+
+        // encodes a long into a BytesRef as a VLong so that we get a varying number of bytes when we update
+        internal static BytesRef ToBytes(long value)
+        {
+            //    long orig = value;
+            BytesRef bytes = new BytesRef(10); // negative longs may take 10 bytes
+            while ((value & ~0x7FL) != 0L)
+            {
+                bytes.Bytes[bytes.Length++] = unchecked((byte)((value & 0x7FL) | 0x80L));
+                value = (long)((ulong)value >> 7);
+            }
+            bytes.Bytes[bytes.Length++] = (byte)value;
+            //    System.err.println("[" + Thread.currentThread().getName() + "] value=" + orig + ", bytes=" + bytes);
+            return bytes;
+        }
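+
+        // Sanity sketch (an illustration added here, not part of the original Java test;
+        // the test name is hypothetical): every long encoded by ToBytes must decode back
+        // to the same value, using the same loop GetValue runs over the stored bytes.
+        [Test]
+        public virtual void TestToBytesRoundTripSketch()
+        {
+            foreach (long value in new long[] { 0L, 1L, 0x7FL, 0x80L, 12345L, long.MaxValue })
+            {
+                BytesRef bytes = ToBytes(value);
+                int idx = bytes.Offset;
+                var b = bytes.Bytes[idx++];
+                long decoded = b & 0x7FL;
+                for (int shift = 7; (b & 0x80L) != 0; shift += 7)
+                {
+                    b = bytes.Bytes[idx++];
+                    decoded |= (b & 0x7FL) << shift;
+                }
+                Assert.AreEqual(value, decoded);
+            }
+        }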
+
+        private Document Doc(int id)
+        {
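+            // each doc gets a StringField id "doc-<id>" and a binary docvalues field "val" = VLong(id + 1)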
+            Document doc = new Document();
+            doc.Add(new StringField("id", "doc-" + id, Store.NO));
+            doc.Add(new BinaryDocValuesField("val", ToBytes(id + 1)));
+            return doc;
+        }
+
+        [Test]
+        public virtual void TestUpdatesAreFlushed()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).SetRAMBufferSizeMB(0.00000001));
+            writer.AddDocument(Doc(0)); // val=1
+            writer.AddDocument(Doc(1)); // val=2
+            writer.AddDocument(Doc(3)); // val=4
+            writer.Commit();
+            Assert.AreEqual(1, writer.FlushDeletesCount);
+            writer.UpdateBinaryDocValue(new Term("id", "doc-0"), "val", ToBytes(5));
+            Assert.AreEqual(2, writer.FlushDeletesCount);
+            writer.UpdateBinaryDocValue(new Term("id", "doc-1"), "val", ToBytes(6));
+            Assert.AreEqual(3, writer.FlushDeletesCount);
+            writer.UpdateBinaryDocValue(new Term("id", "doc-2"), "val", ToBytes(7));
+            Assert.AreEqual(4, writer.FlushDeletesCount);
+            writer.Config.SetRAMBufferSizeMB(1000d);
+            writer.UpdateBinaryDocValue(new Term("id", "doc-2"), "val", ToBytes(7));
+            Assert.AreEqual(4, writer.FlushDeletesCount);
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSimple()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            // make sure random config doesn't flush on us
+            conf.SetMaxBufferedDocs(10);
+            conf.SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+            IndexWriter writer = new IndexWriter(dir, conf);
+            writer.AddDocument(Doc(0)); // val=1
+            writer.AddDocument(Doc(1)); // val=2
+            if (Random().NextBoolean()) // randomly commit before the update is sent
+            {
+                writer.Commit();
+            }
+            writer.UpdateBinaryDocValue(new Term("id", "doc-0"), "val", ToBytes(2)); // doc=0, exp=2
+
+            DirectoryReader reader;
+            if (Random().NextBoolean()) // not NRT
+            {
+                writer.Dispose();
+                reader = DirectoryReader.Open(dir);
+            } // NRT
+            else
+            {
+                reader = DirectoryReader.Open(writer, true);
+                writer.Dispose();
+            }
+
+            Assert.AreEqual(1, reader.Leaves.Count);
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+            BinaryDocValues bdv = r.GetBinaryDocValues("val");
+            BytesRef scratch = new BytesRef();
+            Assert.AreEqual(2, GetValue(bdv, 0, scratch));
+            Assert.AreEqual(2, GetValue(bdv, 1, scratch));
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateFewSegments()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMaxBufferedDocs(2); // generate a few segments
+            conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges for this test
+            IndexWriter writer = new IndexWriter(dir, conf);
+            int numDocs = 10;
+            long[] expectedValues = new long[numDocs];
+            for (int i = 0; i < numDocs; i++)
+            {
+                writer.AddDocument(Doc(i));
+                expectedValues[i] = i + 1;
+            }
+            writer.Commit();
+
+            // update a few docs
+            for (int i = 0; i < numDocs; i++)
+            {
+                if (Random().NextDouble() < 0.4)
+                {
+                    long value = (i + 1) * 2;
+                    writer.UpdateBinaryDocValue(new Term("id", "doc-" + i), "val", ToBytes(value));
+                    expectedValues[i] = value;
+                }
+            }
+
+            DirectoryReader reader;
+            if (Random().NextBoolean()) // not NRT
+            {
+                writer.Dispose();
+                reader = DirectoryReader.Open(dir);
+            } // NRT
+            else
+            {
+                reader = DirectoryReader.Open(writer, true);
+                writer.Dispose();
+            }
+
+            BytesRef scratch = new BytesRef();
+            foreach (AtomicReaderContext context in reader.Leaves)
+            {
+                AtomicReader r = context.AtomicReader;
+                BinaryDocValues bdv = r.GetBinaryDocValues("val");
+                Assert.IsNotNull(bdv);
+                for (int i = 0; i < r.MaxDoc; i++)
+                {
+                    long expected = expectedValues[i + context.DocBase];
+                    long actual = GetValue(bdv, i, scratch);
+                    Assert.AreEqual(expected, actual);
+                }
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestReopen()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+            writer.AddDocument(Doc(0));
+            writer.AddDocument(Doc(1));
+
+            bool isNRT = Random().NextBoolean();
+            DirectoryReader reader1;
+            if (isNRT)
+            {
+                reader1 = DirectoryReader.Open(writer, true);
+            }
+            else
+            {
+                writer.Commit();
+                reader1 = DirectoryReader.Open(dir);
+            }
+
+            // update doc
+            writer.UpdateBinaryDocValue(new Term("id", "doc-0"), "val", ToBytes(10)); // update doc-0's value to 10
+            if (!isNRT)
+            {
+                writer.Commit();
+            }
+
+            // reopen reader and assert only it sees the update
+            DirectoryReader reader2 = DirectoryReader.OpenIfChanged(reader1);
+            Assert.IsNotNull(reader2);
+            Assert.IsTrue(reader1 != reader2);
+
+            BytesRef scratch = new BytesRef();
+            BinaryDocValues bdv1 = ((AtomicReader)reader1.Leaves[0].Reader).GetBinaryDocValues("val");
+            BinaryDocValues bdv2 = ((AtomicReader)reader2.Leaves[0].Reader).GetBinaryDocValues("val");
+            Assert.AreEqual(1, GetValue(bdv1, 0, scratch));
+            Assert.AreEqual(10, GetValue(bdv2, 0, scratch));
+
+            IOUtils.Close(writer, reader1, reader2, dir);
+        }
+
+        [Test]
+        public virtual void TestUpdatesAndDeletes()
+        {
+            // create an index with a segment with only deletes, a segment with
+            // both deletes and updates, and a segment with only updates
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMaxBufferedDocs(10); // control segment flushing
+            conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges for this test
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            for (int i = 0; i < 6; i++)
+            {
+                writer.AddDocument(Doc(i));
+                if (i % 2 == 1)
+                {
+                    writer.Commit(); // create 2-docs segments
+                }
+            }
+
+            // delete doc-1 and doc-2
+            writer.DeleteDocuments(new Term("id", "doc-1"), new Term("id", "doc-2")); // 1st and 2nd segments
+
+            // update docs 3 and 5
+            writer.UpdateBinaryDocValue(new Term("id", "doc-3"), "val", ToBytes(17L));
+            writer.UpdateBinaryDocValue(new Term("id", "doc-5"), "val", ToBytes(17L));
+
+            DirectoryReader reader;
+            if (Random().NextBoolean()) // not NRT
+            {
+                writer.Dispose();
+                reader = DirectoryReader.Open(dir);
+            } // NRT
+            else
+            {
+                reader = DirectoryReader.Open(writer, true);
+                writer.Dispose();
+            }
+
+            AtomicReader slow = SlowCompositeReaderWrapper.Wrap(reader);
+
+            IBits liveDocs = slow.LiveDocs;
+            bool[] expectedLiveDocs = new bool[] { true, false, false, true, true, true };
+            for (int i = 0; i < expectedLiveDocs.Length; i++)
+            {
+                Assert.AreEqual(expectedLiveDocs[i], liveDocs.Get(i));
+            }
+
+            long[] expectedValues = new long[] { 1, 2, 3, 17, 5, 17 };
+            BinaryDocValues bdv = slow.GetBinaryDocValues("val");
+            BytesRef scratch = new BytesRef();
+            for (int i = 0; i < expectedValues.Length; i++)
+            {
+                Assert.AreEqual(expectedValues[i], GetValue(bdv, i, scratch));
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdatesWithDeletes()
+        {
+            // update and delete different documents in the same commit session
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMaxBufferedDocs(10); // control segment flushing
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            writer.AddDocument(Doc(0));
+            writer.AddDocument(Doc(1));
+
+            if (Random().NextBoolean())
+            {
+                writer.Commit();
+            }
+
+            writer.DeleteDocuments(new Term("id", "doc-0"));
+            writer.UpdateBinaryDocValue(new Term("id", "doc-1"), "val", ToBytes(17L));
+
+            DirectoryReader reader;
+            if (Random().NextBoolean()) // not NRT
+            {
+                writer.Dispose();
+                reader = DirectoryReader.Open(dir);
+            } // NRT
+            else
+            {
+                reader = DirectoryReader.Open(writer, true);
+                writer.Dispose();
+            }
+
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+            Assert.IsFalse(r.LiveDocs.Get(0));
+            Assert.AreEqual(17, GetValue(r.GetBinaryDocValues("val"), 1, new BytesRef()));
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateAndDeleteSameDocument()
+        {
+            // update and delete same document in same commit session
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMaxBufferedDocs(10); // control segment flushing
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            writer.AddDocument(Doc(0));
+            writer.AddDocument(Doc(1));
+
+            if (Random().NextBoolean())
+            {
+                writer.Commit();
+            }
+
+            writer.DeleteDocuments(new Term("id", "doc-0"));
+            writer.UpdateBinaryDocValue(new Term("id", "doc-0"), "val", ToBytes(17L));
+
+            DirectoryReader reader;
+            if (Random().NextBoolean()) // not NRT
+            {
+                writer.Dispose();
+                reader = DirectoryReader.Open(dir);
+            } // NRT
+            else
+            {
+                reader = DirectoryReader.Open(writer, true);
+                writer.Dispose();
+            }
+
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+            Assert.IsFalse(r.LiveDocs.Get(0));
+            Assert.AreEqual(1, GetValue(r.GetBinaryDocValues("val"), 0, new BytesRef())); // deletes are currently applied first
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestMultipleDocValuesTypes()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMaxBufferedDocs(10); // prevent merges
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            for (int i = 0; i < 4; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("dvUpdateKey", "dv", Store.NO));
+                doc.Add(new NumericDocValuesField("ndv", i));
+                doc.Add(new BinaryDocValuesField("bdv", new BytesRef(Convert.ToString(i))));
+                doc.Add(new SortedDocValuesField("sdv", new BytesRef(Convert.ToString(i))));
+                doc.Add(new SortedSetDocValuesField("ssdv", new BytesRef(Convert.ToString(i))));
+                doc.Add(new SortedSetDocValuesField("ssdv", new BytesRef(Convert.ToString(i * 2))));
+                writer.AddDocument(doc);
+            }
+            writer.Commit();
+
+            // update all docs' bdv field
+            writer.UpdateBinaryDocValue(new Term("dvUpdateKey", "dv"), "bdv", ToBytes(17L));
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+            NumericDocValues ndv = r.GetNumericDocValues("ndv");
+            BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
+            SortedDocValues sdv = r.GetSortedDocValues("sdv");
+            SortedSetDocValues ssdv = r.GetSortedSetDocValues("ssdv");
+            BytesRef scratch = new BytesRef();
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                Assert.AreEqual(i, ndv.Get(i));
+                Assert.AreEqual(17, GetValue(bdv, i, scratch));
+                sdv.Get(i, scratch);
+                Assert.AreEqual(new BytesRef(Convert.ToString(i)), scratch);
+                ssdv.SetDocument(i);
+                long ord = ssdv.NextOrd();
+                ssdv.LookupOrd(ord, scratch);
+                Assert.AreEqual(i, Convert.ToInt32(scratch.Utf8ToString()));
+                if (i != 0) // for i == 0, i and i * 2 dedupe to the single value "0"
+                {
+                    ord = ssdv.NextOrd();
+                    ssdv.LookupOrd(ord, scratch);
+                    Assert.AreEqual(i * 2, Convert.ToInt32(scratch.Utf8ToString()));
+                }
+                Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, ssdv.NextOrd());
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestMultipleBinaryDocValues()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMaxBufferedDocs(10); // prevent merges
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            for (int i = 0; i < 2; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("dvUpdateKey", "dv", Store.NO));
+                doc.Add(new BinaryDocValuesField("bdv1", ToBytes(i)));
+                doc.Add(new BinaryDocValuesField("bdv2", ToBytes(i)));
+                writer.AddDocument(doc);
+            }
+            writer.Commit();
+
+            // update all docs' bdv1 field
+            writer.UpdateBinaryDocValue(new Term("dvUpdateKey", "dv"), "bdv1", ToBytes(17L));
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+
+            BinaryDocValues bdv1 = r.GetBinaryDocValues("bdv1");
+            BinaryDocValues bdv2 = r.GetBinaryDocValues("bdv2");
+            BytesRef scratch = new BytesRef();
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                Assert.AreEqual(17, GetValue(bdv1, i, scratch));
+                Assert.AreEqual(i, GetValue(bdv2, i, scratch));
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDocumentWithNoValue()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            for (int i = 0; i < 2; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("dvUpdateKey", "dv", Store.NO));
+                if (i == 0) // index only one document with value
+                {
+                    doc.Add(new BinaryDocValuesField("bdv", ToBytes(5L)));
+                }
+                writer.AddDocument(doc);
+            }
+            writer.Commit();
+
+            // update all docs' bdv field
+            writer.UpdateBinaryDocValue(new Term("dvUpdateKey", "dv"), "bdv", ToBytes(17L));
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+            BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
+            BytesRef scratch = new BytesRef();
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                Assert.AreEqual(17, GetValue(bdv, i, scratch));
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUnsetValue()
+        {
+            AssumeTrue("codec does not support docsWithField", DefaultCodecSupportsDocsWithField());
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            for (int i = 0; i < 2; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", "doc" + i, Store.NO));
+                doc.Add(new BinaryDocValuesField("bdv", ToBytes(5L)));
+                writer.AddDocument(doc);
+            }
+            writer.Commit();
+
+            // unset the value of 'doc0'
+            writer.UpdateBinaryDocValue(new Term("id", "doc0"), "bdv", null);
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+            BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
+            BytesRef scratch = new BytesRef();
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                if (i == 0)
+                {
+                    bdv.Get(i, scratch);
+                    Assert.AreEqual(0, scratch.Length);
+                }
+                else
+                {
+                    Assert.AreEqual(5, GetValue(bdv, i, scratch));
+                }
+            }
+
+            IBits docsWithField = r.GetDocsWithField("bdv");
+            Assert.IsFalse(docsWithField.Get(0));
+            Assert.IsTrue(docsWithField.Get(1));
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUnsetAllValues()
+        {
+            AssumeTrue("codec does not support docsWithField", DefaultCodecSupportsDocsWithField());
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            for (int i = 0; i < 2; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", "doc", Store.NO));
+                doc.Add(new BinaryDocValuesField("bdv", ToBytes(5L)));
+                writer.AddDocument(doc);
+            }
+            writer.Commit();
+
+            // unset the value of 'doc'
+            writer.UpdateBinaryDocValue(new Term("id", "doc"), "bdv", null);
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+            BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
+            BytesRef scratch = new BytesRef();
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                bdv.Get(i, scratch);
+                Assert.AreEqual(0, scratch.Length);
+            }
+
+            IBits docsWithField = r.GetDocsWithField("bdv");
+            Assert.IsFalse(docsWithField.Get(0));
+            Assert.IsFalse(docsWithField.Get(1));
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateNonBinaryDocValuesField()
+        {
+            // we don't support adding new fields or updating existing non-binary-dv
+            // fields through binary updates
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("key", "doc", Store.NO));
+            doc.Add(new StringField("foo", "bar", Store.NO));
+            writer.AddDocument(doc); // flushed document
+            writer.Commit();
+            writer.AddDocument(doc); // in-memory document
+
+            try
+            {
+                writer.UpdateBinaryDocValue(new Term("key", "doc"), "bdv", ToBytes(17L));
+                Assert.Fail("should not have allowed creating new fields through update");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // ok
+            }
+
+            try
+            {
+                writer.UpdateBinaryDocValue(new Term("key", "doc"), "foo", ToBytes(17L));
+                Assert.Fail("should not have allowed updating an existing field to binary-dv");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // ok
+            }
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDifferentDVFormatPerField()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetCodec(new Lucene46CodecAnonymousInnerClassHelper(this));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("key", "doc", Store.NO));
+            doc.Add(new BinaryDocValuesField("bdv", ToBytes(5L)));
+            doc.Add(new SortedDocValuesField("sorted", new BytesRef("value")));
+            writer.AddDocument(doc); // flushed document
+            writer.Commit();
+            writer.AddDocument(doc); // in-memory document
+
+            writer.UpdateBinaryDocValue(new Term("key", "doc"), "bdv", ToBytes(17L));
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+
+            AtomicReader r = SlowCompositeReaderWrapper.Wrap(reader);
+            BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
+            SortedDocValues sdv = r.GetSortedDocValues("sorted");
+            BytesRef scratch = new BytesRef();
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                Assert.AreEqual(17, GetValue(bdv, i, scratch));
+                sdv.Get(i, scratch);
+                Assert.AreEqual(new BytesRef("value"), scratch);
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        private class Lucene46CodecAnonymousInnerClassHelper : Lucene46Codec
+        {
+            private readonly TestBinaryDocValuesUpdates OuterInstance;
+
+            public Lucene46CodecAnonymousInnerClassHelper(TestBinaryDocValuesUpdates outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override DocValuesFormat GetDocValuesFormatForField(string field)
+            {
+                return new Lucene45DocValuesFormat();
+            }
+        }
+
+        [Test]
+        public virtual void TestUpdateSameDocMultipleTimes()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("key", "doc", Store.NO));
+            doc.Add(new BinaryDocValuesField("bdv", ToBytes(5L)));
+            writer.AddDocument(doc); // flushed document
+            writer.Commit();
+            writer.AddDocument(doc); // in-memory document
+
+            writer.UpdateBinaryDocValue(new Term("key", "doc"), "bdv", ToBytes(17L)); // update existing field
+            writer.UpdateBinaryDocValue(new Term("key", "doc"), "bdv", ToBytes(3L)); // update existing field 2nd time in this commit
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader r = SlowCompositeReaderWrapper.Wrap(reader);
+            BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
+            BytesRef scratch = new BytesRef();
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                Assert.AreEqual(3, GetValue(bdv, i, scratch));
+            }
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSegmentMerges()
+        {
+            Directory dir = NewDirectory();
+            Random random = Random();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+
+            int docid = 0;
+            int numRounds = AtLeast(10);
+            for (int rnd = 0; rnd < numRounds; rnd++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("key", "doc", Store.NO));
+                doc.Add(new BinaryDocValuesField("bdv", ToBytes(-1)));
+                int numDocs = AtLeast(30);
+                for (int i = 0; i < numDocs; i++)
+                {
+                    doc.RemoveField("id");
+                    doc.Add(new StringField("id", Convert.ToString(docid++), Store.NO));
+                    writer.AddDocument(doc);
+                }
+
+                long value = rnd + 1;
+                writer.UpdateBinaryDocValue(new Term("key", "doc"), "bdv", ToBytes(value));
+
+                if (random.NextDouble() < 0.2) // randomly delete some docs
+                {
+                    writer.DeleteDocuments(new Term("id", Convert.ToString(random.Next(docid))));
+                }
+
+                // randomly commit or reopen-IW (or nothing), before forceMerge
+                if (random.NextDouble() < 0.4)
+                {
+                    writer.Commit();
+                }
+                else if (random.NextDouble() < 0.1)
+                {
+                    writer.Dispose();
+                    writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+                }
+
+                // add another document with the current value, to be sure forceMerge has
+                // something to merge (for instance, it could be that CMS finished merging
+                // all segments down to 1 before the delete was applied, so when
+                // forceMerge is called the index has a single segment with deletes,
+                // and some MPs might not merge it, thereby invalidating the test's
+                // assumption that the reader has no deletes).
+                doc = new Document();
+                doc.Add(new StringField("id", Convert.ToString(docid++), Store.NO));
+                doc.Add(new StringField("key", "doc", Store.NO));
+                doc.Add(new BinaryDocValuesField("bdv", ToBytes(value)));
+                writer.AddDocument(doc);
+
+                writer.ForceMerge(1, true);
+                DirectoryReader reader;
+                if (random.NextBoolean())
+                {
+                    writer.Commit();
+                    reader = DirectoryReader.Open(dir);
+                }
+                else
+                {
+                    reader = DirectoryReader.Open(writer, true);
+                }
+
+                Assert.AreEqual(1, reader.Leaves.Count);
+                AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+                Assert.IsNull(r.LiveDocs, "index should have no deletes after forceMerge");
+                BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
+                Assert.IsNotNull(bdv);
+                BytesRef scratch = new BytesRef();
+                for (int i = 0; i < r.MaxDoc; i++)
+                {
+                    Assert.AreEqual(value, GetValue(bdv, i, scratch));
+                }
+                reader.Dispose();
+            }
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateDocumentByMultipleTerms()
+        {
+            // make sure the order of updates is respected, even when multiple terms affect the same document
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("k1", "v1", Store.NO));
+            doc.Add(new StringField("k2", "v2", Store.NO));
+            doc.Add(new BinaryDocValuesField("bdv", ToBytes(5L)));
+            writer.AddDocument(doc); // flushed document
+            writer.Commit();
+            writer.AddDocument(doc); // in-memory document
+
+            writer.UpdateBinaryDocValue(new Term("k1", "v1"), "bdv", ToBytes(17L));
+            writer.UpdateBinaryDocValue(new Term("k2", "v2"), "bdv", ToBytes(3L));
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader r = SlowCompositeReaderWrapper.Wrap(reader);
+            BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
+            BytesRef scratch = new BytesRef();
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                Assert.AreEqual(3, GetValue(bdv, i, scratch));
+            }
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestManyReopensAndFields()
+        {
+            Directory dir = NewDirectory();
+            Random random = Random();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
+            LogMergePolicy lmp = NewLogMergePolicy();
+            lmp.MergeFactor = 3; // merge often
+            conf.SetMergePolicy(lmp);
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            bool isNRT = random.NextBoolean();
+            DirectoryReader reader;
+            if (isNRT)
+            {
+                reader = DirectoryReader.Open(writer, true);
+            }
+            else
+            {
+                writer.Commit();
+                reader = DirectoryReader.Open(dir);
+            }
+
+            int numFields = random.Next(4) + 3; // 3-6
+            long[] fieldValues = new long[numFields];
+            bool[] fieldHasValue = new bool[numFields];
+            Arrays.Fill(fieldHasValue, true);
+            for (int i = 0; i < fieldValues.Length; i++)
+            {
+                fieldValues[i] = 1;
+            }
+
+            int numRounds = AtLeast(15);
+            int docID = 0;
+            for (int i = 0; i < numRounds; i++)
+            {
+                int numDocs = AtLeast(5);
+                //      System.out.println("[" + Thread.currentThread().getName() + "]: round=" + i + ", numDocs=" + numDocs);
+                for (int j = 0; j < numDocs; j++)
+                {
+                    Document doc = new Document();
+                    doc.Add(new StringField("id", "doc-" + docID, Store.NO));
+                    doc.Add(new StringField("key", "all", Store.NO)); // update key
+                    // add all fields with their current value
+                    for (int f = 0; f < fieldValues.Length; f++)
+                    {
+                        doc.Add(new BinaryDocValuesField("f" + f, ToBytes(fieldValues[f])));
+                    }
+                    writer.AddDocument(doc);
+                    ++docID;
+                }
+
+                // if a field's value was unset before, unset it on all newly added documents too
+                for (int field = 0; field < fieldHasValue.Length; field++)
+                {
+                    if (!fieldHasValue[field])
+                    {
+                        writer.UpdateBinaryDocValue(new Term("key", "all"), "f" + field, null);
+                    }
+                }
+
+                int fieldIdx = random.Next(fieldValues.Length);
+                string updateField = "f" + fieldIdx;
+                if (random.NextBoolean())
+                {
+                    //        System.out.println("[" + Thread.currentThread().getName() + "]: unset field '" + updateField + "'");
+                    fieldHasValue[fieldIdx] = false;
+                    writer.UpdateBinaryDocValue(new Term("key", "all"), updateField, null);
+                }
+                else
+                {
+                    fieldHasValue[fieldIdx] = true;
+                    writer.UpdateBinaryDocValue(new Term("key", "all"), updateField, ToBytes(++fieldValues[fieldIdx]));
+                    //        System.out.println("[" + Thread.currentThread().getName() + "]: updated field '" + updateField + "' to value " + fieldValues[fieldIdx]);
+                }
+
+                if (random.NextDouble() < 0.2)
+                {
+                    int deleteDoc = random.Next(docID); // might also delete an already deleted document, ok!
+                    writer.DeleteDocuments(new Term("id", "doc-" + deleteDoc));
+                    //        System.out.println("[" + Thread.currentThread().getName() + "]: deleted document: doc-" + deleteDoc);
+                }
+
+                // verify reader
+                if (!isNRT)
+                {
+                    writer.Commit();
+                }
+
+                //      System.out.println("[" + Thread.currentThread().getName() + "]: reopen reader: " + reader);
+                DirectoryReader newReader = DirectoryReader.OpenIfChanged(reader);
+                Assert.IsNotNull(newReader);
+                reader.Dispose();
+                reader = newReader;
+                //      System.out.println("[" + Thread.currentThread().getName() + "]: reopened reader: " + reader);
+                Assert.IsTrue(reader.NumDocs > 0); // we delete at most one document per round
+                BytesRef scratch = new BytesRef();
+                foreach (AtomicReaderContext context in reader.Leaves)
+                {
+                    AtomicReader r = context.AtomicReader;
+                    //        System.out.println(((SegmentReader) r).getSegmentName());
+                    IBits liveDocs = r.LiveDocs;
+                    for (int field = 0; field < fieldValues.Length; field++)
+                    {
+                        string f = "f" + field;
+                        BinaryDocValues bdv = r.GetBinaryDocValues(f);
+                        IBits docsWithField = r.GetDocsWithField(f);
+                        Assert.IsNotNull(bdv);
+                        int maxDoc = r.MaxDoc;
+                        for (int doc = 0; doc < maxDoc; doc++)
+                        {
+                            if (liveDocs == null || liveDocs.Get(doc))
+                            {
+                                //              System.out.println("doc=" + (doc + context.DocBase) + " f='" + f + "' vslue=" + getValue(bdv, doc, scratch));
+                                if (fieldHasValue[field])
+                                {
+                                    Assert.IsTrue(docsWithField.Get(doc));
+                                    Assert.AreEqual(fieldValues[field], GetValue(bdv, doc, scratch), "invalid value for doc=" + doc + ", field=" + f + ", reader=" + r);
+                                }
+                                else
+                                {
+                                    Assert.IsFalse(docsWithField.Get(doc));
+                                }
+                            }
+                        }
+                    }
+                }
+                //      System.out.println();
+            }
+
+            IOUtils.Close(writer, reader, dir);
+        }
+
+        [Test]
+        public virtual void TestUpdateSegmentWithNoDocValues()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            // prevent merges; otherwise, by the time the updates are applied
+            // (at writer.Dispose()), the segments might have been merged and the
+            // update would become legit.
+            conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            // first segment with BDV
+            Document doc = new Document();
+            doc.Add(new StringField("id", "doc0", Store.NO));
+            doc.Add(new BinaryDocValuesField("bdv", ToBytes(3L)));
+            writer.AddDocument(doc);
+            doc = new Document();
+            doc.Add(new StringField("id", "doc4", Store.NO)); // document without 'bdv' field
+            writer.AddDocument(doc);
+            writer.Commit();
+
+            // second segment with no BDV
+            doc = new Document();
+            doc.Add(new StringField("id", "doc1", Store.NO));
+            writer.AddDocument(doc);
+            doc = new Document();
+            doc.Add(new StringField("id", "doc2", Store.NO)); // document that isn't updated
+            writer.AddDocument(doc);
+            writer.Commit();
+
+            // update document in the first segment - should not affect docsWithField of
+            // the document without BDV field
+            writer.UpdateBinaryDocValue(new Term("id", "doc0"), "bdv", ToBytes(5L));
+
+            // update document in the second segment - field should be added and we should
+            // be able to handle the other document correctly (e.g. no NPE)
+            writer.UpdateBinaryDocValue(new Term("id", "doc1"), "bdv", ToBytes(5L));
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            BytesRef scratch = new BytesRef();
+            foreach (AtomicReaderContext context in reader.Leaves)
+            {
+                AtomicReader r = context.AtomicReader;
+                BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
+                IBits docsWithField = r.GetDocsWithField("bdv");
+                Assert.IsNotNull(docsWithField);
+                Assert.IsTrue(docsWithField.Get(0));
+                Assert.AreEqual(5L, GetValue(bdv, 0, scratch));
+                Assert.IsFalse(docsWithField.Get(1));
+                bdv.Get(1, scratch);
+                Assert.AreEqual(0, scratch.Length);
+            }
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateSegmentWithPostingButNoDocValues()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            // prevent merges; otherwise, by the time the updates are applied
+            // (at writer.Dispose()), the segments might have been merged and the
+            // update would become legit.
+            conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            // first segment with BDV
+            Document doc = new Document();
+            doc.Add(new StringField("id", "doc0", Store.NO));
+            doc.Add(new StringField("bdv", "mock-value", Store.NO));
+            doc.Add(new BinaryDocValuesField("bdv", ToBytes(5L)));
+            writer.AddDocument(doc);
+            writer.Commit();
+
+            // second segment with no BDV
+            doc = new Document();
+            doc.Add(new StringField("id", "doc1", Store.NO));
+            doc.Add(new StringField("bdv", "mock-value", Store.NO));
+            writer.AddDocument(doc);
+            writer.Commit();
+
+            // update document in the second segment
+            writer.UpdateBinaryDocValue(new Term("id", "doc1"), "bdv", ToBytes(5L));
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            BytesRef scratch = new BytesRef();
+            foreach (AtomicReaderContext context in reader.Leaves)
+            {
+                AtomicReader r = context.AtomicReader;
+                BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
+                for (int i = 0; i < r.MaxDoc; i++)
+                {
+                    Assert.AreEqual(5L, GetValue(bdv, i, scratch));
+                }
+            }
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateBinaryDVFieldWithSameNameAsPostingField()
+        {
+            // this used to fail because FieldInfos.Builder neglected to update
+            // globalFieldMaps.docValueTypes map
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("f", "mock-value", Store.NO));
+            doc.Add(new BinaryDocValuesField("f", ToBytes(5L)));
+            writer.AddDocument(doc);
+            writer.Commit();
+            writer.UpdateBinaryDocValue(new Term("f", "mock-value"), "f", ToBytes(17L));
+            writer.Dispose();
+
+            DirectoryReader r = DirectoryReader.Open(dir);
+            BinaryDocValues bdv = ((AtomicReader)r.Leaves[0].Reader).GetBinaryDocValues("f");
+            Assert.AreEqual(17, GetValue(bdv, 0, new BytesRef()));
+            r.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateOldSegments()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
+
+            Codec[] oldCodecs = new Codec[] {
+                new Lucene40RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE),
+                new Lucene41RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE),
+                new Lucene42RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE),
+                new Lucene45RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
+            };
+            Directory dir = NewDirectory();
+
+            // create a segment with an old Codec
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetCodec(oldCodecs[Random().Next(oldCodecs.Length)]);
+            IndexWriter writer = new IndexWriter(dir, conf);
+            Document doc = new Document();
+            doc.Add(new StringField("id", "doc", Store.NO));
+            doc.Add(new BinaryDocValuesField("f", ToBytes(5L)));
+            writer.AddDocument(doc);
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Split from <see cref="TestUpdateOldSegments"/> because OLD_FORMAT_IMPERSONATION_IS_ACTIVE
+        /// is no longer static and the existing codecs have to be remade.
+        /// </summary>
+        [Test, LuceneNetSpecific]
+        public virtual void TestUpdateOldSegments_OldFormatNotActive()
+        {
+            bool oldValue = OLD_FORMAT_IMPERSONATION_IS_ACTIVE;
+
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = false;
+
+            Codec[] oldCodecs = new Codec[] {
+                new Lucene40RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE),
+                new Lucene41RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE),
+                new Lucene42RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE),
+                new Lucene45RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE)
+            };
+
+            Directory dir = NewDirectory();
+            Document doc = new Document();
+            doc.Add(new StringField("id", "doc", Store.NO));
+            doc.Add(new BinaryDocValuesField("f", ToBytes(5L)));
+
+            var conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetCodec(oldCodecs[Random().Next(oldCodecs.Length)]);
+
+            var writer = new IndexWriter(dir, conf);
+            writer.AddDocument(doc);
+            writer.UpdateBinaryDocValue(new Term("id", "doc"), "f", ToBytes(4L));
+
+            try
+            {
+                writer.Dispose();
+                Assert.Fail("should not have succeeded to update a segment written with an old Codec");
+            }
+#pragma warning disable 168
+            catch (System.NotSupportedException e)
+#pragma warning restore 168
+            {
+                writer.Rollback();
+            }
+            finally
+            {
+                OLD_FORMAT_IMPERSONATION_IS_ACTIVE = oldValue;
+            }
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestStressMultiThreading()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            // create index
+            int numThreads = TestUtil.NextInt(Random(), 3, 6);
+            int numDocs = AtLeast(2000);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", "doc" + i, Store.NO));
+                double group = Random().NextDouble();
+                string g;
+                if (group < 0.1)
+                {
+                    g = "g0";
+                }
+                else if (group < 0.5)
+                {
+                    g = "g1";
+                }
+                else if (group < 0.8)
+                {
+                    g = "g2";
+                }
+                else
+                {
+                    g = "g3";
+                }
+                doc.Add(new StringField("updKey", g, Store.NO));
+                for (int j = 0; j < numThreads; j++)
+                {
+                    long value = Random().Next();
+                    doc.Add(new BinaryDocValuesField("f" + j, ToBytes(value)));
+                    doc.Add(new BinaryDocValuesField("cf" + j, ToBytes(value * 2))); // control, always updated to f * 2
+                }
+                writer.AddDocument(doc);
+            }
+
+            CountdownEvent done = new CountdownEvent(numThreads);
+            AtomicInt32 numUpdates = new AtomicInt32(AtLeast(100));
+
+            // each thread updates its own field and also reopens NRT readers
+            ThreadClass[] threads = new ThreadClass[numThreads];
+            for (int i = 0; i < threads.Length; i++)
+            {
+                string f = "f" + i;
+                string cf = "cf" + i;
+                threads[i] = new ThreadAnonymousInnerClassHelper(this, "UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf);
+            }
+
+            foreach (ThreadClass t in threads)
+            {
+                t.Start();
+            }
+            done.Wait();
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            BytesRef scratch = new BytesRef();
+            foreach (AtomicReaderContext context in reader.Leaves)
+            {
+                AtomicReader r = context.AtomicReader;
+                for (int i = 0; i < numThreads; i++)
+                {
+                    BinaryDocValues bdv = r.GetBinaryDocValues("f" + i);
+                    BinaryDocValues control = r.GetBinaryDocValues("cf" + i);
+                    IBits docsWithBdv = r.GetDocsWithField("f" + i);
+                    IBits docsWithControl = r.GetDocsWithField("cf" + i);
+                    IBits liveDocs = r.LiveDocs;
+                    for (int j = 0; j < r.MaxDoc; j++)
+                    {
+                        if (liveDocs == null || liveDocs.Get(j))
+                        {
+                            Assert.AreEqual(docsWithBdv.Get(j), docsWithControl.Get(j));
+                            if (docsWithBdv.Get(j))
+                            {
+                                Assert.AreEqual(GetValue(control, j, scratch), GetValue(bdv, j, scratch) * 2);
+                            }
+                        }
+                    }
+                }
+            }
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestBinaryDocValuesUpdates OuterInstance;
+
+            private IndexWriter Writer;
+            private int NumDocs;
+            private CountdownEvent Done;
+            private AtomicInt32 NumUpdates;
+            private string f;
+            private string Cf;
+
+            public ThreadAnonymousInnerClassHelper(TestBinaryDocValuesUpdates outerInstance, string str, IndexWriter writer, int numDocs, CountdownEvent done, AtomicInt32 numUpdates, string f, string cf)
+                : base(str)
+            {
+                this.OuterInstance = outerInstance;
+                this.Writer = writer;
+                this.NumDocs = numDocs;
+                this.Done = done;
+                this.NumUpdates = numUpdates;
+                this.f = f;
+                this.Cf = cf;
+            }
+
+            public override void Run()
+            {
+                DirectoryReader reader = null;
+                bool success = false;
+                try
+                {
+                    Random random = Random();
+                    while (NumUpdates.GetAndDecrement() > 0)
+                    {
+                        double group = random.NextDouble();
+                        Term t;
+                        if (group < 0.1)
+                        {
+                            t = new Term("updKey", "g0");
+                        }
+                        else if (group < 0.5)
+                        {
+                            t = new Term("updKey", "g1");
+                        }
+                        else if (group < 0.8)
+                        {
+                            t = new Term("updKey", "g2");
+                        }
+                        else
+                        {
+                            t = new Term("updKey", "g3");
+                        }
+                        //              System.out.println("[" + Thread.currentThread().getName() + "] numUpdates=" + numUpdates + " updateTerm=" + t);
+                        if (random.NextBoolean()) // sometimes unset a value
+                        {
+                            Writer.UpdateBinaryDocValue(t, f, null);
+                            Writer.UpdateBinaryDocValue(t, Cf, null);
+                        }
+                        else
+                        {
+                            long updValue = random.Next();
+                            Writer.UpdateBinaryDocValue(t, f, ToBytes(updValue));
+                            Writer.UpdateBinaryDocValue(t, Cf, ToBytes(updValue * 2));
+                        }
+
+                        if (random.NextDouble() < 0.2)
+                        {
+                            // delete a random document
+                            int doc = random.Next(NumDocs);
+                            //                System.out.println("[" + Thread.currentThread().getName() + "] deleteDoc=doc" + doc);
+                            Writer.DeleteDocuments(new Term("id", "doc" + doc));
+                        }
+
+                        if (random.NextDouble() < 0.05) // commit every 20 updates on average
+                        {
+                            //                  System.out.println("[" + Thread.currentThread().getName() + "] commit");
+                            Writer.Commit();
+                        }
+
+                        if (random.NextDouble() < 0.1) // reopen NRT reader (apply updates), on average once every 10 updates
+                        {
+                            if (reader == null)
+                            {
+                                //                  System.out.println("[" + Thread.currentThread().getName() + "] open NRT");
+                                reader = DirectoryReader.Open(Writer, true);
+                            }
+                            else
+                            {
+                                //                  System.out.println("[" + Thread.currentThread().getName() + "] reopen NRT");
+                                DirectoryReader r2 = DirectoryReader.OpenIfChanged(reader, Writer, true);
+                                if (r2 != null)
+                                {
+                                    reader.Dispose();
+                                    reader = r2;
+                                }
+                            }
+                        }
+                    }
+                    //            System.out.println("[" + Thread.currentThread().getName() + "] DONE");
+                    success = true;
+                }
+                catch (IOException e)
+                {
+                    throw new Exception(e.Message, e);
+                }
+                finally
+                {
+                    if (reader != null)
+                    {
+                        try
+                        {
+                            reader.Dispose();
+                        }
+                        catch (IOException e)
+                        {
+                            if (success) // suppress this exception only if there was another exception
+                            {
+                                throw new Exception(e.Message, e);
+                            }
+                        }
+                    }
+                    Done.Signal();
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestUpdateDifferentDocsInDifferentGens()
+        {
+            // each generation updates a random document (possibly the same one more than once)
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMaxBufferedDocs(4);
+            IndexWriter writer = new IndexWriter(dir, conf);
+            int numDocs = AtLeast(10);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", "doc" + i, Store.NO));
+                long value = Random().Next();
+                doc.Add(new BinaryDocValuesField("f", ToBytes(value)));
+                doc.Add(new BinaryDocValuesField("cf", ToBytes(value * 2)));
+                writer.AddDocument(doc);
+            }
+
+            int numGens = AtLeast(5);
+            BytesRef scratch = new BytesRef();
+            for (int i = 0; i < numGens; i++)
+            {
+                int doc = Random().Next(numDocs);
+                Term t = new Term("id", "doc" + doc);
+                long value = Random().NextLong();
+                writer.UpdateBinaryDocValue(t, "f", ToBytes(value));
+                writer.UpdateBinaryDocValue(t, "cf", ToBytes(value * 2));
+                DirectoryReader reader = DirectoryReader.Open(writer, true);
+                foreach (AtomicReaderContext context in reader.Leaves)
+                {
+                    AtomicReader r = context.AtomicReader;
+                    BinaryDocValues fbdv = r.GetBinaryDocValues("f");
+                    BinaryDocValues cfbdv = r.GetBinaryDocValues("cf");
+                    for (int j = 0; j < r.MaxDoc; j++)
+                    {
+                        Assert.AreEqual(GetValue(cfbdv, j, scratch), GetValue(fbdv, j, scratch) * 2);
+                    }
+                }
+                reader.Dispose();
+            }
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestChangeCodec()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // disable merges to simplify test assertions.
+            conf.SetCodec(new Lucene46CodecAnonymousInnerClassHelper2(this));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            Document doc = new Document();
+            doc.Add(new StringField("id", "d0", Store.NO));
+            doc.Add(new BinaryDocValuesField("f1", ToBytes(5L)));
+            doc.Add(new BinaryDocValuesField("f2", ToBytes(13L)));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            // change format
+            conf.SetCodec(new Lucene46CodecAnonymousInnerClassHelper3(this));
+            writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            doc = new Document();
+            doc.Add(new StringField("id", "d1", Store.NO));
+            doc.Add(new BinaryDocValuesField("f1", ToBytes(17L)));
+            doc.Add(new BinaryDocValuesField("f2", ToBytes(2L)));
+            writer.AddDocument(doc);
+            writer.UpdateBinaryDocValue(new Term("id", "d0"), "f1", ToBytes(12L));
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader r = SlowCompositeReaderWrapper.Wrap(reader);
+            BinaryDocValues f1 = r.GetBinaryDocValues("f1");
+            BinaryDocValues f2 = r.GetBinaryDocValues("f2");
+            BytesRef scratch = new BytesRef();
+            Assert.AreEqual(12L, GetValue(f1, 0, scratch));
+            Assert.AreEqual(13L, GetValue(f2, 0, scratch));
+            Assert.AreEqual(17L, GetValue(f1, 1, scratch));
+            Assert.AreEqual(2L, GetValue(f2, 1, scratch));
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        private class Lucene46CodecAnonymousInnerClassHelper2 : Lucene46Codec
+        {
+            private readonly TestBinaryDocValuesUpdates OuterInstance;
+
+            public Lucene46CodecAnonymousInnerClassHelper2(TestBinaryDocValuesUpdates outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override DocValuesFormat GetDocValuesFormatForField(string field)
+            {
+                return new Lucene45DocValuesFormat();
+            }
+        }
+
+        private class Lucene46CodecAnonymousInnerClassHelper3 : Lucene46Codec
+        {
+            private readonly TestBinaryDocValuesUpdates OuterInstance;
+
+            public Lucene46CodecAnonymousInnerClassHelper3(TestBinaryDocValuesUpdates outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override DocValuesFormat GetDocValuesFormatForField(string field)
+            {
+                return new AssertingDocValuesFormat();
+            }
+        }
+
+        [Test]
+        public virtual void TestAddIndexes()
+        {
+            Directory dir1 = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir1, conf);
+
+            int numDocs = AtLeast(50);
+            int numTerms = TestUtil.NextInt(Random(), 1, numDocs / 5);
+            HashSet<string> randomTerms = new HashSet<string>();
+            while (randomTerms.Count < numTerms)
+            {
+                randomTerms.Add(TestUtil.RandomSimpleString(Random()));
+            }
+
+            // create first index
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", RandomInts.RandomFrom(Random(), randomTerms), Store.NO));
+                doc.Add(new BinaryDocValuesField("bdv", ToBytes(4L)));
+                doc.Add(new BinaryDocValuesField("control", ToBytes(8L)));
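+                // "control" always holds twice "bdv" (8 vs. 4 initially); this invariant must survive both updates and AddIndexes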
+                writer.AddDocument(doc);
+            }
+
+            if (Random().NextBoolean())
+            {
+                writer.Commit();
+            }
+
+            // update some docs to a random value
+            long value = Random().Next();
+            Term term = new Term("id", RandomInts.RandomFrom(Random(), randomTerms));
+            writer.UpdateBinaryDocValue(term, "bdv", ToBytes(value));
+            writer.UpdateBinaryDocValue(term, "control", ToBytes(value * 2));
+            writer.Dispose();
+
+            Directory dir2 = NewDirectory();
+            conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            writer = new IndexWriter(dir2, conf);
+            if (Random().NextBoolean())
+            {
+                writer.AddIndexes(dir1);
+            }
+            else
+            {
+                DirectoryReader reader = DirectoryReader.Open(dir1);
+                writer.AddIndexes(reader);
+                reader.Dispose();
+            }
+            writer.Dispose();
+
+            DirectoryReader reader_ = DirectoryReader.Open(dir2);
+            BytesRef scratch = new BytesRef();
+            foreach (AtomicReaderContext context in reader_.Leaves)
+            {
+                AtomicReader r = context.AtomicReader;
+                BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
+                BinaryDocValues control = r.GetBinaryDocValues("control");
+                for (int i = 0; i < r.MaxDoc; i++)
+                {
+                    Assert.AreEqual(GetValue(bdv, i, scratch) * 2, GetValue(control, i, scratch));
+                }
+            }
+            reader_.Dispose();
+
+            IOUtils.Close(dir1, dir2);
+        }
+
+        [Test]
+        public virtual void TestDeleteUnusedUpdatesFiles()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("id", "d0", Store.NO));
+            doc.Add(new BinaryDocValuesField("f", ToBytes(1L)));
+            writer.AddDocument(doc);
+
+            // create first gen of update files
+            writer.UpdateBinaryDocValue(new Term("id", "d0"), "f", ToBytes(2L));
+            writer.Commit();
+            int numFiles = dir.ListAll().Length;
+
+            DirectoryReader r = DirectoryReader.Open(dir);
+            BytesRef scratch = new BytesRef();
+            Assert.AreEqual(2L, GetValue(((AtomicReader)r.Leaves[0].Reader).GetBinaryDocValues("f"), 0, scratch));
+            r.Dispose();
+
+            // create second gen of update files, first gen should be deleted
+            writer.UpdateBinaryDocValue(new Term("id", "d0"), "f", ToBytes(5L));
+            writer.Commit();
+            Assert.AreEqual(numFiles, dir.ListAll().Length);
+
+            r = DirectoryReader.Open(dir);
+            Assert.AreEqual(5L, GetValue(((AtomicReader)r.Leaves[0].Reader).GetBinaryDocValues("f"), 0, scratch));
+            r.Dispose();
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit has no Timeout attribute for .NET Core.
+        [Timeout(40000)]
+#endif
+        [Test, LongRunningTest, HasTimeout]
+        public virtual void TestTonsOfUpdates()
+        {
+            // LUCENE-5248: make sure that when there are many updates, we don't use too much RAM
+            Directory dir = NewDirectory();
+            Random random = Random();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
+            conf.SetRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB);
+            conf.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH); // don't flush by doc
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            // test data: lots of documents (few 10Ks) and lots of update terms (few hundreds)
+            int numDocs = AtLeast(20000);
+            int numBinaryFields = AtLeast(5);
+            int numTerms = TestUtil.NextInt(random, 10, 100); // terms should affect many docs
+            HashSet<string> updateTerms = new HashSet<string>();
+            while (updateTerms.Count < numTerms)
+            {
+                updateTerms.Add(TestUtil.RandomSimpleString(random));
+            }
+
+            //    Console.WriteLine("numDocs=" + numDocs + " numBinaryFields=" + numBinaryFields + " numTerms=" + numTerms);
+
+            // build a large index with many BDV fields and update terms
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                int numUpdateTerms = TestUtil.NextInt(random, 1, numTerms / 10);
+                for (int j = 0; j < numUpdateTerms; j++)
+                {
+                    doc.Add(new StringField("upd", RandomInts.RandomFrom(random, updateTerms), Store.NO));
+                }
+                for (int j = 0; j < numBinaryFields; j++)
+                {
+                    long val = random.Next();
+                    doc.Add(new BinaryDocValuesField("f" + j, ToBytes(val)));
+                    doc.Add(new BinaryDocValuesField("cf" + j, ToBytes(val * 2)));
+                }
+                writer.AddDocument(doc);
+            }
+
+            writer.Commit(); // commit so there's something to apply to
+
+            // set to flush every 2048 bytes (approximately every 12 updates), so we get
+            // many flushes during binary updates
+            writer.Config.SetRAMBufferSizeMB(2048.0 / 1024 / 1024);
+            int numUpdates = AtLeast(100);
+            //    Console.WriteLine("numUpdates=" + numUpdates);
+            for (int i = 0; i < numUpdates; i++)
+            {
+                int field = random.Next(numBinaryFields);
+                Term updateTerm = new Term("upd", RandomInts.RandomFrom(random, updateTerms));
+                long value = random.Next();
+                writer.UpdateBinaryDocValue(updateTerm, "f" + field, ToBytes(value));
+                writer.UpdateBinaryDocValue(updateTerm, "cf" + field, ToBytes(value * 2));
+            }
+
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            BytesRef scratch = new BytesRef();
+            foreach (AtomicReaderContext context in reader.Leaves)
+            {
+                for (int i = 0; i < numBinaryFields; i++)
+                {
+                    AtomicReader r = context.AtomicReader;
+                    BinaryDocValues f = r.GetBinaryDocValues("f" + i);
+                    BinaryDocValues cf = r.GetBinaryDocValues("cf" + i);
+                    for (int j = 0; j < r.MaxDoc; j++)
+                    {
+                        Assert.AreEqual(GetValue(cf, j, scratch), GetValue(f, j, scratch) * 2, "reader=" + r + ", field=f" + i + ", doc=" + j);
+                    }
+                }
+            }
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdatesOrder()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("upd", "t1", Store.NO));
+            doc.Add(new StringField("upd", "t2", Store.NO));
+            doc.Add(new BinaryDocValuesField("f1", ToBytes(1L)));
+            doc.Add(new BinaryDocValuesField("f2", ToBytes(1L)));
+            writer.AddDocument(doc);
+            writer.UpdateBinaryDocValue(new Term("upd", "t1"), "f1", ToBytes(2L)); // update f1 to 2
+            writer.UpdateBinaryDocValue(new Term("upd", "t1"), "f2", ToBytes(2L)); // update f2 to 2
+            writer.UpdateBinaryDocValue(new Term("upd", "t2"), "f1", ToBytes(3L)); // update f1 to 3
+            writer.UpdateBinaryDocValue(new Term("upd", "t2"), "f2", ToBytes(3L)); // update f2 to 3
+            writer.UpdateBinaryDocValue(new Term("upd", "t1"), "f1", ToBytes(4L)); // update f1 to 4 (but not f2)
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            BytesRef scratch = new BytesRef();
+            Assert.AreEqual(4, GetValue(((AtomicReader)reader.Leaves[0].Reader).GetBinaryDocValues("f1"), 0, scratch));
+            Assert.AreEqual(3, GetValue(((AtomicReader)reader.Leaves[0].Reader).GetBinaryDocValues("f2"), 0, scratch));
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateAllDeletedSegment()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("id", "doc", Store.NO));
+            doc.Add(new BinaryDocValuesField("f1", ToBytes(1L)));
+            writer.AddDocument(doc);
+            writer.AddDocument(doc);
+            writer.Commit();
+            writer.DeleteDocuments(new Term("id", "doc")); // delete all docs in the first segment
+            writer.AddDocument(doc);
+            writer.UpdateBinaryDocValue(new Term("id", "doc"), "f1", ToBytes(2L));
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(1, reader.Leaves.Count);
+            Assert.AreEqual(2L, GetValue(((AtomicReader)reader.Leaves[0].Reader).GetBinaryDocValues("f1"), 0, new BytesRef()));
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateTwoNonexistingTerms()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("id", "doc", Store.NO));
+            doc.Add(new BinaryDocValuesField("f1", ToBytes(1L)));
+            writer.AddDocument(doc);
+            // update w/ multiple nonexisting terms in same field
+            writer.UpdateBinaryDocValue(new Term("c", "foo"), "f1", ToBytes(2L));
+            writer.UpdateBinaryDocValue(new Term("c", "bar"), "f1", ToBytes(2L));
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(1, reader.Leaves.Count);
+            Assert.AreEqual(1L, GetValue(((AtomicReader)reader.Leaves[0].Reader).GetBinaryDocValues("f1"), 0, new BytesRef()));
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestBinaryTerms.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestBinaryTerms.cs b/src/Lucene.Net.Tests/Index/TestBinaryTerms.cs
new file mode 100644
index 0000000..7f5d598
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestBinaryTerms.cs
@@ -0,0 +1,88 @@
+using Lucene.Net.Documents;
+using NUnit.Framework;
+
+namespace Lucene.Net.Index
+{
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TextField = TextField;
+    using TopDocs = Lucene.Net.Search.TopDocs;
+
+    /// <summary>
+    /// Test indexing and searching some byte[] terms
+    /// </summary>
+    [SuppressCodecs("Lucene3x")]
+    [TestFixture]
+    public class TestBinaryTerms : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestBinary()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            BytesRef bytes = new BytesRef(2);
+            BinaryTokenStream tokenStream = new BinaryTokenStream(bytes);
+
+            for (int i = 0; i < 256; i++)
+            {
+                bytes.Bytes[0] = (byte)i;
+                bytes.Bytes[1] = unchecked((byte)(255 - i));
+                bytes.Length = 2;
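+                // each term is the two-byte pattern [i, 255 - i], yielding 256 distinct binary terms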
+                Document doc = new Document();
+                FieldType customType = new FieldType();
+                customType.IsStored = true;
+                doc.Add(new Field("id", "" + i, customType));
+                doc.Add(new TextField("bytes", tokenStream));
+                iw.AddDocument(doc);
+            }
+
+            IndexReader ir = iw.Reader;
+            iw.Dispose();
+
+            IndexSearcher @is = NewSearcher(ir);
+
+            for (int i = 0; i < 256; i++)
+            {
+                bytes.Bytes[0] = (byte)i;
+                bytes.Bytes[1] = unchecked((byte)(255 - i));
+                bytes.Length = 2;
+                TopDocs docs = @is.Search(new TermQuery(new Term("bytes", bytes)), 5);
+                Assert.AreEqual(1, docs.TotalHits);
+                Assert.AreEqual("" + i, @is.Doc(docs.ScoreDocs[0].Doc).Get("id"));
+            }
+
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestToString()
+        {
+            Term term = new Term("foo", new BytesRef(new[] { unchecked((byte)0xff), unchecked((byte)0xfe) }));
+            Assert.AreEqual("foo:[ff fe]", term.ToString());
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestByteSlices.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestByteSlices.cs b/src/Lucene.Net.Tests/Index/TestByteSlices.cs
new file mode 100644
index 0000000..9869364
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestByteSlices.cs
@@ -0,0 +1,141 @@
+using System;
+
+namespace Lucene.Net.Index
+{
+    using Attributes;
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+
+    using ByteBlockPool = Lucene.Net.Util.ByteBlockPool;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RecyclingByteBlockAllocator = Lucene.Net.Util.RecyclingByteBlockAllocator;
+
+    /*
+     * Licensed under the Apache License, Version 2.0 (the "License");
+     * you may not use this file except in compliance with the License.
+     * You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+    [TestFixture]
+    public class TestByteSlices : LuceneTestCase
+    {
+#if !NETSTANDARD
+        // LUCENENET: NUnit has no Timeout attribute for .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public virtual void TestBasic()
+        {
+            fail("This test is somehow crashing NUnit and causing it not to complete");
+
+            ByteBlockPool pool = new ByteBlockPool(new RecyclingByteBlockAllocator(ByteBlockPool.BYTE_BLOCK_SIZE, Random().Next(100)));
+
+            int NUM_STREAM = AtLeast(100);
+
+            ByteSliceWriter writer = new ByteSliceWriter(pool);
+
+            int[] starts = new int[NUM_STREAM];
+            int[] uptos = new int[NUM_STREAM];
+            int[] counters = new int[NUM_STREAM];
+
+            ByteSliceReader reader = new ByteSliceReader();
+
+            for (int ti = 0; ti < 100; ti++)
+            {
+                for (int stream = 0; stream < NUM_STREAM; stream++)
+                {
+                    starts[stream] = -1;
+                    counters[stream] = 0;
+                }
+
+                int num = AtLeast(3000);
+                for (int iter = 0; iter < num; iter++)
+                {
+                    int stream;
+                    if (Random().NextBoolean())
+                    {
+                        stream = Random().Next(3);
+                    }
+                    else
+                    {
+                        stream = Random().Next(NUM_STREAM);
+                    }
+
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("write stream=" + stream);
+                    }
+
+                    if (starts[stream] == -1)
+                    {
+                        int spot = pool.NewSlice(ByteBlockPool.FIRST_LEVEL_SIZE);
+                        starts[stream] = uptos[stream] = spot + pool.ByteOffset;
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("  init to " + starts[stream]);
+                        }
+                    }
+
+                    writer.Init(uptos[stream]);
+                    int numValue;
+                    if (Random().Next(10) == 3)
+                    {
+                        numValue = Random().Next(100);
+                    }
+                    else if (Random().Next(5) == 3)
+                    {
+                        numValue = Random().Next(3);
+                    }
+                    else
+                    {
+                        numValue = Random().Next(20);
+                    }
+
+                    for (int j = 0; j < numValue; j++)
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("    write " + (counters[stream] + j));
+                        }
+                        // write some large (incl. negative) ints:
+                        writer.WriteVInt32(Random().Next());
+                        writer.WriteVInt32(counters[stream] + j);
+                    }
+                    counters[stream] += numValue;
+                    uptos[stream] = writer.Address;
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("    addr now " + uptos[stream]);
+                    }
+                }
+
+                for (int stream = 0; stream < NUM_STREAM; stream++)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("  stream=" + stream + " count=" + counters[stream]);
+                    }
+
+                    if (starts[stream] != -1 && starts[stream] != uptos[stream])
+                    {
+                        reader.Init(pool, starts[stream], uptos[stream]);
+                        for (int j = 0; j < counters[stream]; j++)
+                        {
+                            reader.ReadVInt32();
+                            Assert.AreEqual(j, reader.ReadVInt32());
+                        }
+                    }
+                }
+
+                pool.Reset();
+            }
+        }
+    }
+}
\ No newline at end of file


[59/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Renamed Codecs\lucene40\ to Codecs\Lucene40\

Posted by ni...@apache.org.
Lucene.Net.TestFramework: Renamed Codecs\lucene40\ to Codecs\Lucene40\


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/c0e9469c
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/c0e9469c
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/c0e9469c

Branch: refs/heads/api-work
Commit: c0e9469cc2dabf993d19e1ef342956778dfe686e
Parents: 8304ca8
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 26 03:12:28 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:17:57 2017 +0700

----------------------------------------------------------------------
 .../Codecs/Lucene40/Lucene40DocValuesWriter.cs  | 624 +++++++++++++++++++
 .../Codecs/Lucene40/Lucene40FieldInfosWriter.cs | 134 ++++
 .../Codecs/Lucene40/Lucene40PostingsWriter.cs   | 381 +++++++++++
 .../Codecs/Lucene40/Lucene40RWCodec.cs          | 100 +++
 .../Lucene40/Lucene40RWDocValuesFormat.cs       |  66 ++
 .../Codecs/Lucene40/Lucene40RWNormsFormat.cs    |  66 ++
 .../Codecs/Lucene40/Lucene40RWPostingsFormat.cs |  84 +++
 .../Codecs/Lucene40/Lucene40SkipListWriter.cs   | 168 +++++
 .../Codecs/lucene40/Lucene40DocValuesWriter.cs  | 624 -------------------
 .../Codecs/lucene40/Lucene40FieldInfosWriter.cs | 134 ----
 .../Codecs/lucene40/Lucene40PostingsWriter.cs   | 381 -----------
 .../Codecs/lucene40/Lucene40RWCodec.cs          | 100 ---
 .../lucene40/Lucene40RWDocValuesFormat.cs       |  66 --
 .../Codecs/lucene40/Lucene40RWNormsFormat.cs    |  66 --
 .../Codecs/lucene40/Lucene40RWPostingsFormat.cs |  84 ---
 .../Codecs/lucene40/Lucene40SkipListWriter.cs   | 168 -----
 .../Lucene.Net.TestFramework.csproj             |  16 +-
 17 files changed, 1631 insertions(+), 1631 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs
new file mode 100644
index 0000000..42856fc
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40DocValuesWriter.cs
@@ -0,0 +1,624 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Linq;
+
+namespace Lucene.Net.Codecs.Lucene40
+{
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CompoundFileDirectory = Lucene.Net.Store.CompoundFileDirectory;
+    using Directory = Lucene.Net.Store.Directory;
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using LegacyDocValuesType = Lucene.Net.Codecs.Lucene40.Lucene40FieldInfosReader.LegacyDocValuesType;
+    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
+    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
+
+#pragma warning disable 612, 618
+    internal class Lucene40DocValuesWriter : DocValuesConsumer
+    {
+        private readonly Directory Dir;
+        private readonly SegmentWriteState State;
+        private readonly string LegacyKey;
+        private const string SegmentSuffix = "dv";
+
+        // note: intentionally ignores seg suffix
+        internal Lucene40DocValuesWriter(SegmentWriteState state, string filename, string legacyKey)
+        {
+            this.State = state;
+            this.LegacyKey = legacyKey;
+            this.Dir = new CompoundFileDirectory(state.Directory, filename, state.Context, true);
+        }
+
+        public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
+        {
+            // examine the values to determine best type to use
+            long minValue = long.MaxValue;
+            long maxValue = long.MinValue;
+            foreach (long? n in values)
+            {
+                long v = n == null ? 0 : (long)n;
+                minValue = Math.Min(minValue, v);
+                maxValue = Math.Max(maxValue, v);
+            }
+
+            string fileName = IndexFileNames.SegmentFileName(State.SegmentInfo.Name + "_" + Convert.ToString(field.Number), SegmentSuffix, "dat");
+            IndexOutput data = Dir.CreateOutput(fileName, State.Context);
+            bool success = false;
+            try
+            {
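+                // choose the narrowest fixed-width encoding only when packed ints would need
+                // more bits per value than that width; otherwise fall through to
+                // AddVarIntsField, which packs (value - minValue) deltas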
+                if (minValue >= sbyte.MinValue && maxValue <= sbyte.MaxValue && PackedInt32s.BitsRequired(maxValue - minValue) > 4)
+                {
+                    // fits in a byte[], would be more than 4bpv, just write byte[]
+                    AddBytesField(field, data, values);
+                }
+                else if (minValue >= short.MinValue && maxValue <= short.MaxValue && PackedInt32s.BitsRequired(maxValue - minValue) > 8)
+                {
+                    // fits in a short[], would be more than 8bpv, just write short[]
+                    AddShortsField(field, data, values);
+                }
+                else if (minValue >= int.MinValue && maxValue <= int.MaxValue && PackedInt32s.BitsRequired(maxValue - minValue) > 16)
+                {
+                    // fits in an int[], would be more than 16bpv, just write int[]
+                    AddIntsField(field, data, values);
+                }
+                else
+                {
+                    AddVarIntsField(field, data, values, minValue, maxValue);
+                }
+                success = true;
+            }
+            finally
+            {
+                if (success)
+                {
+                    IOUtils.Close(data);
+                }
+                else
+                {
+                    IOUtils.CloseWhileHandlingException(data);
+                }
+            }
+        }
+
+        private void AddBytesField(FieldInfo field, IndexOutput output, IEnumerable<long?> values)
+        {
+            field.PutAttribute(LegacyKey, LegacyDocValuesType.FIXED_INTS_8.Name);
+            CodecUtil.WriteHeader(output, Lucene40DocValuesFormat.INTS_CODEC_NAME, Lucene40DocValuesFormat.INTS_VERSION_CURRENT);
+            output.WriteInt32(1); // size
+            foreach (long? n in values)
+            {
+                output.WriteByte(n == null ? (byte)0 : (byte)n);
+            }
+        }
+
+        private void AddShortsField(FieldInfo field, IndexOutput output, IEnumerable<long?> values)
+        {
+            field.PutAttribute(LegacyKey, LegacyDocValuesType.FIXED_INTS_16.Name);
+            CodecUtil.WriteHeader(output, Lucene40DocValuesFormat.INTS_CODEC_NAME, Lucene40DocValuesFormat.INTS_VERSION_CURRENT);
+            output.WriteInt32(2); // size
+            foreach (long? n in values)
+            {
+                output.WriteInt16(n == null ? (short)0 : (short)n);
+            }
+        }
+
+        private void AddIntsField(FieldInfo field, IndexOutput output, IEnumerable<long?> values)
+        {
+            field.PutAttribute(LegacyKey, LegacyDocValuesType.FIXED_INTS_32.Name);
+            CodecUtil.WriteHeader(output, Lucene40DocValuesFormat.INTS_CODEC_NAME, Lucene40DocValuesFormat.INTS_VERSION_CURRENT);
+            output.WriteInt32(4); // size
+            foreach (long? n in values)
+            {
+                output.WriteInt32(n == null ? 0 : (int)n);
+            }
+        }
+
+        private void AddVarIntsField(FieldInfo field, IndexOutput output, IEnumerable<long?> values, long minValue, long maxValue)
+        {
+            field.PutAttribute(LegacyKey, LegacyDocValuesType.VAR_INTS.Name);
+
+            CodecUtil.WriteHeader(output, Lucene40DocValuesFormat.VAR_INTS_CODEC_NAME, Lucene40DocValuesFormat.VAR_INTS_VERSION_CURRENT);
+
+            long delta = maxValue - minValue;
+
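+            // a negative delta means maxValue - minValue overflowed the long range, so
+            // packed deltas are impossible and fixed 64-bit values are written instead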
+            if (delta < 0)
+            {
+                // writes longs
+                output.WriteByte((byte)Lucene40DocValuesFormat.VAR_INTS_FIXED_64);
+                foreach (long? n in values)
+                {
+                    output.WriteInt64(n == null ? 0 : n.Value);
+                }
+            }
+            else
+            {
+                // writes packed ints
+                output.WriteByte((byte)Lucene40DocValuesFormat.VAR_INTS_PACKED);
+                output.WriteInt64(minValue);
+                output.WriteInt64(0 - minValue); // default value (representation of 0)
+                PackedInt32s.Writer writer = PackedInt32s.GetWriter(output, State.SegmentInfo.DocCount, PackedInt32s.BitsRequired(delta), PackedInt32s.DEFAULT);
+                foreach (long? n in values)
+                {
+                    long v = n == null ? 0 : (long)n;
+                    writer.Add(v - minValue);
+                }
+                writer.Finish();
+            }
+        }
+
+        public override void AddBinaryField(FieldInfo field, IEnumerable<BytesRef> values)
+        {
+            // examine the values to determine best type to use
+            HashSet<BytesRef> uniqueValues = new HashSet<BytesRef>();
+            int minLength = int.MaxValue;
+            int maxLength = int.MinValue;
+
+            var vals = values.ToArray();
+
+            for (int i = 0; i < vals.Length; i++)
+            {
+                var b = vals[i];
+
+                if (b == null)
+                {
+                    b = vals[i] = new BytesRef(); // 4.0 doesn't distinguish missing from empty
+                }
+                if (b.Length > Lucene40DocValuesFormat.MAX_BINARY_FIELD_LENGTH)
+                {
+                    throw new System.ArgumentException("DocValuesField \"" + field.Name + "\" is too large, must be <= " + Lucene40DocValuesFormat.MAX_BINARY_FIELD_LENGTH);
+                }
+                minLength = Math.Min(minLength, b.Length);
+                maxLength = Math.Max(maxLength, b.Length);
+                if (uniqueValues != null)
+                {
+                    if (uniqueValues.Add(BytesRef.DeepCopyOf(b)))
+                    {
+                        if (uniqueValues.Count > 256)
+                        {
+                            uniqueValues = null;
+                        }
+                    }
+                }
+            }
+
+            int maxDoc = State.SegmentInfo.DocCount;
+            bool @fixed = minLength == maxLength;
+            bool dedup = uniqueValues != null && uniqueValues.Count * 2 < maxDoc;
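+            // dereferenced (deduplicated) storage only pays off when there are few distinct
+            // values relative to maxDoc; uniqueValues was nulled out above once more than 256
+            // distinct values were seen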
+
+            if (dedup)
+            {
+                // we will deduplicate and deref values
+                bool success = false;
+                IndexOutput data = null;
+                IndexOutput index = null;
+                string dataName = IndexFileNames.SegmentFileName(State.SegmentInfo.Name + "_" + Convert.ToString(field.Number), SegmentSuffix, "dat");
+                string indexName = IndexFileNames.SegmentFileName(State.SegmentInfo.Name + "_" + Convert.ToString(field.Number), SegmentSuffix, "idx");
+                try
+                {
+                    data = Dir.CreateOutput(dataName, State.Context);
+                    index = Dir.CreateOutput(indexName, State.Context);
+                    if (@fixed)
+                    {
+                        AddFixedDerefBytesField(field, data, index, values, minLength);
+                    }
+                    else
+                    {
+                        AddVarDerefBytesField(field, data, index, values);
+                    }
+                    success = true;
+                }
+                finally
+                {
+                    if (success)
+                    {
+                        IOUtils.Close(data, index);
+                    }
+                    else
+                    {
+                        IOUtils.CloseWhileHandlingException(data, index);
+                    }
+                }
+            }
+            else
+            {
+                // we don't deduplicate, just write values straight
+                if (@fixed)
+                {
+                    // fixed byte[]
+                    string fileName = IndexFileNames.SegmentFileName(State.SegmentInfo.Name + "_" + Convert.ToString(field.Number), SegmentSuffix, "dat");
+                    IndexOutput data = Dir.CreateOutput(fileName, State.Context);
+                    bool success = false;
+                    try
+                    {
+                        AddFixedStraightBytesField(field, data, values, minLength);
+                        success = true;
+                    }
+                    finally
+                    {
+                        if (success)
+                        {
+                            IOUtils.Close(data);
+                        }
+                        else
+                        {
+                            IOUtils.CloseWhileHandlingException(data);
+                        }
+                    }
+                }
+                else
+                {
+                    // variable byte[]
+                    bool success = false;
+                    IndexOutput data = null;
+                    IndexOutput index = null;
+                    string dataName = IndexFileNames.SegmentFileName(State.SegmentInfo.Name + "_" + Convert.ToString(field.Number), SegmentSuffix, "dat");
+                    string indexName = IndexFileNames.SegmentFileName(State.SegmentInfo.Name + "_" + Convert.ToString(field.Number), SegmentSuffix, "idx");
+                    try
+                    {
+                        data = Dir.CreateOutput(dataName, State.Context);
+                        index = Dir.CreateOutput(indexName, State.Context);
+                        AddVarStraightBytesField(field, data, index, values);
+                        success = true;
+                    }
+                    finally
+                    {
+                        if (success)
+                        {
+                            IOUtils.Close(data, index);
+                        }
+                        else
+                        {
+                            IOUtils.CloseWhileHandlingException(data, index);
+                        }
+                    }
+                }
+            }
+        }
+
+        private void AddFixedStraightBytesField(FieldInfo field, IndexOutput output, IEnumerable<BytesRef> values, int length)
+        {
+            field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_FIXED_STRAIGHT.Name);
+
+            CodecUtil.WriteHeader(output, Lucene40DocValuesFormat.BYTES_FIXED_STRAIGHT_CODEC_NAME, Lucene40DocValuesFormat.BYTES_FIXED_STRAIGHT_VERSION_CURRENT);
+
+            output.WriteInt32(length);
+            foreach (BytesRef v in values)
+            {
+                if (v != null)
+                {
+                    output.WriteBytes(v.Bytes, v.Offset, v.Length);
+                }
+            }
+        }
+
+        // NOTE: 4.0 file format docs are crazy/wrong here...
+        private void AddVarStraightBytesField(FieldInfo field, IndexOutput data, IndexOutput index, IEnumerable<BytesRef> values)
+        {
+            field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_VAR_STRAIGHT.Name);
+
+            CodecUtil.WriteHeader(data, Lucene40DocValuesFormat.BYTES_VAR_STRAIGHT_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_VAR_STRAIGHT_VERSION_CURRENT);
+
+            CodecUtil.WriteHeader(index, Lucene40DocValuesFormat.BYTES_VAR_STRAIGHT_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_VAR_STRAIGHT_VERSION_CURRENT);
+
+            /* values */
+
+            long startPos = data.FilePointer;
+
+            foreach (BytesRef v in values)
+            {
+                if (v != null)
+                {
+                    data.WriteBytes(v.Bytes, v.Offset, v.Length);
+                }
+            }
+
+            /* addresses */
+
+            long maxAddress = data.FilePointer - startPos;
+            index.WriteVInt64(maxAddress);
+
+            int maxDoc = State.SegmentInfo.DocCount;
+            Debug.Assert(maxDoc != int.MaxValue); // unsupported by the 4.0 impl
+
+            PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc + 1, PackedInt32s.BitsRequired(maxAddress), PackedInt32s.DEFAULT);
+            long currentPosition = 0;
+            foreach (BytesRef v in values)
+            {
+                w.Add(currentPosition);
+                if (v != null)
+                {
+                    currentPosition += v.Length;
+                }
+            }
+            // write sentinel
+            Debug.Assert(currentPosition == maxAddress);
+            w.Add(currentPosition);
+            w.Finish();
+        }
+
+        private void AddFixedDerefBytesField(FieldInfo field, IndexOutput data, IndexOutput index, IEnumerable<BytesRef> values, int length)
+        {
+            field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_FIXED_DEREF.Name);
+
+            CodecUtil.WriteHeader(data, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_VERSION_CURRENT);
+
+            CodecUtil.WriteHeader(index, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_VERSION_CURRENT);
+
+            // deduplicate
+            SortedSet<BytesRef> dictionary = new SortedSet<BytesRef>();
+            foreach (BytesRef v in values)
+            {
+                dictionary.Add(v == null ? new BytesRef() : BytesRef.DeepCopyOf(v));
+            }
+
+            /* values */
+            data.WriteInt32(length);
+            foreach (BytesRef v in dictionary)
+            {
+                data.WriteBytes(v.Bytes, v.Offset, v.Length);
+            }
+
+            /* ordinals */
+            int valueCount = dictionary.Count;
+            Debug.Assert(valueCount > 0);
+            index.WriteInt32(valueCount);
+            int maxDoc = State.SegmentInfo.DocCount;
+            PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(valueCount - 1), PackedInt32s.DEFAULT);
+
+            BytesRef brefDummy;
+            foreach (BytesRef v in values)
+            {
+                brefDummy = v;
+
+                if (v == null)
+                {
+                    brefDummy = new BytesRef();
+                }
+                //int ord = dictionary.HeadSet(brefDummy).Size();
+                int ord = dictionary.Count(@ref => @ref.CompareTo(brefDummy) < 0);
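+                // LINQ substitute for Java's TreeSet.headSet(v).size(): counts entries that sort
+                // strictly before brefDummy, i.e. its ordinal in the dictionary; note this scan
+                // is O(n) per value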
+                w.Add(ord);
+            }
+            w.Finish();
+        }
+
+        private void AddVarDerefBytesField(FieldInfo field, IndexOutput data, IndexOutput index, IEnumerable<BytesRef> values)
+        {
+            field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_VAR_DEREF.Name);
+
+            CodecUtil.WriteHeader(data, Lucene40DocValuesFormat.BYTES_VAR_DEREF_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_VAR_DEREF_VERSION_CURRENT);
+
+            CodecUtil.WriteHeader(index, Lucene40DocValuesFormat.BYTES_VAR_DEREF_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_VAR_DEREF_VERSION_CURRENT);
+
+            // deduplicate
+            SortedSet<BytesRef> dictionary = new SortedSet<BytesRef>();
+            foreach (BytesRef v in values)
+            {
+                dictionary.Add(v == null ? new BytesRef() : BytesRef.DeepCopyOf(v));
+            }
+
+            /* values */
+            long startPosition = data.FilePointer;
+            long currentAddress = 0;
+            Dictionary<BytesRef, long> valueToAddress = new Dictionary<BytesRef, long>();
+            foreach (BytesRef v in dictionary)
+            {
+                currentAddress = data.FilePointer - startPosition;
+                valueToAddress[v] = currentAddress;
+                WriteVShort(data, v.Length);
+                data.WriteBytes(v.Bytes, v.Offset, v.Length);
+            }
+
+            /* ordinals */
+            long totalBytes = data.FilePointer - startPosition;
+            index.WriteInt64(totalBytes);
+            int maxDoc = State.SegmentInfo.DocCount;
+            PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(currentAddress), PackedInt32s.DEFAULT);
+
+            foreach (BytesRef v in values)
+            {
+                w.Add(valueToAddress[v == null ? new BytesRef() : v]);
+            }
+            w.Finish();
+        }
+
+        // the little vint encoding used for var-deref
+        private static void WriteVShort(IndexOutput o, int i)
+        {
+            Debug.Assert(i >= 0 && i <= short.MaxValue);
+            if (i < 128)
+            {
+                o.WriteByte((byte)(sbyte)i);
+            }
+            else
+            {
+                o.WriteByte((byte)unchecked((sbyte)(0x80 | (i >> 8))));
+                o.WriteByte((byte)unchecked((sbyte)(i & 0xff)));
+            }
+        }
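+        // For example, WriteVShort(o, 5) emits the single byte 0x05, while WriteVShort(o, 300)
+        // emits 0x81 0x2C: 0x80 | (300 >> 8), followed by 300 & 0xff.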
+
+        public override void AddSortedField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd)
+        {
+            // examine the values to determine best type to use
+            int minLength = int.MaxValue;
+            int maxLength = int.MinValue;
+            foreach (BytesRef b in values)
+            {
+                minLength = Math.Min(minLength, b.Length);
+                maxLength = Math.Max(maxLength, b.Length);
+            }
+
+            // but don't use fixed if there are missing values (we are simulating how lucene40 wrote dv...)
+            bool anyMissing = false;
+            foreach (long n in docToOrd)
+            {
+                if ((long)n == -1)
+                {
+                    anyMissing = true;
+                    break;
+                }
+            }
+
+            bool success = false;
+            IndexOutput data = null;
+            IndexOutput index = null;
+            string dataName = IndexFileNames.SegmentFileName(State.SegmentInfo.Name + "_" + Convert.ToString(field.Number), SegmentSuffix, "dat");
+            string indexName = IndexFileNames.SegmentFileName(State.SegmentInfo.Name + "_" + Convert.ToString(field.Number), SegmentSuffix, "idx");
+
+            try
+            {
+                data = Dir.CreateOutput(dataName, State.Context);
+                index = Dir.CreateOutput(indexName, State.Context);
+                if (minLength == maxLength && !anyMissing)
+                {
+                    // fixed byte[]
+                    AddFixedSortedBytesField(field, data, index, values, docToOrd, minLength);
+                }
+                else
+                {
+                    // var byte[]
+                    // three cases for simulating the old writer:
+                    // 1. no missing
+                    // 2. missing (and empty string in use): remap ord=-1 -> ord=0
+                    // 3. missing (and empty string not in use): remap all ords +1, insert empty string into values
+                    if (!anyMissing)
+                    {
+                        AddVarSortedBytesField(field, data, index, values, docToOrd);
+                    }
+                    else if (minLength == 0)
+                    {
+                        AddVarSortedBytesField(field, data, index, values, MissingOrdRemapper.MapMissingToOrd0(docToOrd));
+                    }
+                    else
+                    {
+                        AddVarSortedBytesField(field, data, index, MissingOrdRemapper.InsertEmptyValue(values), MissingOrdRemapper.MapAllOrds(docToOrd));
+                    }
+                }
+                success = true;
+            }
+            finally
+            {
+                if (success)
+                {
+                    IOUtils.Close(data, index);
+                }
+                else
+                {
+                    IOUtils.CloseWhileHandlingException(data, index);
+                }
+            }
+        }
+
+        private void AddFixedSortedBytesField(FieldInfo field, IndexOutput data, IndexOutput index, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd, int length)
+        {
+            field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_FIXED_SORTED.Name);
+
+            CodecUtil.WriteHeader(data, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_VERSION_CURRENT);
+
+            CodecUtil.WriteHeader(index, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_VERSION_CURRENT);
+
+            /* values */
+
+            data.WriteInt32(length);
+            int valueCount = 0;
+            foreach (BytesRef v in values)
+            {
+                data.WriteBytes(v.Bytes, v.Offset, v.Length);
+                valueCount++;
+            }
+
+            /* ordinals */
+
+            index.WriteInt32(valueCount);
+            int maxDoc = State.SegmentInfo.DocCount;
+            Debug.Assert(valueCount > 0);
+            PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(valueCount - 1), PackedInt32s.DEFAULT);
+            foreach (long n in docToOrd)
+            {
+                w.Add((long)n);
+            }
+            w.Finish();
+        }
+
+        private void AddVarSortedBytesField(FieldInfo field, IndexOutput data, IndexOutput index, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd)
+        {
+            field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_VAR_SORTED.Name);
+
+            CodecUtil.WriteHeader(data, Lucene40DocValuesFormat.BYTES_VAR_SORTED_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_VAR_SORTED_VERSION_CURRENT);
+
+            CodecUtil.WriteHeader(index, Lucene40DocValuesFormat.BYTES_VAR_SORTED_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_VAR_SORTED_VERSION_CURRENT);
+
+            /* values */
+
+            long startPos = data.FilePointer;
+
+            int valueCount = 0;
+            foreach (BytesRef v in values)
+            {
+                data.WriteBytes(v.Bytes, v.Offset, v.Length);
+                valueCount++;
+            }
+
+            /* addresses */
+
+            long maxAddress = data.FilePointer - startPos;
+            index.WriteInt64(maxAddress);
+
+            Debug.Assert(valueCount != int.MaxValue); // unsupported by the 4.0 impl
+
+            PackedInt32s.Writer w = PackedInt32s.GetWriter(index, valueCount + 1, PackedInt32s.BitsRequired(maxAddress), PackedInt32s.DEFAULT);
+            long currentPosition = 0;
+            foreach (BytesRef v in values)
+            {
+                w.Add(currentPosition);
+                currentPosition += v.Length;
+            }
+            // write sentinel
+            Debug.Assert(currentPosition == maxAddress);
+            w.Add(currentPosition);
+            w.Finish();
+
+            /* ordinals */
+
+            int maxDoc = State.SegmentInfo.DocCount;
+            Debug.Assert(valueCount > 0);
+            PackedInt32s.Writer ords = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(valueCount - 1), PackedInt32s.DEFAULT);
+            foreach (long n in docToOrd)
+            {
+                ords.Add((long)n);
+            }
+            ords.Finish();
+        }
+
+        public override void AddSortedSetField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
+        {
+            throw new System.NotSupportedException("Lucene 4.0 does not support SortedSet docvalues");
+        }
+
+        protected override void Dispose(bool disposing)
+        {
+            Dir.Dispose();
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40FieldInfosWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40FieldInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40FieldInfosWriter.cs
new file mode 100644
index 0000000..688e365
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40FieldInfosWriter.cs
@@ -0,0 +1,134 @@
+using System;
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Lucene40
+{
+    using Directory = Lucene.Net.Store.Directory;
+    using DocValuesType = Lucene.Net.Index.DocValuesType;
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using FieldInfos = Lucene.Net.Index.FieldInfos;
+    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using IndexOptions = Lucene.Net.Index.IndexOptions;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using LegacyDocValuesType = Lucene.Net.Codecs.Lucene40.Lucene40FieldInfosReader.LegacyDocValuesType;
+
+    /// <summary>
+    /// Lucene 4.0 FieldInfos writer.
+    /// </summary>
+    /// <seealso cref="Lucene40FieldInfosFormat"/>
+    /// @lucene.experimental
+    [Obsolete]
+    public class Lucene40FieldInfosWriter : FieldInfosWriter
+    {
+        /// <summary>
+        /// Sole constructor. </summary>
+        public Lucene40FieldInfosWriter()
+        {
+        }
+
+        public override void Write(Directory directory, string segmentName, string segmentSuffix, FieldInfos infos, IOContext context)
+        {
+            string fileName = IndexFileNames.SegmentFileName(segmentName, "", Lucene40FieldInfosFormat.FIELD_INFOS_EXTENSION);
+            IndexOutput output = directory.CreateOutput(fileName, context);
+            bool success = false;
+            try
+            {
+                CodecUtil.WriteHeader(output, Lucene40FieldInfosFormat.CODEC_NAME, Lucene40FieldInfosFormat.FORMAT_CURRENT);
+                output.WriteVInt32(infos.Count);
+                foreach (FieldInfo fi in infos)
+                {
+                    IndexOptions? indexOptions = fi.IndexOptions;
+                    sbyte bits = 0x0;
+                    if (fi.HasVectors)
+                    {
+                        bits |= Lucene40FieldInfosFormat.STORE_TERMVECTOR;
+                    }
+                    if (fi.OmitsNorms)
+                    {
+                        bits |= Lucene40FieldInfosFormat.OMIT_NORMS;
+                    }
+                    if (fi.HasPayloads)
+                    {
+                        bits |= Lucene40FieldInfosFormat.STORE_PAYLOADS;
+                    }
+                    if (fi.IsIndexed)
+                    {
+                        bits |= Lucene40FieldInfosFormat.IS_INDEXED;
+                        Debug.Assert(indexOptions >= IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !fi.HasPayloads);
+                        if (indexOptions == IndexOptions.DOCS_ONLY)
+                        {
+                            bits |= Lucene40FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS;
+                        }
+                        else if (indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
+                        {
+                            bits |= Lucene40FieldInfosFormat.STORE_OFFSETS_IN_POSTINGS;
+                        }
+                        else if (indexOptions == IndexOptions.DOCS_AND_FREQS)
+                        {
+                            bits |= Lucene40FieldInfosFormat.OMIT_POSITIONS;
+                        }
+                    }
+                    output.WriteString(fi.Name);
+                    output.WriteVInt32(fi.Number);
+                    output.WriteByte((byte)bits);
+
+                    // pack the DV types in one byte
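+                    // (the norms type goes in the high nibble, the doc values
+                    // type in the low nibble; the assert below verifies each
+                    // legacy ordinal fits in 4 bits)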
+                    sbyte dv = DocValuesByte(fi.DocValuesType, fi.GetAttribute(Lucene40FieldInfosReader.LEGACY_DV_TYPE_KEY));
+                    sbyte nrm = DocValuesByte(fi.NormType, fi.GetAttribute(Lucene40FieldInfosReader.LEGACY_NORM_TYPE_KEY));
+                    Debug.Assert((dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
+                    var val = unchecked((sbyte)(0xff & ((nrm << 4) | dv)));
+                    output.WriteByte((byte)val);
+                    output.WriteStringStringMap(fi.Attributes);
+                }
+                success = true;
+            }
+            finally
+            {
+                if (success)
+                {
+                    output.Dispose();
+                }
+                else
+                {
+                    IOUtils.CloseWhileHandlingException(output);
+                }
+            }
+        }
+
+        /// <summary>
+        /// 4.0-style docvalues byte </summary>
+        public virtual sbyte DocValuesByte(DocValuesType? type, string legacyTypeAtt)
+        {
+            if (type == null)
+            {
+                Debug.Assert(legacyTypeAtt == null);
+                return 0;
+            }
+            else
+            {
+                Debug.Assert(legacyTypeAtt != null);
+                return (sbyte)LegacyDocValuesType.ordinalLookup[legacyTypeAtt];
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs
new file mode 100644
index 0000000..11e2dc0
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40PostingsWriter.cs
@@ -0,0 +1,381 @@
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Lucene40
+{
+    using BytesRef = Lucene.Net.Util.BytesRef;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    // Consumes doc & freq, writing them using the current index file format.
+
+    using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
+    using DataOutput = Lucene.Net.Store.DataOutput;
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
+    using IndexOptions = Lucene.Net.Index.IndexOptions;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
+
+    /// <summary>
+    /// Concrete class that writes the 4.0 frq/prx postings format.
+    /// </summary>
+    /// <seealso cref="Lucene40PostingsFormat"/>
+    /// @lucene.experimental
+#pragma warning disable 612, 618
+    public sealed class Lucene40PostingsWriter : PostingsWriterBase
+    {
+        internal readonly IndexOutput FreqOut;
+        internal readonly IndexOutput ProxOut;
+        internal readonly Lucene40SkipListWriter SkipListWriter;
+
+        /// <summary>
+        /// Expert: The fraction of TermDocs entries stored in skip tables,
+        /// used to accelerate <see cref="DocsEnum.Advance(int)"/>.  Larger values result in
+        /// smaller indexes, greater acceleration, but fewer accelerable cases, while
+        /// smaller values result in bigger indexes, less acceleration and more
+        /// accelerable cases. More detailed experiments would be useful here.
+        /// </summary>
+        internal const int DEFAULT_SKIP_INTERVAL = 16;
+
+        internal readonly int SkipInterval;
+
+        /// <summary>
+        /// Expert: minimum docFreq to write any skip data at all
+        /// </summary>
+        internal readonly int SkipMinimum;
+
+        /// <summary>
+        /// Expert: The maximum number of skip levels. Smaller values result in
+        /// slightly smaller indexes, but slower skipping in big posting lists.
+        /// </summary>
+        internal readonly int MaxSkipLevels = 10;
+
+        internal readonly int TotalNumDocs;
+
+        internal IndexOptions? IndexOptions;
+        internal bool StorePayloads;
+        internal bool StoreOffsets;
+
+        // Starts a new term
+        internal long FreqStart;
+
+        internal long ProxStart;
+        internal FieldInfo FieldInfo;
+        internal int LastPayloadLength;
+        internal int LastOffsetLength;
+        internal int LastPosition;
+        internal int LastOffset;
+
+        internal static readonly StandardTermState EmptyState = new StandardTermState();
+        internal StandardTermState LastState;
+
+        // private String segment;
+
+        /// <summary>
+        /// Creates a <see cref="Lucene40PostingsWriter"/>, with the
+        /// default <see cref="DEFAULT_SKIP_INTERVAL"/>.
+        /// </summary>
+        public Lucene40PostingsWriter(SegmentWriteState state)
+            : this(state, DEFAULT_SKIP_INTERVAL)
+        {
+        }
+
+        /// <summary>
+        /// Creates a <see cref="Lucene40PostingsWriter"/>, with the
+        /// specified <c>skipInterval</c>.
+        /// </summary>
+        public Lucene40PostingsWriter(SegmentWriteState state, int skipInterval)
+            : base()
+        {
+            this.SkipInterval = skipInterval;
+            this.SkipMinimum = skipInterval; // set to the same for now
+            // this.segment = state.segmentName;
+            string fileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, Lucene40PostingsFormat.FREQ_EXTENSION);
+            FreqOut = state.Directory.CreateOutput(fileName, state.Context);
+            bool success = false;
+            IndexOutput proxOut = null;
+            try
+            {
+                CodecUtil.WriteHeader(FreqOut, Lucene40PostingsReader.FRQ_CODEC, Lucene40PostingsReader.VERSION_CURRENT);
+                // TODO: this is a best effort, if one of these fields has no postings
+                // then we make an empty prx file, same as if we are wrapped in
+                // per-field postingsformat. maybe... we shouldn't
+                // bother w/ this opto?  just create empty prx file...?
+                if (state.FieldInfos.HasProx)
+                {
+                    // At least one field does not omit TF, so create the
+                    // prox file
+                    fileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, Lucene40PostingsFormat.PROX_EXTENSION);
+                    proxOut = state.Directory.CreateOutput(fileName, state.Context);
+                    CodecUtil.WriteHeader(proxOut, Lucene40PostingsReader.PRX_CODEC, Lucene40PostingsReader.VERSION_CURRENT);
+                }
+                else
+                {
+                    // Every field omits TF so we will write no prox file
+                    proxOut = null;
+                }
+                this.ProxOut = proxOut;
+                success = true;
+            }
+            finally
+            {
+                if (!success)
+                {
+                    IOUtils.CloseWhileHandlingException(FreqOut, proxOut);
+                }
+            }
+
+            TotalNumDocs = state.SegmentInfo.DocCount;
+
+            SkipListWriter = new Lucene40SkipListWriter(skipInterval, MaxSkipLevels, TotalNumDocs, FreqOut, proxOut);
+        }
+
+        public override void Init(IndexOutput termsOut)
+        {
+            CodecUtil.WriteHeader(termsOut, Lucene40PostingsReader.TERMS_CODEC, Lucene40PostingsReader.VERSION_CURRENT);
+            termsOut.WriteInt32(SkipInterval); // write skipInterval
+            termsOut.WriteInt32(MaxSkipLevels); // write maxSkipLevels
+            termsOut.WriteInt32(SkipMinimum); // write skipMinimum
+        }
+
+        public override BlockTermState NewTermState()
+        {
+            return new StandardTermState();
+        }
+
+        public override void StartTerm()
+        {
+            FreqStart = FreqOut.FilePointer;
+            //if (DEBUG) System.out.println("SPW: startTerm freqOut.fp=" + freqStart);
+            if (ProxOut != null)
+            {
+                ProxStart = ProxOut.FilePointer;
+            }
+            // force first payload to write its length
+            LastPayloadLength = -1;
+            // force first offset to write its length
+            LastOffsetLength = -1;
+            SkipListWriter.ResetSkip();
+        }
+
+        // Currently, this instance is re-used across fields, so
+        // our parent calls setField whenever the field changes
+        public override int SetField(FieldInfo fieldInfo)
+        {
+            //System.out.println("SPW: setField");
+            /*
+            if (BlockTreeTermsWriter.DEBUG && fieldInfo.Name.equals("id")) {
+              DEBUG = true;
+            } else {
+              DEBUG = false;
+            }
+            */
+            this.FieldInfo = fieldInfo;
+            IndexOptions = fieldInfo.IndexOptions;
+
+            StoreOffsets = IndexOptions >= Index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
+            StorePayloads = fieldInfo.HasPayloads;
+            LastState = EmptyState;
+            //System.out.println("  set init blockFreqStart=" + freqStart);
+            //System.out.println("  set init blockProxStart=" + proxStart);
+            return 0;
+        }
+
+        internal int LastDocID;
+        internal int Df;
+
+        public override void StartDoc(int docID, int termDocFreq)
+        {
+            // if (DEBUG) System.out.println("SPW:   startDoc seg=" + segment + " docID=" + docID + " tf=" + termDocFreq + " freqOut.fp=" + freqOut.getFilePointer());
+
+            int delta = docID - LastDocID;
+
+            if (docID < 0 || (Df > 0 && delta <= 0))
+            {
+                throw new CorruptIndexException("docs out of order (" + docID + " <= " + LastDocID + ") (freqOut: " + FreqOut + ")");
+            }
+
+            if ((++Df % SkipInterval) == 0)
+            {
+                SkipListWriter.SetSkipData(LastDocID, StorePayloads, LastPayloadLength, StoreOffsets, LastOffsetLength);
+                SkipListWriter.BufferSkip(Df);
+            }
+
+            Debug.Assert(docID < TotalNumDocs, "docID=" + docID + " totalNumDocs=" + TotalNumDocs);
+
+            LastDocID = docID;
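+            // The doc delta carries the term freq in its low bit: DOCS_ONLY
+            // fields write the raw delta, freq == 1 is encoded as
+            // (delta << 1) | 1, and any other freq follows the shifted delta
+            // as an explicit VInt.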
+            if (IndexOptions == Index.IndexOptions.DOCS_ONLY)
+            {
+                FreqOut.WriteVInt32(delta);
+            }
+            else if (1 == termDocFreq)
+            {
+                FreqOut.WriteVInt32((delta << 1) | 1);
+            }
+            else
+            {
+                FreqOut.WriteVInt32(delta << 1);
+                FreqOut.WriteVInt32(termDocFreq);
+            }
+
+            LastPosition = 0;
+            LastOffset = 0;
+        }
+
+        /// <summary>
+        /// Add a new position & payload </summary>
+        public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset)
+        {
+            //if (DEBUG) System.out.println("SPW:     addPos pos=" + position + " payload=" + (payload == null ? "null" : (payload.Length + " bytes")) + " proxFP=" + proxOut.getFilePointer());
+            Debug.Assert(IndexOptions >= Index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, "invalid indexOptions: " + IndexOptions);
+            Debug.Assert(ProxOut != null);
+
+            int delta = position - LastPosition;
+
+            Debug.Assert(delta >= 0, "position=" + position + " lastPosition=" + LastPosition); // not quite right (if pos=0 is repeated twice we don't catch it)
+
+            LastPosition = position;
+
+            int payloadLength = 0;
+
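+            // When payloads are stored, the low bit of the position delta
+            // signals that a new payload length follows as a VInt; an even
+            // delta means the length is unchanged since the previous position.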
+            if (StorePayloads)
+            {
+                payloadLength = payload == null ? 0 : payload.Length;
+
+                if (payloadLength != LastPayloadLength)
+                {
+                    LastPayloadLength = payloadLength;
+                    ProxOut.WriteVInt32((delta << 1) | 1);
+                    ProxOut.WriteVInt32(payloadLength);
+                }
+                else
+                {
+                    ProxOut.WriteVInt32(delta << 1);
+                }
+            }
+            else
+            {
+                ProxOut.WriteVInt32(delta);
+            }
+
+            if (StoreOffsets)
+            {
+                // don't use startOffset - lastEndOffset, because this creates lots of negative vints for synonyms,
+                // and the numbers aren't that much smaller anyways.
+                int offsetDelta = startOffset - LastOffset;
+                int offsetLength = endOffset - startOffset;
+                Debug.Assert(offsetDelta >= 0 && offsetLength >= 0, "startOffset=" + startOffset + ",lastOffset=" + LastOffset + ",endOffset=" + endOffset);
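+                // as with payloads, the low bit of the offset delta flags a
+                // length change followed by an explicit VInt offset length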
+                if (offsetLength != LastOffsetLength)
+                {
+                    ProxOut.WriteVInt32(offsetDelta << 1 | 1);
+                    ProxOut.WriteVInt32(offsetLength);
+                }
+                else
+                {
+                    ProxOut.WriteVInt32(offsetDelta << 1);
+                }
+                LastOffset = startOffset;
+                LastOffsetLength = offsetLength;
+            }
+
+            if (payloadLength > 0)
+            {
+                ProxOut.WriteBytes(payload.Bytes, payload.Offset, payloadLength);
+            }
+        }
+
+        public override void FinishDoc()
+        {
+        }
+
+        internal class StandardTermState : BlockTermState
+        {
+            public long FreqStart;
+            public long ProxStart;
+            public long SkipOffset;
+        }
+
+        /// <summary>
+        /// Called when we are done adding docs to this term </summary>
+        public override void FinishTerm(BlockTermState _state)
+        {
+            StandardTermState state = (StandardTermState)_state;
+            // if (DEBUG) System.out.println("SPW: finishTerm seg=" + segment + " freqStart=" + freqStart);
+            Debug.Assert(state.DocFreq > 0);
+
+            // TODO: wasteful we are counting this (counting # docs
+            // for this term) in two places?
+            Debug.Assert(state.DocFreq == Df);
+            state.FreqStart = FreqStart;
+            state.ProxStart = ProxStart;
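+            // Skip data is only written for terms that appear in at least
+            // skipMinimum docs; -1 marks the absence of a skip list.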
+            if (Df >= SkipMinimum)
+            {
+                state.SkipOffset = SkipListWriter.WriteSkip(FreqOut) - FreqStart;
+            }
+            else
+            {
+                state.SkipOffset = -1;
+            }
+            LastDocID = 0;
+            Df = 0;
+        }
+
+        public override void EncodeTerm(long[] empty, DataOutput @out, FieldInfo fieldInfo, BlockTermState _state, bool absolute)
+        {
+            StandardTermState state = (StandardTermState)_state;
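+            // Term metadata is delta-coded against the previously encoded
+            // term; an absolute encoding resets the baseline to the empty
+            // state.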
+            if (absolute)
+            {
+                LastState = EmptyState;
+            }
+            @out.WriteVInt64(state.FreqStart - LastState.FreqStart);
+            if (state.SkipOffset != -1)
+            {
+                Debug.Assert(state.SkipOffset > 0);
+                @out.WriteVInt64(state.SkipOffset);
+            }
+            if (IndexOptions >= Index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)
+            {
+                @out.WriteVInt64(state.ProxStart - LastState.ProxStart);
+            }
+            LastState = state;
+        }
+
+        protected override void Dispose(bool disposing)
+        {
+            if (disposing)
+            {
+                try
+                {
+                    FreqOut.Dispose();
+                }
+                finally
+                {
+                    if (ProxOut != null)
+                    {
+                        ProxOut.Dispose();
+                    }
+                }
+            }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWCodec.cs
new file mode 100644
index 0000000..79fbb42
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWCodec.cs
@@ -0,0 +1,100 @@
+namespace Lucene.Net.Codecs.Lucene40
+{
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Read-write version of Lucene40Codec for testing </summary>
+#pragma warning disable 612, 618
+    public sealed class Lucene40RWCodec : Lucene40Codec
+    {
+        private readonly FieldInfosFormat fieldInfos;
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Creates the codec with OldFormatImpersonationIsActive = true.
+        /// </summary>
+        /// <remarks>
+        /// Added so that SPIClassIterator can locate this Codec.  The iterator
+        /// only recognises classes that have empty constructors.
+        /// </remarks>
+        public Lucene40RWCodec()
+            : this(true)
+        { }
+
+        /// <param name="oldFormatImpersonationIsActive">
+        /// LUCENENET specific
+        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
+        /// </param>
+        public Lucene40RWCodec(bool oldFormatImpersonationIsActive) : base()
+        {
+            fieldInfos = new Lucene40FieldInfosFormatAnonymousInnerClassHelper(oldFormatImpersonationIsActive);
+            DocValues = new Lucene40RWDocValuesFormat(oldFormatImpersonationIsActive);
+            Norms = new Lucene40RWNormsFormat(oldFormatImpersonationIsActive);
+        }
+
+        private class Lucene40FieldInfosFormatAnonymousInnerClassHelper : Lucene40FieldInfosFormat
+        {
+            private readonly bool _oldFormatImpersonationIsActive;
+
+            /// <param name="oldFormatImpersonationIsActive">
+            /// LUCENENET specific
+            /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
+            /// </param>
+            public Lucene40FieldInfosFormatAnonymousInnerClassHelper(bool oldFormatImpersonationIsActive) : base()
+            {
+                _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
+            }
+
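+            // When old-format impersonation is active, substitute the
+            // write-capable 4.0 field infos writer for the read-only base
+            // implementation.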
+            public override FieldInfosWriter FieldInfosWriter
+            {
+                get
+                {
+                    if (!_oldFormatImpersonationIsActive)
+                    {
+                        return base.FieldInfosWriter;
+                    }
+                    else
+                    {
+                        return new Lucene40FieldInfosWriter();
+                    }
+                }
+            }
+        }
+
+        private readonly DocValuesFormat DocValues;
+        private readonly NormsFormat Norms;
+
+        public override FieldInfosFormat FieldInfosFormat
+        {
+            get { return fieldInfos; }
+        }
+
+        public override DocValuesFormat DocValuesFormat
+        {
+            get { return DocValues; }
+        }
+
+        public override NormsFormat NormsFormat
+        {
+            get { return Norms; }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWDocValuesFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWDocValuesFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWDocValuesFormat.cs
new file mode 100644
index 0000000..2281475
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWDocValuesFormat.cs
@@ -0,0 +1,66 @@
+namespace Lucene.Net.Codecs.Lucene40
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
+
+    /// <summary>
+    /// Read-write version of <see cref="Lucene40DocValuesFormat"/> for testing. </summary>
+#pragma warning disable 612, 618
+    public class Lucene40RWDocValuesFormat : Lucene40DocValuesFormat
+    {
+        private readonly bool _oldFormatImpersonationIsActive;
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Creates the codec with OldFormatImpersonationIsActive = true.
+        /// </summary>
+        /// <remarks>
+        /// Added so that SPIClassIterator can locate this Codec.  The iterator
+        /// only recognises classes that have empty constructors.
+        /// </remarks>
+        public Lucene40RWDocValuesFormat()
+            : this(true)
+        { }
+
+        /// <param name="oldFormatImpersonationIsActive">
+        /// LUCENENET specific
+        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
+        /// </param>
+        public Lucene40RWDocValuesFormat(bool oldFormatImpersonationIsActive) : base()
+        {
+            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
+        }
+
+        public override DocValuesConsumer FieldsConsumer(SegmentWriteState state)
+        {
+            if (!_oldFormatImpersonationIsActive)
+            {
+                return base.FieldsConsumer(state);
+            }
+            else
+            {
+                string filename = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, "dv", IndexFileNames.COMPOUND_FILE_EXTENSION);
+                return new Lucene40DocValuesWriter(state, filename, Lucene40FieldInfosReader.LEGACY_DV_TYPE_KEY);
+            }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWNormsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWNormsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWNormsFormat.cs
new file mode 100644
index 0000000..0830c86
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWNormsFormat.cs
@@ -0,0 +1,66 @@
+namespace Lucene.Net.Codecs.Lucene40
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
+
+    /// <summary>
+    /// Read-write version of <see cref="Lucene40NormsFormat"/> for testing. </summary>
+#pragma warning disable 612, 618
+    public class Lucene40RWNormsFormat : Lucene40NormsFormat
+    {
+        private readonly bool _oldFormatImpersonationIsActive;
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Creates the codec with OldFormatImpersonationIsActive = true.
+        /// </summary>
+        /// <remarks>
+        /// Added so that SPIClassIterator can locate this Codec.  The iterator
+        /// only recognises classes that have empty constructors.
+        /// </remarks>
+        public Lucene40RWNormsFormat()
+            : this(true)
+        { }
+
+        /// <param name="oldFormatImpersonationIsActive">
+        /// LUCENENET specific
+        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
+        /// </param>
+        public Lucene40RWNormsFormat(bool oldFormatImpersonationIsActive) : base()
+        {
+            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
+        }
+
+        public override DocValuesConsumer NormsConsumer(SegmentWriteState state)
+        {
+            if (!_oldFormatImpersonationIsActive)
+            {
+                return base.NormsConsumer(state);
+            }
+            else
+            {
+                string filename = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, "nrm", IndexFileNames.COMPOUND_FILE_EXTENSION);
+                return new Lucene40DocValuesWriter(state, filename, Lucene40FieldInfosReader.LEGACY_NORM_TYPE_KEY);
+            }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWPostingsFormat.cs
new file mode 100644
index 0000000..7a2c9cf
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40RWPostingsFormat.cs
@@ -0,0 +1,84 @@
+namespace Lucene.Net.Codecs.Lucene40
+{
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Read-write version of <see cref="Lucene40PostingsFormat"/> for testing.
+    /// </summary>
+#pragma warning disable 612, 618
+    public class Lucene40RWPostingsFormat : Lucene40PostingsFormat
+    {
+        private readonly bool _oldFormatImpersonationIsActive;
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Creates the codec with OldFormatImpersonationIsActive = true.
+        /// </summary>
+        /// <remarks>
+        /// Added so that SPIClassIterator can locate this Codec.  The iterator
+        /// only recognises classes that have empty constructors.
+        /// </remarks>
+        public Lucene40RWPostingsFormat()
+            : this(true)
+        { }
+
+        /// <param name="oldFormatImpersonationIsActive">
+        /// LUCENENET specific
+        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
+        /// </param>
+        public Lucene40RWPostingsFormat(bool oldFormatImpersonationIsActive) : base()
+        {
+            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
+        }
+
+        public override FieldsConsumer FieldsConsumer(SegmentWriteState state)
+        {
+            if (!_oldFormatImpersonationIsActive)
+            {
+                return base.FieldsConsumer(state);
+            }
+            else
+            {
+                PostingsWriterBase docs = new Lucene40PostingsWriter(state);
+
+                // TODO: should we make the terms index more easily
+                // pluggable?  Ie so that this codec would record which
+                // index impl was used, and switch on loading?
+                // Or... you must make a new Codec for this?
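+                // The postings writer is wrapped in a block-tree terms
+                // dictionary; if wiring up the terms writer fails, dispose the
+                // postings writer so its output files are released.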
+                bool success = false;
+                try
+                {
+                    FieldsConsumer ret = new BlockTreeTermsWriter(state, docs, m_minBlockSize, m_maxBlockSize);
+                    success = true;
+                    return ret;
+                }
+                finally
+                {
+                    if (!success)
+                    {
+                        docs.Dispose();
+                    }
+                }
+            }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40SkipListWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40SkipListWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40SkipListWriter.cs
new file mode 100644
index 0000000..aa8e52e
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene40/Lucene40SkipListWriter.cs
@@ -0,0 +1,168 @@
+using System;
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Lucene40
+{
+    using Lucene.Net.Support;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+
+    /// <summary>
+    /// Implements the skip list writer for the 4.0 posting list format
+    /// that stores positions and payloads.
+    /// </summary>
+    /// <seealso cref="Lucene40PostingsFormat"/>
+    /// @deprecated Only for reading old 4.0 segments
+    [Obsolete("Only for reading old 4.0 segments")]
+    public class Lucene40SkipListWriter : MultiLevelSkipListWriter
+    {
+        private int[] LastSkipDoc;
+        private int[] LastSkipPayloadLength;
+        private int[] LastSkipOffsetLength;
+        private long[] LastSkipFreqPointer;
+        private long[] LastSkipProxPointer;
+
+        private IndexOutput FreqOutput;
+        private IndexOutput ProxOutput;
+
+        private int CurDoc;
+        private bool CurStorePayloads;
+        private bool CurStoreOffsets;
+        private int CurPayloadLength;
+        private int CurOffsetLength;
+        private long CurFreqPointer;
+        private long CurProxPointer;
+
+        /// <summary>
+        /// Sole constructor. </summary>
+        public Lucene40SkipListWriter(int skipInterval, int numberOfSkipLevels, int docCount, IndexOutput freqOutput, IndexOutput proxOutput)
+            : base(skipInterval, numberOfSkipLevels, docCount)
+        {
+            this.FreqOutput = freqOutput;
+            this.ProxOutput = proxOutput;
+
+            LastSkipDoc = new int[numberOfSkipLevels];
+            LastSkipPayloadLength = new int[numberOfSkipLevels];
+            LastSkipOffsetLength = new int[numberOfSkipLevels];
+            LastSkipFreqPointer = new long[numberOfSkipLevels];
+            LastSkipProxPointer = new long[numberOfSkipLevels];
+        }
+
+        /// <summary>
+        /// Sets the values for the current skip data.
+        /// </summary>
+        public virtual void SetSkipData(int doc, bool storePayloads, int payloadLength, bool storeOffsets, int offsetLength)
+        {
+            Debug.Assert(storePayloads || payloadLength == -1);
+            Debug.Assert(storeOffsets || offsetLength == -1);
+            this.CurDoc = doc;
+            this.CurStorePayloads = storePayloads;
+            this.CurPayloadLength = payloadLength;
+            this.CurStoreOffsets = storeOffsets;
+            this.CurOffsetLength = offsetLength;
+            this.CurFreqPointer = FreqOutput.FilePointer;
+            if (ProxOutput != null)
+            {
+                this.CurProxPointer = ProxOutput.FilePointer;
+            }
+        }
+
+        public override void ResetSkip()
+        {
+            base.ResetSkip();
+            Arrays.Fill(LastSkipDoc, 0);
+            Arrays.Fill(LastSkipPayloadLength, -1); // we don't have to write the first length in the skip list
+            Arrays.Fill(LastSkipOffsetLength, -1); // we don't have to write the first length in the skip list
+            Arrays.Fill(LastSkipFreqPointer, FreqOutput.FilePointer);
+            if (ProxOutput != null)
+            {
+                Arrays.Fill(LastSkipProxPointer, ProxOutput.FilePointer);
+            }
+        }
+
+        protected override void WriteSkipData(int level, IndexOutput skipBuffer)
+        {
+            // To efficiently store payloads/offsets in the posting lists we do not store the length of
+            // every payload/offset. Instead we omit the length if the previous lengths were the same
+            //
+            // However, in order to support skipping, the length at every skip point must be known.
+            // So we use the same length encoding that we use for the posting lists for the skip data as well:
+            // Case 1: current field does not store payloads/offsets
+            //           SkipDatum                 --> DocSkip, FreqSkip, ProxSkip
+            //           DocSkip,FreqSkip,ProxSkip --> VInt
+            //           DocSkip records the document number before every SkipInterval-th document in TermFreqs.
+            //           Document numbers are represented as differences from the previous value in the sequence.
+            // Case 2: current field stores payloads/offsets
+            //           SkipDatum                 --> DocSkip, PayloadLength?,OffsetLength?,FreqSkip,ProxSkip
+            //           DocSkip,FreqSkip,ProxSkip --> VInt
+            //           PayloadLength,OffsetLength--> VInt
+            //         In this case DocSkip/2 is the difference between
+            //         the current and the previous value. If DocSkip
+            //         is odd, then a PayloadLength encoded as VInt follows,
+            //         if DocSkip is even, then it is assumed that the
+            //         current payload/offset lengths equals the lengths at the previous
+            //         skip point
+            int delta = CurDoc - LastSkipDoc[level];
+
+            if (CurStorePayloads || CurStoreOffsets)
+            {
+                Debug.Assert(CurStorePayloads || CurPayloadLength == LastSkipPayloadLength[level]);
+                Debug.Assert(CurStoreOffsets || CurOffsetLength == LastSkipOffsetLength[level]);
+
+                if (CurPayloadLength == LastSkipPayloadLength[level] && CurOffsetLength == LastSkipOffsetLength[level])
+                {
+                    // the current payload/offset lengths equals the lengths at the previous skip point,
+                    // so we don't store the lengths again
+                    skipBuffer.WriteVInt32(delta << 1);
+                }
+                else
+                {
+                    // the payload and/or offset length is different from the previous one. We shift the DocSkip,
+                    // set the lowest bit and store the current payload and/or offset lengths as VInts.
+                    skipBuffer.WriteVInt32(delta << 1 | 1);
+
+                    if (CurStorePayloads)
+                    {
+                        skipBuffer.WriteVInt32(CurPayloadLength);
+                        LastSkipPayloadLength[level] = CurPayloadLength;
+                    }
+                    if (CurStoreOffsets)
+                    {
+                        skipBuffer.WriteVInt32(CurOffsetLength);
+                        LastSkipOffsetLength[level] = CurOffsetLength;
+                    }
+                }
+            }
+            else
+            {
+                // current field does not store payloads or offsets
+                skipBuffer.WriteVInt32(delta);
+            }
+
+            skipBuffer.WriteVInt32((int)(CurFreqPointer - LastSkipFreqPointer[level]));
+            skipBuffer.WriteVInt32((int)(CurProxPointer - LastSkipProxPointer[level]));
+
+            LastSkipDoc[level] = CurDoc;
+
+            LastSkipFreqPointer[level] = CurFreqPointer;
+            LastSkipProxPointer[level] = CurProxPointer;
+        }
+    }
+}
\ No newline at end of file


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexWriterUnicode.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterUnicode.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterUnicode.cs
new file mode 100644
index 0000000..e9fdbf4
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterUnicode.cs
@@ -0,0 +1,390 @@
+using Lucene.Net.Attributes;
+using Lucene.Net.Documents;
+using Lucene.Net.Support;
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Util;
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CharsRef = Lucene.Net.Util.CharsRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using UnicodeUtil = Lucene.Net.Util.UnicodeUtil;
+
+    [TestFixture]
+    public class TestIndexWriterUnicode : LuceneTestCase
+    {
+        internal readonly string[] Utf8Data = new string[]
+        {
+            "ab\udc17cd", "ab\ufffdcd", "\udc17abcd", "\ufffdabcd", "\udc17", "\ufffd",
+            "ab\udc17\udc17cd", "ab\ufffd\ufffdcd", "\udc17\udc17abcd", "\ufffd\ufffdabcd", "\udc17\udc17", "\ufffd\ufffd",
+            "ab\ud917cd", "ab\ufffdcd", "\ud917abcd", "\ufffdabcd", "\ud917", "\ufffd",
+            "ab\ud917\ud917cd", "ab\ufffd\ufffdcd", "\ud917\ud917abcd", "\ufffd\ufffdabcd", "\ud917\ud917", "\ufffd\ufffd",
+            "ab\udc17\ud917cd", "ab\ufffd\ufffdcd", "\udc17\ud917abcd", "\ufffd\ufffdabcd", "\udc17\ud917", "\ufffd\ufffd",
+            "ab\udc17\ud917\udc17\ud917cd", "ab\ufffd\ud917\udc17\ufffdcd", "\udc17\ud917\udc17\ud917abcd", "\ufffd\ud917\udc17\ufffdabcd", "\udc17\ud917\udc17\ud917", "\ufffd\ud917\udc17\ufffd"
+        };
+
+        private int NextInt(int lim)
+        {
+            return Random().Next(lim);
+        }
+
+        private int NextInt(int start, int end)
+        {
+            return start + NextInt(end - start);
+        }
+
+        private bool FillUnicode(char[] buffer, char[] expected, int offset, int count)
+        {
+            int len = offset + count;
+            bool hasIllegal = false;
+
+            // Don't start in the middle of a valid surrogate pair
+            if (offset > 0 && buffer[offset] >= 0xdc00 && buffer[offset] < 0xe000)
+            {
+                offset--;
+            }
+
+            for (int i = offset; i < len; i++)
+            {
+                int t = NextInt(6);
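+                // Randomly pick a character class: a surrogate pair, ASCII,
+                // a 2-byte or 3-byte UTF-8 code point, a BMP char above the
+                // surrogate range, or occasionally an illegal unpaired
+                // surrogate.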
+                if (0 == t && i < len - 1)
+                {
+                    // Make a surrogate pair
+                    // High surrogate
+                    expected[i] = buffer[i++] = (char)NextInt(0xd800, 0xdc00);
+                    // Low surrogate
+                    expected[i] = buffer[i] = (char)NextInt(0xdc00, 0xe000);
+                }
+                else if (t <= 1)
+                {
+                    expected[i] = buffer[i] = (char)NextInt(0x80);
+                }
+                else if (2 == t)
+                {
+                    expected[i] = buffer[i] = (char)NextInt(0x80, 0x800);
+                }
+                else if (3 == t)
+                {
+                    expected[i] = buffer[i] = (char)NextInt(0x800, 0xd800);
+                }
+                else if (4 == t)
+                {
+                    expected[i] = buffer[i] = (char)NextInt(0xe000, 0xffff);
+                }
+                else if (5 == t && i < len - 1)
+                {
+                    // Illegal unpaired surrogate
+                    if (NextInt(10) == 7)
+                    {
+                        if (Random().NextBoolean())
+                        {
+                            buffer[i] = (char)NextInt(0xd800, 0xdc00);
+                        }
+                        else
+                        {
+                            buffer[i] = (char)NextInt(0xdc00, 0xe000);
+                        }
+                        expected[i++] = (char)0xfffd;
+                        expected[i] = buffer[i] = (char)NextInt(0x800, 0xd800);
+                        hasIllegal = true;
+                    }
+                    else
+                    {
+                        expected[i] = buffer[i] = (char)NextInt(0x800, 0xd800);
+                    }
+                }
+                else
+                {
+                    expected[i] = buffer[i] = ' ';
+                }
+            }
+
+            return hasIllegal;
+        }
+
+        // both start & end are inclusive
+        private int GetInt(Random r, int start, int end)
+        {
+            return start + r.Next(1 + end - start);
+        }
+
+        private string AsUnicodeChar(char c)
+        {
+            return "U+" + ((int)c).ToString("x");
+        }
+
+        private string TermDesc(string s)
+        {
+            string s0;
+            Assert.IsTrue(s.Length <= 2);
+            if (s.Length == 1)
+            {
+                s0 = AsUnicodeChar(s[0]);
+            }
+            else
+            {
+                s0 = AsUnicodeChar(s[0]) + "," + AsUnicodeChar(s[1]);
+            }
+            return s0;
+        }
+
+        private void CheckTermsOrder(IndexReader r, ISet<string> allTerms, bool isTop)
+        {
+            TermsEnum terms = MultiFields.GetFields(r).GetTerms("f").GetIterator(null);
+
+            BytesRef last = new BytesRef();
+
+            HashSet<string> seenTerms = new HashSet<string>();
+
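+            // Terms are stored as UTF-8, whose unsigned byte order matches
+            // Unicode code point order, so each term must compare strictly
+            // greater than the one before it.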
+            while (true)
+            {
+                BytesRef term = terms.Next();
+                if (term == null)
+                {
+                    break;
+                }
+
+                Assert.IsTrue(last.CompareTo(term) < 0);
+                last.CopyBytes(term);
+
+                string s = term.Utf8ToString();
+                Assert.IsTrue(allTerms.Contains(s), "term " + TermDesc(s) + " was not added to index (count=" + allTerms.Count + ")");
+                seenTerms.Add(s);
+            }
+
+            if (isTop)
+            {
+                Assert.IsTrue(allTerms.SetEquals(seenTerms));
+            }
+
+            // Test seeking:
+            IEnumerator<string> it = seenTerms.GetEnumerator();
+            while (it.MoveNext())
+            {
+                BytesRef tr = new BytesRef(it.Current);
+                Assert.AreEqual(TermsEnum.SeekStatus.FOUND, terms.SeekCeil(tr), "seek failed for term=" + TermDesc(tr.Utf8ToString()));
+            }
+        }
+
+        // LUCENE-510
+        [Test, LongRunningTest]
+        public virtual void TestRandomUnicodeStrings()
+        {
+            char[] buffer = new char[20];
+            char[] expected = new char[20];
+
+            BytesRef utf8 = new BytesRef(20);
+            CharsRef utf16 = new CharsRef(20);
+
+            int num = AtLeast(100000);
+            for (int iter = 0; iter < num; iter++)
+            {
+                bool hasIllegal = FillUnicode(buffer, expected, 0, 20);
+
+                UnicodeUtil.UTF16toUTF8(buffer, 0, 20, utf8);
+                if (!hasIllegal)
+                {
+#pragma warning disable 612, 618
+                    var b = (new string(buffer, 0, 20)).GetBytes(IOUtils.CHARSET_UTF_8);
+#pragma warning restore 612, 618
+                    Assert.AreEqual(b.Length, utf8.Length);
+                    for (int i = 0; i < b.Length; i++)
+                    {
+                        Assert.AreEqual(b[i], utf8.Bytes[i]);
+                    }
+                }
+
+                UnicodeUtil.UTF8toUTF16(utf8.Bytes, 0, utf8.Length, utf16);
+                Assert.AreEqual(utf16.Length, 20);
+                for (int i = 0; i < 20; i++)
+                {
+                    Assert.AreEqual(expected[i], utf16.Chars[i]);
+                }
+            }
+        }
+
+        // LUCENE-510
+        [Test]
+        public virtual void TestAllUnicodeChars()
+        {
+            BytesRef utf8 = new BytesRef(10);
+            CharsRef utf16 = new CharsRef(10);
+            char[] chars = new char[2];
+            for (int ch = 0; ch < 0x0010FFFF; ch++)
+            {
+                // Skip invalid code points
+                if (ch == 0xd800)
+                {
+                    ch = 0xe000;
+                }
+
+                int len = 0;
+                if (ch <= 0xffff)
+                {
+                    chars[len++] = (char)ch;
+                }
+                else
+                {
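+                    // Split the supplementary code point into a UTF-16
+                    // surrogate pair: the high surrogate carries the top 10
+                    // bits, the low surrogate the bottom 10.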
+                    chars[len++] = (char)(((ch - 0x0010000) >> 10) + UnicodeUtil.UNI_SUR_HIGH_START);
+                    chars[len++] = (char)(((ch - 0x0010000) & 0x3FFL) + UnicodeUtil.UNI_SUR_LOW_START);
+                }
+
+                UnicodeUtil.UTF16toUTF8(chars, 0, len, utf8);
+
+                string s1 = new string(chars, 0, len);
+                string s2 = Encoding.UTF8.GetString(utf8.Bytes, utf8.Offset, utf8.Length);
+                Assert.AreEqual(s1, s2, "codepoint " + ch);
+
+                UnicodeUtil.UTF8toUTF16(utf8.Bytes, 0, utf8.Length, utf16);
+                Assert.AreEqual(s1, new string(utf16.Chars, 0, utf16.Length), "codepoint " + ch);
+
+                var b = s1.GetBytes(Encoding.UTF8);
+                Assert.AreEqual(utf8.Length, b.Length);
+                for (int j = 0; j < utf8.Length; j++)
+                {
+                    Assert.AreEqual(utf8.Bytes[j], b[j]);
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestEmbeddedFFFF()
+        {
+            Directory d = NewDirectory();
+            IndexWriter w = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "a a\uffffb", Field.Store.NO));
+            w.AddDocument(doc);
+            doc = new Document();
+            doc.Add(NewTextField("field", "a", Field.Store.NO));
+            w.AddDocument(doc);
+            IndexReader r = w.Reader;
+            Assert.AreEqual(1, r.DocFreq(new Term("field", "a\uffffb")));
+            r.Dispose();
+            w.Dispose();
+            d.Dispose();
+        }
+
+        // LUCENE-510
+        [Test]
+        public virtual void TestInvalidUTF16()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new TestIndexWriter.StringSplitAnalyzer()));
+            Document doc = new Document();
+
+            int count = Utf8Data.Length / 2;
+            for (int i = 0; i < count; i++)
+            {
+                doc.Add(NewTextField("f" + i, Utf8Data[2 * i], Field.Store.YES));
+            }
+            w.AddDocument(doc);
+            w.Dispose();
+
+            IndexReader ir = DirectoryReader.Open(dir);
+            Document doc2 = ir.Document(0);
+            for (int i = 0; i < count; i++)
+            {
+                Assert.AreEqual(1, ir.DocFreq(new Term("f" + i, Utf8Data[2 * i + 1])), "field " + i + " was not indexed correctly");
+                Assert.AreEqual(Utf8Data[2 * i + 1], doc2.GetField("f" + i).GetStringValue(), "field " + i + " is incorrect");
+            }
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        // Make sure terms, including ones with surrogate pairs,
+        // sort in codepoint sort order by default
+        [Test]
+        public virtual void TestTermUTF16SortOrder()
+        {
+            Random rnd = Random();
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(rnd, dir, Similarity, TimeZone);
+            Document d = new Document();
+            // Single segment
+            Field f = NewStringField("f", "", Field.Store.NO);
+            d.Add(f);
+            char[] chars = new char[2];
+            HashSet<string> allTerms = new HashSet<string>();
+
+            int num = AtLeast(200);
+            for (int i = 0; i < num; i++)
+            {
+                string s;
+                if (rnd.NextBoolean())
+                {
+                    // Single char
+                    if (rnd.NextBoolean())
+                    {
+                        // Above surrogates
+                        chars[0] = (char)GetInt(rnd, 1 + UnicodeUtil.UNI_SUR_LOW_END, 0xffff);
+                    }
+                    else
+                    {
+                        // Below surrogates
+                        chars[0] = (char)GetInt(rnd, 0, UnicodeUtil.UNI_SUR_HIGH_START - 1);
+                    }
+                    s = new string(chars, 0, 1);
+                }
+                else
+                {
+                    // Surrogate pair
+                    chars[0] = (char)GetInt(rnd, UnicodeUtil.UNI_SUR_HIGH_START, UnicodeUtil.UNI_SUR_HIGH_END);
+                    Assert.IsTrue(((int)chars[0]) >= UnicodeUtil.UNI_SUR_HIGH_START && ((int)chars[0]) <= UnicodeUtil.UNI_SUR_HIGH_END);
+                    chars[1] = (char)GetInt(rnd, UnicodeUtil.UNI_SUR_LOW_START, UnicodeUtil.UNI_SUR_LOW_END);
+                    s = new string(chars, 0, 2);
+                }
+                allTerms.Add(s);
+                f.SetStringValue(s);
+
+                writer.AddDocument(d);
+
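+                // Commit periodically so the index ends up with multiple
+                // segments, exercising the per-segment checks below.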
+                if ((1 + i) % 42 == 0)
+                {
+                    writer.Commit();
+                }
+            }
+
+            IndexReader r = writer.Reader;
+
+            // Test each sub-segment
+            foreach (AtomicReaderContext ctx in r.Leaves)
+            {
+                CheckTermsOrder(ctx.Reader, allTerms, false);
+            }
+            CheckTermsOrder(r, allTerms, true);
+
+            // Test multi segment
+            r.Dispose();
+
+            writer.ForceMerge(1);
+
+            // Test single segment
+            r = writer.Reader;
+            CheckTermsOrder(r, allTerms, true);
+            r.Dispose();
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexWriterWithThreads.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterWithThreads.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterWithThreads.cs
new file mode 100644
index 0000000..ee541be
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterWithThreads.cs
@@ -0,0 +1,796 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Randomized.Generators;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+using System.IO;
+using System.Threading;
+
+namespace Lucene.Net.Index
+{
+    //using Slow = Lucene.Net.Util.LuceneTestCase.Slow;
+    
+    using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
+    using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using IBits = Lucene.Net.Util.IBits;
+    using LineFileDocs = Lucene.Net.Util.LineFileDocs;
+    using LockObtainFailedException = Lucene.Net.Store.LockObtainFailedException;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using NumericDocValuesField = NumericDocValuesField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    /// <summary>
+    /// Multi-threaded IndexWriter tests
+    /// </summary>
+    [SuppressCodecs("Lucene3x")]
+    [TestFixture]
+    public class TestIndexWriterWithThreads : LuceneTestCase
+    {
+        // Used by test cases below
+        private class IndexerThread : ThreadClass
+        {
+            private readonly Func<string, string, FieldType, Field> NewField;
+
+            internal bool DiskFull;
+            internal Exception Error;
+            internal AlreadyClosedException Ace;
+            internal IndexWriter Writer;
+            internal bool NoErrors;
+            internal volatile int AddCount;
+
+            /// <param name="newField">
+            /// LUCENENET specific
+            /// Passed in because <see cref="LuceneTestCase.NewField(string, string, FieldType)"/>
+            /// is no longer static.
+            /// </param>
+            public IndexerThread(IndexWriter writer, bool noErrors, Func<string, string, FieldType, Field> newField)
+            {
+                this.Writer = writer;
+                this.NoErrors = noErrors;
+                NewField = newField;
+            }
+
+            public override void Run()
+            {
+                Document doc = new Document();
+                FieldType customType = new FieldType(TextField.TYPE_STORED);
+                customType.StoreTermVectors = true;
+                customType.StoreTermVectorPositions = true;
+                customType.StoreTermVectorOffsets = true;
+
+                doc.Add(NewField("field", "aaa bbb ccc ddd eee fff ggg hhh iii jjj", customType));
+                doc.Add(new NumericDocValuesField("dv", 5));
+
+                int idUpto = 0;
+                int fullCount = 0;
+                long stopTime = Environment.TickCount + 200;
+
+                do
+                {
+                    try
+                    {
+                        Writer.UpdateDocument(new Term("id", "" + (idUpto++)), doc);
+                        AddCount++;
+                    }
+                    catch (IOException ioe)
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: expected exc:");
+                            Console.WriteLine(ioe.StackTrace);
+                        }
+                        //System.out.println(Thread.currentThread().getName() + ": hit exc");
+                        //Console.WriteLine(ioe.StackTrace);
+                        if (ioe.Message.StartsWith("fake disk full at") || ioe.Message.Equals("now failing on purpose"))
+                        {
+                            DiskFull = true;
+#if !NETSTANDARD
+                            try
+                            {
+#endif
+                            Thread.Sleep(1);
+#if !NETSTANDARD
+                            }
+                            catch (ThreadInterruptedException ie)
+                            {
+                                throw new ThreadInterruptedException("Thread Interrupted Exception", ie);
+                            }
+#endif
+                            if (fullCount++ >= 5)
+                            {
+                                break;
+                            }
+                        }
+                        else
+                        {
+                            if (NoErrors)
+                            {
+                                Console.WriteLine(Thread.CurrentThread.Name + ": ERROR: unexpected IOException:");
+                                Console.WriteLine(ioe.StackTrace);
+                                Error = ioe;
+                            }
+                            break;
+                        }
+                    }
+                    catch (Exception t)
+                    {
+                        //Console.WriteLine(t.StackTrace);
+                        if (NoErrors)
+                        {
+                            Console.WriteLine(Thread.CurrentThread.Name + ": ERROR: unexpected Throwable:");
+                            Console.WriteLine(t.StackTrace);
+                            Error = t;
+                        }
+                        break;
+                    }
+                } while (Environment.TickCount < stopTime);
+            }
+        }
+
+        // LUCENE-1130: make sure immediate disk full on creating
+        // an IndexWriter (hit during DW.ThreadState.Init()), with
+        // multiple threads, is OK:
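+        // (The disk-full condition is simulated: MaxSizeInBytes below caps the
+        // MockDirectoryWrapper, so writes past the cap throw an IOException
+        // whose message starts with "fake disk full at" -- the string
+        // IndexerThread.Run() matches on above.)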
+        [Test]
+        public virtual void TestImmediateDiskFullWithThreads([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            int NUM_THREADS = 3;
+            int numIterations = TEST_NIGHTLY ? 10 : 3;
+            for (int iter = 0; iter < numIterations; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: iter=" + iter);
+                }
+                MockDirectoryWrapper dir = NewMockDirectory();
+                var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                                .SetMaxBufferedDocs(2)
+                                .SetMergeScheduler(scheduler)
+                                .SetMergePolicy(NewLogMergePolicy(4));
+                IndexWriter writer = new IndexWriter(dir, config);
+                scheduler.SetSuppressExceptions();
+                dir.MaxSizeInBytes = 4 * 1024 + 20 * iter;
+
+                IndexerThread[] threads = new IndexerThread[NUM_THREADS];
+
+                for (int i = 0; i < NUM_THREADS; i++)
+                {
+                    threads[i] = new IndexerThread(writer, true, NewField);
+                }
+
+                for (int i = 0; i < NUM_THREADS; i++)
+                {
+                    threads[i].Start();
+                }
+
+                for (int i = 0; i < NUM_THREADS; i++)
+                {
+                    // Without fix for LUCENE-1130: one of the
+                    // threads will hang
+                    threads[i].Join();
+                    Assert.IsTrue(threads[i].Error == null, "hit unexpected Throwable");
+                }
+
+                // Make sure once disk space is available again, we can
+                // cleanly close:
+                dir.MaxSizeInBytes = 0;
+                writer.Dispose(false);
+                dir.Dispose();
+            }
+        }
+
+        // LUCENE-1130: make sure we can close() even while
+        // threads are trying to add documents.  Strictly
+        // speaking, this isn't valid use of Lucene's APIs, but we
+        // still want to be robust to this case:
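+        // ("close()" is Java parlance; this port exercises the equivalent
+        // writer.Dispose(false), i.e. close without waiting for merges.)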
+        [Test]
+        public virtual void TestCloseWithThreads([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            int NUM_THREADS = 3;
+            int numIterations = TEST_NIGHTLY ? 7 : 3;
+            for (int iter = 0; iter < numIterations; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: iter=" + iter);
+                }
+                Directory dir = NewDirectory();
+                var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                                .SetMaxBufferedDocs(10)
+                                .SetMergeScheduler(scheduler)
+                                .SetMergePolicy(NewLogMergePolicy(4));
+                IndexWriter writer = new IndexWriter(dir, config);
+                scheduler.SetSuppressExceptions();
+
+                IndexerThread[] threads = new IndexerThread[NUM_THREADS];
+
+                for (int i = 0; i < NUM_THREADS; i++)
+                {
+                    threads[i] = new IndexerThread(writer, false, NewField);
+                }
+
+                for (int i = 0; i < NUM_THREADS; i++)
+                {
+                    threads[i].Start();
+                }
+
+                bool done = false;
+                while (!done)
+                {
+                    Thread.Sleep(100);
+                    // only stop when at least one thread has added a doc
+                    for (int i = 0; i < NUM_THREADS; i++)
+                    {
+                        if (threads[i].AddCount > 0)
+                        {
+                            done = true;
+                            break;
+                        }
+                        else if (!threads[i].IsAlive)
+                        {
+                            Assert.Fail("thread failed before indexing a single document");
+                        }
+                    }
+                }
+
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: now close");
+                }
+                writer.Dispose(false);
+
+                // Make sure threads that are adding docs are not hung:
+                for (int i = 0; i < NUM_THREADS; i++)
+                {
+                    // Without fix for LUCENE-1130: one of the
+                    // threads will hang
+                    threads[i].Join();
+                    if (threads[i].IsAlive)
+                    {
+                        Assert.Fail("thread seems to be hung");
+                    }
+                }
+
+                // Quick test to make sure index is not corrupt:
+                IndexReader reader = DirectoryReader.Open(dir);
+                DocsEnum tdocs = TestUtil.Docs(Random(), reader, "field", new BytesRef("aaa"), MultiFields.GetLiveDocs(reader), null, 0);
+                int count = 0;
+                while (tdocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+                {
+                    count++;
+                }
+                Assert.IsTrue(count > 0);
+                reader.Dispose();
+
+                dir.Dispose();
+            }
+        }
+
+        // Runs test, with multiple threads, using the specific
+        // failure to trigger an IOException
+        public virtual void TestMultipleThreadsFailure(IConcurrentMergeScheduler scheduler, MockDirectoryWrapper.Failure failure)
+        {
+            int NUM_THREADS = 3;
+
+            for (int iter = 0; iter < 2; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: iter=" + iter);
+                }
+                MockDirectoryWrapper dir = NewMockDirectory();
+                var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                                .SetMaxBufferedDocs(2)
+                                .SetMergeScheduler(scheduler)
+                                .SetMergePolicy(NewLogMergePolicy(4));
+                IndexWriter writer = new IndexWriter(dir, config);
+                scheduler.SetSuppressExceptions();
+
+                IndexerThread[] threads = new IndexerThread[NUM_THREADS];
+
+                for (int i = 0; i < NUM_THREADS; i++)
+                {
+                    threads[i] = new IndexerThread(writer, true, NewField);
+                }
+
+                for (int i = 0; i < NUM_THREADS; i++)
+                {
+                    threads[i].Start();
+                }
+
+                Thread.Sleep(10);
+
+                dir.FailOn(failure);
+                failure.SetDoFail();
+
+                for (int i = 0; i < NUM_THREADS; i++)
+                {
+                    threads[i].Join();
+                    Assert.IsTrue(threads[i].Error == null, "hit unexpected Throwable");
+                }
+
+                bool success = false;
+                try
+                {
+                    writer.Dispose(false);
+                    success = true;
+                }
+                catch (IOException)
+                {
+                    failure.ClearDoFail();
+                    writer.Dispose(false);
+                }
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: success=" + success);
+                }
+
+                if (success)
+                {
+                    IndexReader reader = DirectoryReader.Open(dir);
+                    IBits delDocs = MultiFields.GetLiveDocs(reader);
+                    for (int j = 0; j < reader.MaxDoc; j++)
+                    {
+                        if (delDocs == null || !delDocs.Get(j))
+                        {
+                            reader.Document(j);
+                            reader.GetTermVectors(j);
+                        }
+                    }
+                    reader.Dispose();
+                }
+
+                dir.Dispose();
+            }
+        }
+
+        // Runs test, with one thread, using the specific failure
+        // to trigger an IOException
+        public virtual void TestSingleThreadFailure(IConcurrentMergeScheduler scheduler, MockDirectoryWrapper.Failure failure)
+        {
+            MockDirectoryWrapper dir = NewMockDirectory();
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetMergeScheduler(scheduler));
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+            doc.Add(NewField("field", "aaa bbb ccc ddd eee fff ggg hhh iii jjj", customType));
+
+            for (int i = 0; i < 6; i++)
+            {
+                writer.AddDocument(doc);
+            }
+
+            dir.FailOn(failure);
+            failure.SetDoFail();
+            try
+            {
+                writer.AddDocument(doc);
+                writer.AddDocument(doc);
+                writer.Commit();
+                Assert.Fail("did not hit exception");
+            }
+            catch (IOException)
+            {
+            }
+            failure.ClearDoFail();
+            writer.AddDocument(doc);
+            writer.Dispose(false);
+            dir.Dispose();
+        }
+
+        // Throws IOException during FieldsWriter.flushDocument and during DocumentsWriter.abort
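+        // (Failure-injection pattern: MockDirectoryWrapper invokes Eval() on
+        // each registered Failure per file operation; Eval() inspects the
+        // current stack trace -- here via StackTraceHelper -- and throws only
+        // when the operation originates from the targeted methods.)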
+        private class FailOnlyOnAbortOrFlush : MockDirectoryWrapper.Failure
+        {
+            internal bool OnlyOnce;
+
+            public FailOnlyOnAbortOrFlush(bool onlyOnce)
+            {
+                this.OnlyOnce = onlyOnce;
+            }
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
+                // Since we throw an exception during abort, e.g. when IW is
+                // attempting to delete files, we will leave
+                // leftovers:
+                dir.AssertNoUnrefencedFilesOnClose = false;
+
+                if (DoFail)
+                {
+                    bool sawAbortOrFlushDoc = StackTraceHelper.DoesStackTraceContainMethod("Abort")
+                        || StackTraceHelper.DoesStackTraceContainMethod("FinishDocument");
+                    bool sawClose = StackTraceHelper.DoesStackTraceContainMethod("Close")
+                        || StackTraceHelper.DoesStackTraceContainMethod("Dispose");
+                    bool sawMerge = StackTraceHelper.DoesStackTraceContainMethod("Merge");
+
+                    if (sawAbortOrFlushDoc && !sawClose && !sawMerge)
+                    {
+                        if (OnlyOnce)
+                        {
+                            DoFail = false;
+                        }
+                        //System.out.println(Thread.currentThread().getName() + ": now fail");
+                        //new Throwable(Console.WriteLine().StackTrace);
+                        throw new IOException("now failing on purpose");
+                    }
+                }
+            }
+        }
+
+        // LUCENE-1130: make sure initial IOException, and then 2nd
+        // IOException during rollback(), is OK:
+        [Test]
+        public virtual void TestIOExceptionDuringAbort([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            TestSingleThreadFailure(scheduler, new FailOnlyOnAbortOrFlush(false));
+        }
+
+        // LUCENE-1130: make sure initial IOException, and then 2nd
+        // IOException during rollback(), is OK:
+        [Test]
+        public virtual void TestIOExceptionDuringAbortOnlyOnce([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            TestSingleThreadFailure(scheduler, new FailOnlyOnAbortOrFlush(true));
+        }
+
+        // LUCENE-1130: make sure initial IOException, and then 2nd
+        // IOException during rollback(), with multiple threads, is OK:
+        [Test]
+        public virtual void TestIOExceptionDuringAbortWithThreads([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            TestMultipleThreadsFailure(scheduler, new FailOnlyOnAbortOrFlush(false));
+        }
+
+        // LUCENE-1130: make sure initial IOException, and then 2nd
+        // IOException during rollback(), with multiple threads, is OK:
+        [Test]
+        public virtual void TestIOExceptionDuringAbortWithThreadsOnlyOnce([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            TestMultipleThreadsFailure(scheduler, new FailOnlyOnAbortOrFlush(true));
+        }
+
+        // Throws IOException during DocumentsWriter.writeSegment
+        private class FailOnlyInWriteSegment : MockDirectoryWrapper.Failure
+        {
+            internal bool OnlyOnce;
+
+            public FailOnlyInWriteSegment(bool onlyOnce)
+            {
+                this.OnlyOnce = onlyOnce;
+            }
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
+                if (DoFail)
+                {
+                    if (StackTraceHelper.DoesStackTraceContainMethod("Flush") /*&& "Lucene.Net.Index.DocFieldProcessor".Equals(frame.GetType().Name)*/)
+                    {
+                        if (OnlyOnce)
+                        {
+                            DoFail = false;
+                        }
+                        //System.out.println(Thread.currentThread().getName() + ": NOW FAIL: onlyOnce=" + onlyOnce);
+                        //new Throwable(Console.WriteLine().StackTrace);
+                        throw new IOException("now failing on purpose");
+                    }
+                }
+            }
+        }
+
+        // LUCENE-1130: test IOException in writeSegment
+        [Test]
+        public virtual void TestIOExceptionDuringWriteSegment([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            TestSingleThreadFailure(scheduler, new FailOnlyInWriteSegment(false));
+        }
+
+        // LUCENE-1130: test IOException in writeSegment
+        [Test]
+        public virtual void TestIOExceptionDuringWriteSegmentOnlyOnce([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            TestSingleThreadFailure(scheduler, new FailOnlyInWriteSegment(true));
+        }
+
+        // LUCENE-1130: test IOException in writeSegment, with threads
+        [Test]
+        public virtual void TestIOExceptionDuringWriteSegmentWithThreads([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            TestMultipleThreadsFailure(scheduler, new FailOnlyInWriteSegment(false));
+        }
+
+        // LUCENE-1130: test IOException in writeSegment, with threads
+        [Test]
+        public virtual void TestIOExceptionDuringWriteSegmentWithThreadsOnlyOnce([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            TestMultipleThreadsFailure(scheduler, new FailOnlyInWriteSegment(true));
+        }
+
+        //  LUCENE-3365: Test adding two documents with the same field from two different IndexWriters
+        //  that we attempt to open at the same time.  As long as the first IndexWriter completes
+        //  and closes before the second IndexWriter times out trying to get the lock,
+        //  we should see both documents
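+        //  Both writers open the same Directory, so the second constructor
+        //  blocks on the index write lock; if it cannot obtain the lock in
+        //  time, a LockObtainFailedException surfaces, which the test treats
+        //  as an environmental timeout (via AssumeFalse) rather than a failure.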
+        [Test]
+        public virtual void TestOpenTwoIndexWritersOnDifferentThreads()
+        {
+            Directory dir = NewDirectory();
+            CountdownEvent oneIWConstructed = new CountdownEvent(1);
+            DelayedIndexAndCloseRunnable thread1 = new DelayedIndexAndCloseRunnable(dir, oneIWConstructed, this);
+            DelayedIndexAndCloseRunnable thread2 = new DelayedIndexAndCloseRunnable(dir, oneIWConstructed, this);
+
+            thread1.Start();
+            thread2.Start();
+            oneIWConstructed.Wait();
+
+            thread1.StartIndexing();
+            thread2.StartIndexing();
+
+            thread1.Join();
+            thread2.Join();
+
+            // ensure the directory is closed if we hit the timeout and throw an assumption failure
+            // TODO: can we improve this in LuceneTestCase? I don't know what the logic would be...
+            try
+            {
+                AssumeFalse("aborting test: timeout obtaining lock", thread1.Failure is LockObtainFailedException);
+                AssumeFalse("aborting test: timeout obtaining lock", thread2.Failure is LockObtainFailedException);
+
+                Assert.IsFalse(thread1.Failed, "Failed due to: " + thread1.Failure);
+                Assert.IsFalse(thread2.Failed, "Failed due to: " + thread2.Failure);
+                // now verify that we have two documents in the index
+                IndexReader reader = DirectoryReader.Open(dir);
+                Assert.AreEqual(2, reader.NumDocs, "IndexReader should have one document per thread running");
+
+                reader.Dispose();
+            }
+            finally
+            {
+                dir.Dispose();
+            }
+        }
+
+        internal class DelayedIndexAndCloseRunnable : ThreadClass
+        {
+            internal readonly Directory Dir;
+            internal bool Failed = false;
+            internal Exception Failure = null;
+            internal readonly CountdownEvent StartIndexing_Renamed = new CountdownEvent(1);
+            internal CountdownEvent IwConstructed;
+            private readonly LuceneTestCase OuterInstance;
+
+            /// <param name="outerInstance">
+            /// LUCENENET specific
+            /// Passed in because this class accesses non-static methods,
+            /// NewTextField and NewIndexWriterConfig
+            /// </param>
+            public DelayedIndexAndCloseRunnable(Directory dir, CountdownEvent iwConstructed, LuceneTestCase outerInstance)
+            {
+                this.Dir = dir;
+                this.IwConstructed = iwConstructed;
+                OuterInstance = outerInstance;
+            }
+
+            public virtual void StartIndexing()
+            {
+                this.StartIndexing_Renamed.Signal();
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    Document doc = new Document();
+                    Field field = OuterInstance.NewTextField("field", "testData", Field.Store.YES);
+                    doc.Add(field);
+                    using (IndexWriter writer = new IndexWriter(Dir, OuterInstance.NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))))
+                    {
+                        if (IwConstructed.CurrentCount > 0)
+                        {
+                            IwConstructed.Signal();
+                        }
+                        StartIndexing_Renamed.Wait();
+                        writer.AddDocument(doc);
+                    }
+                }
+                catch (Exception e)
+                {
+                    Failed = true;
+                    Failure = e;
+                    Console.WriteLine(e.ToString());
+                    return;
+                }
+            }
+        }
+
+        // LUCENE-4147
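+        // Threads race Rollback(), (Prepare)Commit(), and AddDocument()
+        // against a shared writer reference; Rollback() closes the writer, so
+        // racing threads may legitimately observe AlreadyClosedException (or,
+        // in this port, NullReferenceException) and swallow it -- anything
+        // else sets 'failed' and fails the test.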
+        [Test]
+        public virtual void TestRollbackAndCommitWithThreads()
+        {
+            BaseDirectoryWrapper d = NewDirectory();
+            if (d is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)d).PreventDoubleWrite = false;
+            }
+
+            int threadCount = TestUtil.NextInt(Random(), 2, 6);
+
+            MockAnalyzer analyzer = new MockAnalyzer(Random());
+            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
+            AtomicObject<IndexWriter> writerRef =
+                new AtomicObject<IndexWriter>(new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)));
+
+            LineFileDocs docs = new LineFileDocs(Random());
+            ThreadClass[] threads = new ThreadClass[threadCount];
+            int iters = AtLeast(100);
+            AtomicBoolean failed = new AtomicBoolean();
+            ReentrantLock rollbackLock = new ReentrantLock();
+            ReentrantLock commitLock = new ReentrantLock();
+            for (int threadID = 0; threadID < threadCount; threadID++)
+            {
+                threads[threadID] = new ThreadAnonymousInnerClassHelper(this, d, writerRef, docs, iters, failed, rollbackLock, commitLock);
+                threads[threadID].Start();
+            }
+
+            for (int threadID = 0; threadID < threadCount; threadID++)
+            {
+                try
+                {
+                    threads[threadID].Join();
+                } 
+                catch (Exception e)
+                {
+                    Console.WriteLine("EXCEPTION in ThreadAnonymousInnerClassHelper: " + Environment.NewLine + e);
+                }
+            }
+
+            Assert.IsTrue(!failed.Get());
+            writerRef.Value.Dispose();
+            d.Dispose();
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestIndexWriterWithThreads OuterInstance;
+
+            private BaseDirectoryWrapper d;
+            private AtomicObject<IndexWriter> WriterRef;
+            private LineFileDocs Docs;
+            private int Iters;
+            private AtomicBoolean Failed;
+            private ReentrantLock RollbackLock;
+            private ReentrantLock CommitLock;
+
+            public ThreadAnonymousInnerClassHelper(TestIndexWriterWithThreads outerInstance, BaseDirectoryWrapper d, AtomicObject<IndexWriter> writerRef, LineFileDocs docs, int iters, AtomicBoolean failed, ReentrantLock rollbackLock, ReentrantLock commitLock)
+            {
+                this.OuterInstance = outerInstance;
+                this.d = d;
+                this.WriterRef = writerRef;
+                this.Docs = docs;
+                this.Iters = iters;
+                this.Failed = failed;
+                this.RollbackLock = rollbackLock;
+                this.CommitLock = commitLock;
+            }
+
+            public override void Run()
+            {
+                for (int iter = 0; iter < Iters && !Failed.Get(); iter++)
+                {
+                    //final int x = Random().nextInt(5);
+                    int x = Random().Next(3);
+                    try
+                    {
+                        switch (x)
+                        {
+                            case 0:
+                                RollbackLock.@Lock();
+                                if (VERBOSE)
+                                {
+                                    Console.WriteLine("\nTEST: " + Thread.CurrentThread.Name + ": now rollback");
+                                }
+                                try
+                                {
+                                    WriterRef.Value.Rollback();
+                                    if (VERBOSE)
+                                    {
+                                        Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": rollback done; now open new writer");
+                                    }
+                                    WriterRef.Value = 
+                                        new IndexWriter(d, OuterInstance.NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                                }
+                                finally
+                                {
+                                    RollbackLock.Unlock();
+                                }
+                                break;
+
+                            case 1:
+                                CommitLock.@Lock();
+                                if (VERBOSE)
+                                {
+                                    Console.WriteLine("\nTEST: " + Thread.CurrentThread.Name + ": now commit");
+                                }
+                                try
+                                {
+                                    if (Random().NextBoolean())
+                                    {
+                                        WriterRef.Value.PrepareCommit();
+                                    }
+                                    WriterRef.Value.Commit();
+                                }
+                                catch (AlreadyClosedException)
+                                {
+                                    // ok
+                                }
+                                catch (NullReferenceException)
+                                {
+                                    // ok
+                                }
+                                finally
+                                {
+                                    CommitLock.Unlock();
+                                }
+                                break;
+
+                            case 2:
+                                if (VERBOSE)
+                                {
+                                    Console.WriteLine("\nTEST: " + Thread.CurrentThread.Name + ": now add");
+                                }
+                                try
+                                {
+                                    WriterRef.Value.AddDocument(Docs.NextDoc());
+                                }
+                                catch (AlreadyClosedException)
+                                {
+                                    // ok
+                                }
+                                catch (NullReferenceException)
+                                {
+                                    // ok
+                                }
+                                catch (InvalidOperationException)
+                                {
+                                    // ok
+                                }
+                                break;
+                        }
+                    }
+                    catch (Exception t)
+                    {
+                        Failed.Set(true);
+                        throw new Exception(t.Message, t);
+                    }
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexableField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexableField.cs b/src/Lucene.Net.Tests/Index/TestIndexableField.cs
new file mode 100644
index 0000000..88402dd
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexableField.cs
@@ -0,0 +1,453 @@
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.IO;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using BooleanClause = Lucene.Net.Search.BooleanClause;
+    using BooleanQuery = Lucene.Net.Search.BooleanQuery;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using Lucene3xCodec = Lucene.Net.Codecs.Lucene3x.Lucene3xCodec;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using Occur = Lucene.Net.Search.Occur;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TokenStream = Lucene.Net.Analysis.TokenStream;
+    using TopDocs = Lucene.Net.Search.TopDocs;
+
+    [TestFixture]
+    public class TestIndexableField : LuceneTestCase
+    {
+        private class MyField : IIndexableField
+        {
+            private readonly TestIndexableField OuterInstance;
+
+            internal readonly int Counter;
+            internal readonly IIndexableFieldType fieldType;
+
+            public MyField()
+            {
+                fieldType = new IndexableFieldTypeAnonymousInnerClassHelper(this);
+            }
+
+            private class IndexableFieldTypeAnonymousInnerClassHelper : IIndexableFieldType
+            {
+                private MyField OuterInstance;
+
+                public IndexableFieldTypeAnonymousInnerClassHelper(MyField outerInstance)
+                {
+                    OuterInstance = outerInstance;
+                }
+
+                public bool IsIndexed
+                {
+                    get { return (OuterInstance.Counter % 10) != 3; }
+                    set { }
+                }
+
+                public bool IsStored
+                {
+                    get { return (OuterInstance.Counter & 1) == 0 || (OuterInstance.Counter % 10) == 3; }
+                    set { }
+                }
+
+                public bool IsTokenized
+                {
+                    get { return true; }
+                    set { }
+                }
+
+                public bool StoreTermVectors
+                {
+                    get { return IsIndexed && OuterInstance.Counter % 2 == 1 && OuterInstance.Counter % 10 != 9; }
+                    set { }
+                }
+
+                public bool StoreTermVectorOffsets
+                {
+                    get { return StoreTermVectors && OuterInstance.Counter % 10 != 9; }
+                    set { }
+                }
+
+                public bool StoreTermVectorPositions
+                {
+                    get { return StoreTermVectors && OuterInstance.Counter % 10 != 9; }
+                    set { }
+                }
+
+                public bool StoreTermVectorPayloads
+                {
+                    get
+                    {
+#pragma warning disable 612, 618
+                        if (Codec.Default is Lucene3xCodec)
+#pragma warning restore 612, 618
+                        {
+                            return false; // 3.x doesn't support term vector payloads
+                        }
+                        else
+                        {
+                            return StoreTermVectors && OuterInstance.Counter % 10 != 9;
+                        }
+                    }
+                    set { }
+                }
+
+                public bool OmitNorms
+                {
+                    get { return false; }
+                    set { }
+                }
+
+                public IndexOptions? IndexOptions
+                {
+                    get { return Index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS; }
+                    set { }
+                }
+
+                public NumericType? NumericType
+                {
+                    get { throw new NotImplementedException(); }
+                    set { }
+                }
+
+                public DocValuesType? DocValueType
+                {
+                    get { return null; }
+                    set { }
+                }
+            }
+
+            public MyField(TestIndexableField outerInstance, int counter)
+                : this()
+            {
+                this.OuterInstance = outerInstance;
+                this.Counter = counter;
+            }
+
+            public string Name
+            {
+                get { return "f" + Counter; }
+            }
+
+            public float Boost
+            {
+                get { return 1.0f + (float)Random().NextDouble(); }
+            }
+
+            public BytesRef GetBinaryValue()
+            {
+                if ((Counter % 10) == 3)
+                {
+                    var bytes = new byte[10];
+                    for (int idx = 0; idx < bytes.Length; idx++)
+                    {
+                        bytes[idx] = (byte)(Counter + idx);
+                    }
+                    return new BytesRef(bytes, 0, bytes.Length);
+                }
+                else
+                {
+                    return null;
+                }
+            }
+
+            public string GetStringValue()
+            {
+                int fieldID = Counter % 10;
+                if (fieldID != 3 && fieldID != 7)
+                {
+                    return "text " + Counter;
+                }
+                else
+                {
+                    return null;
+                }
+            }
+
+            public TextReader GetReaderValue()
+            {
+                if (Counter % 10 == 7)
+                {
+                    return new StringReader("text " + Counter);
+                }
+                else
+                {
+                    return null;
+                }
+            }
+
+            public object GetNumericValue()
+            {
+                return null;
+            }
+
+            public IIndexableFieldType FieldType
+            {
+                get { return fieldType; }
+            }
+
+            public TokenStream GetTokenStream(Analyzer analyzer)
+            {
+                return GetReaderValue() != null ? analyzer.TokenStream(Name, GetReaderValue()) : analyzer.TokenStream(Name, new StringReader(GetStringValue()));
+            }
+        }
+
+        // Silly test showing how to index documents without using Lucene's core
+        // Document or Field classes
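+        // (MyField above is the whole trick: anything implementing
+        // IIndexableField -- a name, a field type, and one of a string,
+        // reader, binary, or numeric value -- can be passed to AddDocument,
+        // which accepts an IEnumerable<IIndexableField> rather than a
+        // concrete Document.)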
+        [Test]
+        public virtual void TestArbitraryFields()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+
+            int NUM_DOCS = AtLeast(27);
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: " + NUM_DOCS + " docs");
+            }
+            int[] fieldsPerDoc = new int[NUM_DOCS];
+            int baseCount = 0;
+
+            for (int docCount = 0; docCount < NUM_DOCS; docCount++)
+            {
+                int fieldCount = TestUtil.NextInt(Random(), 1, 17);
+                fieldsPerDoc[docCount] = fieldCount - 1;
+
+                int finalDocCount = docCount;
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: " + fieldCount + " fields in doc " + docCount);
+                }
+
+                int finalBaseCount = baseCount;
+                baseCount += fieldCount - 1;
+
+                w.AddDocument(new IterableAnonymousInnerClassHelper(this, fieldCount, finalDocCount, finalBaseCount));
+            }
+
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            IndexSearcher s = NewSearcher(r);
+            int counter = 0;
+            for (int id = 0; id < NUM_DOCS; id++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: verify doc id=" + id + " (" + fieldsPerDoc[id] + " fields) counter=" + counter);
+                }
+                TopDocs hits = s.Search(new TermQuery(new Term("id", "" + id)), 1);
+                Assert.AreEqual(1, hits.TotalHits);
+                int docID = hits.ScoreDocs[0].Doc;
+                Document doc = s.Doc(docID);
+                int endCounter = counter + fieldsPerDoc[id];
+                while (counter < endCounter)
+                {
+                    string name = "f" + counter;
+                    int fieldID = counter % 10;
+
+                    bool stored = (counter & 1) == 0 || fieldID == 3;
+                    bool binary = fieldID == 3;
+                    bool indexed = fieldID != 3;
+
+                    string stringValue;
+                    if (fieldID != 3 && fieldID != 9)
+                    {
+                        stringValue = "text " + counter;
+                    }
+                    else
+                    {
+                        stringValue = null;
+                    }
+
+                    // stored:
+                    if (stored)
+                    {
+                        IIndexableField f = doc.GetField(name);
+                        Assert.IsNotNull(f, "doc " + id + " doesn't have field f" + counter);
+                        if (binary)
+                        {
+                            Assert.IsNotNull(f, "doc " + id + " doesn't have field f" + counter);
+                            BytesRef b = f.GetBinaryValue();
+                            Assert.IsNotNull(b);
+                            Assert.AreEqual(10, b.Length);
+                            for (int idx = 0; idx < 10; idx++)
+                            {
+                                Assert.AreEqual((byte)(idx + counter), b.Bytes[b.Offset + idx]);
+                            }
+                        }
+                        else
+                        {
+                            Debug.Assert(stringValue != null);
+                            Assert.AreEqual(stringValue, f.GetStringValue());
+                        }
+                    }
+
+                    if (indexed)
+                    {
+                        bool tv = counter % 2 == 1 && fieldID != 9;
+                        if (tv)
+                        {
+                            Terms tfv = r.GetTermVectors(docID).GetTerms(name);
+                            Assert.IsNotNull(tfv);
+                            TermsEnum termsEnum = tfv.GetIterator(null);
+                            Assert.AreEqual(new BytesRef("" + counter), termsEnum.Next());
+                            Assert.AreEqual(1, termsEnum.TotalTermFreq);
+                            DocsAndPositionsEnum dpEnum = termsEnum.DocsAndPositions(null, null);
+                            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                            Assert.AreEqual(1, dpEnum.Freq);
+                            Assert.AreEqual(1, dpEnum.NextPosition());
+
+                            Assert.AreEqual(new BytesRef("text"), termsEnum.Next());
+                            Assert.AreEqual(1, termsEnum.TotalTermFreq);
+                            dpEnum = termsEnum.DocsAndPositions(null, dpEnum);
+                            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                            Assert.AreEqual(1, dpEnum.Freq);
+                            Assert.AreEqual(0, dpEnum.NextPosition());
+
+                            Assert.IsNull(termsEnum.Next());
+
+                            // TODO: offsets
+                        }
+                        else
+                        {
+                            Fields vectors = r.GetTermVectors(docID);
+                            Assert.IsTrue(vectors == null || vectors.GetTerms(name) == null);
+                        }
+
+                        BooleanQuery bq = new BooleanQuery();
+                        bq.Add(new TermQuery(new Term("id", "" + id)), Occur.MUST);
+                        bq.Add(new TermQuery(new Term(name, "text")), Occur.MUST);
+                        TopDocs hits2 = s.Search(bq, 1);
+                        Assert.AreEqual(1, hits2.TotalHits);
+                        Assert.AreEqual(docID, hits2.ScoreDocs[0].Doc);
+
+                        bq = new BooleanQuery();
+                        bq.Add(new TermQuery(new Term("id", "" + id)), Occur.MUST);
+                        bq.Add(new TermQuery(new Term(name, "" + counter)), Occur.MUST);
+                        TopDocs hits3 = s.Search(bq, 1);
+                        Assert.AreEqual(1, hits3.TotalHits);
+                        Assert.AreEqual(docID, hits3.ScoreDocs[0].Doc);
+                    }
+
+                    counter++;
+                }
+            }
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        private class IterableAnonymousInnerClassHelper : IEnumerable<IIndexableField>
+        {
+            private readonly TestIndexableField OuterInstance;
+
+            private int FieldCount;
+            private int FinalDocCount;
+            private int FinalBaseCount;
+
+            public IterableAnonymousInnerClassHelper(TestIndexableField outerInstance, int fieldCount, int finalDocCount, int finalBaseCount)
+            {
+                this.OuterInstance = outerInstance;
+                this.FieldCount = fieldCount;
+                this.FinalDocCount = finalDocCount;
+                this.FinalBaseCount = finalBaseCount;
+            }
+
+            public virtual IEnumerator<IIndexableField> GetEnumerator()
+            {
+                return new IteratorAnonymousInnerClassHelper(this, OuterInstance);
+            }
+
+            System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
+            {
+                return GetEnumerator();
+            }
+
+            private class IteratorAnonymousInnerClassHelper : IEnumerator<IIndexableField>
+            {
+                private readonly IterableAnonymousInnerClassHelper OuterInstance;
+                private readonly TestIndexableField OuterTextIndexableField;
+
+                public IteratorAnonymousInnerClassHelper(IterableAnonymousInnerClassHelper outerInstance, TestIndexableField outerTextIndexableField)
+                {
+                    this.OuterInstance = outerInstance;
+                    OuterTextIndexableField = outerTextIndexableField;
+                }
+
+                internal int fieldUpto;
+                private IIndexableField current;
+
+                public bool MoveNext()
+                {
+                    if (fieldUpto >= OuterInstance.FieldCount)
+                    {
+                        return false;
+                    }
+
+                    Debug.Assert(fieldUpto < OuterInstance.FieldCount);
+                    if (fieldUpto == 0)
+                    {
+                        fieldUpto = 1;
+                        current = OuterTextIndexableField.NewStringField("id", "" + OuterInstance.FinalDocCount, Field.Store.YES);
+                    }
+                    else
+                    {
+                        current = new MyField(OuterTextIndexableField, OuterInstance.FinalBaseCount + (fieldUpto++ - 1));
+                    }
+
+                    return true;
+                }
+
+                public IIndexableField Current
+                {
+                    get { return current; }
+                }
+
+                object System.Collections.IEnumerator.Current
+                {
+                    get { return Current; }
+                }
+
+                public void Dispose()
+                {
+                }
+
+                public void Reset()
+                {
+                    throw new NotImplementedException();
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIntBlockPool.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIntBlockPool.cs b/src/Lucene.Net.Tests/Index/TestIntBlockPool.cs
new file mode 100644
index 0000000..98fc2d2
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIntBlockPool.cs
@@ -0,0 +1,185 @@
+using System.Collections.Generic;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Counter = Lucene.Net.Util.Counter;
+    using Int32BlockPool = Lucene.Net.Util.Int32BlockPool;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RamUsageEstimator = Lucene.Net.Util.RamUsageEstimator;
+
+    /// <summary>
+    /// Tests basic <seealso cref="Int32BlockPool"/> functionality.
+    /// </summary>
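+    /// <remarks>
+    /// Slice API as exercised here: SliceWriter.StartNewSlice() returns a
+    /// start offset, WriteInt32() appends, and CurrentOffset marks the end;
+    /// SliceReader.Reset(start, end) then replays exactly the ints written
+    /// into that range.
+    /// </remarks>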
+    [TestFixture]
+    public class TestIntBlockPool : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestSingleWriterReader()
+        {
+            Counter bytesUsed = Util.Counter.NewCounter();
+            Int32BlockPool pool = new Int32BlockPool(new ByteTrackingAllocator(bytesUsed));
+
+            for (int j = 0; j < 2; j++)
+            {
+                Int32BlockPool.SliceWriter writer = new Int32BlockPool.SliceWriter(pool);
+                int start = writer.StartNewSlice();
+                int num = AtLeast(100);
+                for (int i = 0; i < num; i++)
+                {
+                    writer.WriteInt32(i);
+                }
+
+                int upto = writer.CurrentOffset;
+                Int32BlockPool.SliceReader reader = new Int32BlockPool.SliceReader(pool);
+                reader.Reset(start, upto);
+                for (int i = 0; i < num; i++)
+                {
+                    Assert.AreEqual(i, reader.ReadInt32());
+                }
+                Assert.IsTrue(reader.EndOfSlice());
+                if (Random().NextBoolean())
+                {
+                    pool.Reset(true, false);
+                    Assert.AreEqual(0, bytesUsed.Get());
+                }
+                else
+                {
+                    pool.Reset(true, true);
+                    Assert.AreEqual(Int32BlockPool.INT32_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT32, bytesUsed.Get());
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestMultipleWriterReader()
+        {
+            Counter bytesUsed = Util.Counter.NewCounter();
+            Int32BlockPool pool = new Int32BlockPool(new ByteTrackingAllocator(bytesUsed));
+            for (int j = 0; j < 2; j++)
+            {
+                IList<StartEndAndValues> holders = new List<StartEndAndValues>();
+                int num = AtLeast(4);
+                for (int i = 0; i < num; i++)
+                {
+                    holders.Add(new StartEndAndValues(Random().Next(1000)));
+                }
+                Int32BlockPool.SliceWriter writer = new Int32BlockPool.SliceWriter(pool);
+                Int32BlockPool.SliceReader reader = new Int32BlockPool.SliceReader(pool);
+
+                int numValuesToWrite = AtLeast(10000);
+                for (int i = 0; i < numValuesToWrite; i++)
+                {
+                    StartEndAndValues values = holders[Random().Next(holders.Count)];
+                    if (values.ValueCount == 0)
+                    {
+                        values.Start = writer.StartNewSlice();
+                    }
+                    else
+                    {
+                        writer.Reset(values.End);
+                    }
+                    writer.WriteInt32(values.NextValue());
+                    values.End = writer.CurrentOffset;
+                    if (Random().Next(5) == 0)
+                    {
+                        // pick one and read back its ints
+                        AssertReader(reader, holders[Random().Next(holders.Count)]);
+                    }
+                }
+
+                while (holders.Count > 0)
+                {
+                    int randIndex = Random().Next(holders.Count);
+                    StartEndAndValues values = holders[randIndex];
+                    holders.RemoveAt(randIndex);
+                    AssertReader(reader, values);
+                }
+                if (Random().NextBoolean())
+                {
+                    pool.Reset(true, false);
+                    Assert.AreEqual(0, bytesUsed.Get());
+                }
+                else
+                {
+                    pool.Reset(true, true);
+                    Assert.AreEqual(Int32BlockPool.INT32_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT32, bytesUsed.Get());
+                }
+            }
+        }
+
+        private class ByteTrackingAllocator : Int32BlockPool.Allocator
+        {
+            internal readonly Counter BytesUsed;
+
+            public ByteTrackingAllocator(Counter bytesUsed)
+                : this(Int32BlockPool.INT32_BLOCK_SIZE, bytesUsed)
+            {
+            }
+
+            public ByteTrackingAllocator(int blockSize, Counter bytesUsed)
+                : base(blockSize)
+            {
+                this.BytesUsed = bytesUsed;
+            }
+
+            public override int[] GetInt32Block()
+            {
+                BytesUsed.AddAndGet(m_blockSize * RamUsageEstimator.NUM_BYTES_INT32);
+                return new int[m_blockSize];
+            }
+
+            public override void RecycleInt32Blocks(int[][] blocks, int start, int end)
+            {
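+                // blocks[start..end) are being recycled; subtract their bytes from the running total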
+                BytesUsed.AddAndGet(-((end - start) * m_blockSize * RamUsageEstimator.NUM_BYTES_INT32));
+            }
+        }
+
+        private void AssertReader(Int32BlockPool.SliceReader reader, StartEndAndValues values)
+        {
+            reader.Reset(values.Start, values.End);
+            for (int i = 0; i < values.ValueCount; i++)
+            {
+                Assert.AreEqual(values.ValueOffset + i, reader.ReadInt32());
+            }
+            Assert.IsTrue(reader.EndOfSlice());
+        }
+
+        private class StartEndAndValues
+        {
+            internal int ValueOffset;
+            internal int ValueCount;
+            internal int Start;
+            internal int End;
+
+            public StartEndAndValues(int valueOffset)
+            {
+                this.ValueOffset = valueOffset;
+            }
+
+            public virtual int NextValue()
+            {
+                return ValueOffset + ValueCount++;
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIsCurrent.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIsCurrent.cs b/src/Lucene.Net.Tests/Index/TestIsCurrent.cs
new file mode 100644
index 0000000..d975080
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIsCurrent.cs
@@ -0,0 +1,109 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Store;
+    using Lucene.Net.Util;
+    using NUnit.Framework;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Document = Documents.Document;
+    using Field = Field;
+
+    [TestFixture]
+    public class TestIsCurrent : LuceneTestCase
+    {
+        private RandomIndexWriter Writer;
+
+        private Directory Directory;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+
+            // initialize directory
+            Directory = NewDirectory();
+            Writer = new RandomIndexWriter(Random(), Directory, Similarity, TimeZone);
+
+            // write document
+            Document doc = new Document();
+            doc.Add(NewTextField("UUID", "1", Field.Store.YES));
+            Writer.AddDocument(doc);
+            Writer.Commit();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            base.TearDown();
+            Writer.Dispose();
+            Directory.Dispose();
+        }
+
+        /// <summary>
+        /// Test case showing that a committed delete-by-term makes an open reader stale
+        /// </summary>
+        [Test]
+        public virtual void TestDeleteByTermIsCurrent()
+        {
+            // get reader
+            DirectoryReader reader = Writer.Reader;
+
+            // assert the index has one document and the reader is up to date
+            Assert.AreEqual(1, Writer.NumDocs, "One document should be in the index");
+            Assert.IsTrue(reader.IsCurrent, "One document added, reader should be current");
+
+            // remove document
+            Term idTerm = new Term("UUID", "1");
+            Writer.DeleteDocuments(idTerm);
+            Writer.Commit();
+
+            // assert document has been deleted (index changed), reader is stale
+            Assert.AreEqual(0, Writer.NumDocs, "Document should be removed");
+            Assert.IsFalse(reader.IsCurrent, "Reader should be stale");
+
+            reader.Dispose();
+        }
+
+        /// <summary>
+        /// Test case showing that Writer.DeleteAll() works as expected
+        /// </summary>
+        [Test]
+        public virtual void TestDeleteAllIsCurrent()
+        {
+            // get reader
+            DirectoryReader reader = Writer.Reader;
+
+            // assert the index has one document and the reader is up to date
+            Assert.AreEqual(1, Writer.NumDocs, "One document should be in the index");
+            Assert.IsTrue(reader.IsCurrent, "One document added, reader should be current");
+
+            // remove all documents
+            Writer.DeleteAll();
+            Writer.Commit();
+
+            // assert document has been deleted (index changed), reader is stale
+            Assert.AreEqual(0, Writer.NumDocs, "Document should be removed");
+            Assert.IsFalse(reader.IsCurrent, "Reader should be stale");
+
+            reader.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestLazyProxSkipping.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestLazyProxSkipping.cs b/src/Lucene.Net.Tests/Index/TestLazyProxSkipping.cs
new file mode 100644
index 0000000..8f60ea8
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestLazyProxSkipping.cs
@@ -0,0 +1,258 @@
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Analysis;
+    using NUnit.Framework;
+    using System.IO;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using IndexInput = Lucene.Net.Store.IndexInput;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using PhraseQuery = Lucene.Net.Search.PhraseQuery;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using ScoreDoc = Lucene.Net.Search.ScoreDoc;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// Tests lazy skipping on the proximity file.
+    /// </summary>
+    [TestFixture]
+    public class TestLazyProxSkipping : LuceneTestCase
+    {
+        private IndexSearcher Searcher;
+        private int SeeksCounter = 0;
+
+        private string Field = "tokens";
+        private string Term1 = "xx";
+        private string Term2 = "yy";
+        private string Term3 = "zz";
+
+        private class SeekCountingDirectory : MockDirectoryWrapper
+        {
+            private readonly TestLazyProxSkipping OuterInstance;
+
+            public SeekCountingDirectory(TestLazyProxSkipping outerInstance, Directory @delegate)
+                : base(Random(), @delegate)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override IndexInput OpenInput(string name, IOContext context)
+            {
+                IndexInput ii = base.OpenInput(name, context);
+                if (name.EndsWith(".prx") || name.EndsWith(".pos"))
+                {
+                    // decorate the prox stream with a wrapper class that counts the number of calls to Seek()
+                    ii = new SeeksCountingStream(OuterInstance, ii);
+                }
+                return ii;
+            }
+        }
+
+        private void CreateIndex(int numHits)
+        {
+            int numDocs = 500;
+
+            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper(this);
+            Directory directory = new SeekCountingDirectory(this, new RAMDirectory());
+            // note: test explicitly disables payloads
+            IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(false)));
+
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                string content;
+                if (i % (numDocs / numHits) == 0)
+                {
+                    // add a document that matches the query "term1 term2"
+                    content = this.Term1 + " " + this.Term2;
+                }
+                else if (i % 15 == 0)
+                {
+                    // add a document that only contains term1
+                    content = this.Term1 + " " + this.Term1;
+                }
+                else
+                {
+                    // add a document that contains term2 but not term1
+                    content = this.Term3 + " " + this.Term2;
+                }
+
+                doc.Add(NewTextField(this.Field, content, Documents.Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+
+            // make sure the index has only a single segment
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            SegmentReader reader = GetOnlySegmentReader(DirectoryReader.Open(directory));
+
+            this.Searcher = NewSearcher(reader);
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private readonly TestLazyProxSkipping OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper(TestLazyProxSkipping outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
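+                // MockTokenizer.WHITESPACE splits on whitespace; the final argument enables lowercasing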
+                return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, true));
+            }
+        }
+
+        private ScoreDoc[] Search()
+        {
+            // create PhraseQuery "term1 term2" and search
+            PhraseQuery pq = new PhraseQuery();
+            pq.Add(new Term(this.Field, this.Term1));
+            pq.Add(new Term(this.Field, this.Term2));
+            return this.Searcher.Search(pq, null, 1000).ScoreDocs;
+        }
+
+        private void PerformTest(int numHits)
+        {
+            CreateIndex(numHits);
+            this.SeeksCounter = 0;
+            ScoreDoc[] hits = Search();
+            // verify that the right number of docs was found
+            Assert.AreEqual(numHits, hits.Length);
+
+            // check that the number of calls to Seek() does not exceed the number of hits
+            Assert.IsTrue(this.SeeksCounter > 0);
+            Assert.IsTrue(this.SeeksCounter <= numHits + 1, "seeksCounter=" + this.SeeksCounter + " numHits=" + numHits);
+            Searcher.IndexReader.Dispose();
+        }
+
+        [Test]
+        public virtual void TestLazySkipping()
+        {
+            string fieldFormat = TestUtil.GetPostingsFormat(this.Field);
+            AssumeFalse("this test cannot run with Memory postings format", fieldFormat.Equals("Memory"));
+            AssumeFalse("this test cannot run with Direct postings format", fieldFormat.Equals("Direct"));
+            AssumeFalse("this test cannot run with SimpleText postings format", fieldFormat.Equals("SimpleText"));
+
+            // test that only the minimum number of seeks is performed
+            PerformTest(5);
+            PerformTest(10);
+        }
+
+        [Test]
+        public virtual void TestSeek()
+        {
+            Directory directory = NewDirectory();
+            IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            for (int i = 0; i < 10; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField(this.Field, "a b", Documents.Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+
+            writer.Dispose();
+            IndexReader reader = DirectoryReader.Open(directory);
+
+            DocsAndPositionsEnum tp = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), this.Field, new BytesRef("b"));
+
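+            // every document contains "a b", so "b" is expected once per doc, at position 1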
+            for (int i = 0; i < 10; i++)
+            {
+                tp.NextDoc();
+                Assert.AreEqual(i, tp.DocID);
+                Assert.AreEqual(1, tp.NextPosition());
+            }
+
+            tp = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), this.Field, new BytesRef("a"));
+
+            for (int i = 0; i < 10; i++)
+            {
+                tp.NextDoc();
+                Assert.AreEqual(i, tp.DocID);
+                Assert.AreEqual(0, tp.NextPosition());
+            }
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        // Wraps an IndexInput so that the number of invocations of Seek() can be counted
+        internal class SeeksCountingStream : IndexInput
+        {
+            private readonly TestLazyProxSkipping OuterInstance;
+
+            internal IndexInput Input;
+
+            internal SeeksCountingStream(TestLazyProxSkipping outerInstance, IndexInput input)
+                : base("SeekCountingStream(" + input + ")")
+            {
+                this.OuterInstance = outerInstance;
+                this.Input = input;
+            }
+
+            public override byte ReadByte()
+            {
+                return this.Input.ReadByte();
+            }
+
+            public override void ReadBytes(byte[] b, int offset, int len)
+            {
+                this.Input.ReadBytes(b, offset, len);
+            }
+
+            public override void Dispose()
+            {
+                this.Input.Dispose();
+            }
+
+            public override long FilePointer
+            {
+                get
+                {
+                    return this.Input.FilePointer;
+                }
+            }
+
+            public override void Seek(long pos)
+            {
+                OuterInstance.SeeksCounter++;
+                this.Input.Seek(pos);
+            }
+
+            public override long Length
+            {
+                get { return this.Input.Length; }
+            }
+
+            public override object Clone()
+            {
+                return new SeeksCountingStream(OuterInstance, (IndexInput)this.Input.Clone());
+            }
+        }
+    }
+}
\ No newline at end of file


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/Lucene45/TestLucene45DocValuesFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene45/TestLucene45DocValuesFormat.cs b/src/Lucene.Net.Tests/Codecs/Lucene45/TestLucene45DocValuesFormat.cs
new file mode 100644
index 0000000..dc18580
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/Lucene45/TestLucene45DocValuesFormat.cs
@@ -0,0 +1,565 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Codecs.Lucene45
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using BaseCompressingDocValuesFormatTestCase = Lucene.Net.Index.BaseCompressingDocValuesFormatTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// Tests Lucene45DocValuesFormat
+    /// </summary>
+    public class TestLucene45DocValuesFormat : BaseCompressingDocValuesFormatTestCase
+    {
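+        // wrap the codec so that every field uses Lucene45DocValuesFormat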
+        private readonly Codec Codec_Renamed = TestUtil.AlwaysDocValuesFormat(new Lucene45DocValuesFormat());
+
+        protected override Codec Codec
+        {
+            get
+            {
+                return Codec_Renamed;
+            }
+        }
+
+        #region BaseCompressingDocValuesFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestUniqueValuesCompression()
+        {
+            base.TestUniqueValuesCompression();
+        }
+
+        [Test]
+        public override void TestDateCompression()
+        {
+            base.TestDateCompression();
+        }
+
+        [Test]
+        public override void TestSingleBigValueCompression()
+        {
+            base.TestSingleBigValueCompression();
+        }
+
+        #endregion
+
+        #region BaseDocValuesFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestOneNumber()
+        {
+            base.TestOneNumber();
+        }
+
+        [Test]
+        public override void TestOneFloat()
+        {
+            base.TestOneFloat();
+        }
+
+        [Test]
+        public override void TestTwoNumbers()
+        {
+            base.TestTwoNumbers();
+        }
+
+        [Test]
+        public override void TestTwoBinaryValues()
+        {
+            base.TestTwoBinaryValues();
+        }
+
+        [Test]
+        public override void TestTwoFieldsMixed()
+        {
+            base.TestTwoFieldsMixed();
+        }
+
+        [Test]
+        public override void TestThreeFieldsMixed()
+        {
+            base.TestThreeFieldsMixed();
+        }
+
+        [Test]
+        public override void TestThreeFieldsMixed2()
+        {
+            base.TestThreeFieldsMixed2();
+        }
+
+        [Test]
+        public override void TestTwoDocumentsNumeric()
+        {
+            base.TestTwoDocumentsNumeric();
+        }
+
+        [Test]
+        public override void TestTwoDocumentsMerged()
+        {
+            base.TestTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestBigNumericRange()
+        {
+            base.TestBigNumericRange();
+        }
+
+        [Test]
+        public override void TestBigNumericRange2()
+        {
+            base.TestBigNumericRange2();
+        }
+
+        [Test]
+        public override void TestBytes()
+        {
+            base.TestBytes();
+        }
+
+        [Test]
+        public override void TestBytesTwoDocumentsMerged()
+        {
+            base.TestBytesTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestSortedBytes()
+        {
+            base.TestSortedBytes();
+        }
+
+        [Test]
+        public override void TestSortedBytesTwoDocuments()
+        {
+            base.TestSortedBytesTwoDocuments();
+        }
+
+        [Test]
+        public override void TestSortedBytesThreeDocuments()
+        {
+            base.TestSortedBytesThreeDocuments();
+        }
+
+        [Test]
+        public override void TestSortedBytesTwoDocumentsMerged()
+        {
+            base.TestSortedBytesTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestSortedMergeAwayAllValues()
+        {
+            base.TestSortedMergeAwayAllValues();
+        }
+
+        [Test]
+        public override void TestBytesWithNewline()
+        {
+            base.TestBytesWithNewline();
+        }
+
+        [Test]
+        public override void TestMissingSortedBytes()
+        {
+            base.TestMissingSortedBytes();
+        }
+
+        [Test]
+        public override void TestSortedTermsEnum()
+        {
+            base.TestSortedTermsEnum();
+        }
+
+        [Test]
+        public override void TestEmptySortedBytes()
+        {
+            base.TestEmptySortedBytes();
+        }
+
+        [Test]
+        public override void TestEmptyBytes()
+        {
+            base.TestEmptyBytes();
+        }
+
+        [Test]
+        public override void TestVeryLargeButLegalBytes()
+        {
+            base.TestVeryLargeButLegalBytes();
+        }
+
+        [Test]
+        public override void TestVeryLargeButLegalSortedBytes()
+        {
+            base.TestVeryLargeButLegalSortedBytes();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnBytes()
+        {
+            base.TestCodecUsesOwnBytes();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnSortedBytes()
+        {
+            base.TestCodecUsesOwnSortedBytes();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnBytesEachTime()
+        {
+            base.TestCodecUsesOwnBytesEachTime();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnSortedBytesEachTime()
+        {
+            base.TestCodecUsesOwnSortedBytesEachTime();
+        }
+
+        /*
+         * Simple test case to show how to use the API
+         */
+        [Test]
+        public override void TestDocValuesSimple()
+        {
+            base.TestDocValuesSimple();
+        }
+
+        [Test]
+        public override void TestRandomSortedBytes()
+        {
+            base.TestRandomSortedBytes();
+        }
+
+        [Test]
+        public override void TestBooleanNumericsVsStoredFields()
+        {
+            base.TestBooleanNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestByteNumericsVsStoredFields()
+        {
+            base.TestByteNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestByteMissingVsFieldCache()
+        {
+            base.TestByteMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestShortNumericsVsStoredFields()
+        {
+            base.TestShortNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestShortMissingVsFieldCache()
+        {
+            base.TestShortMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestIntNumericsVsStoredFields()
+        {
+            base.TestIntNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestIntMissingVsFieldCache()
+        {
+            base.TestIntMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestLongNumericsVsStoredFields()
+        {
+            base.TestLongNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestLongMissingVsFieldCache()
+        {
+            base.TestLongMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestBinaryFixedLengthVsStoredFields()
+        {
+            base.TestBinaryFixedLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestBinaryVariableLengthVsStoredFields()
+        {
+            base.TestBinaryVariableLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedFixedLengthVsStoredFields()
+        {
+            base.TestSortedFixedLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedFixedLengthVsFieldCache()
+        {
+            base.TestSortedFixedLengthVsFieldCache();
+        }
+
+        [Test]
+        public override void TestSortedVariableLengthVsFieldCache()
+        {
+            base.TestSortedVariableLengthVsFieldCache();
+        }
+
+        [Test]
+        public override void TestSortedVariableLengthVsStoredFields()
+        {
+            base.TestSortedVariableLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetOneValue()
+        {
+            base.TestSortedSetOneValue();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoFields()
+        {
+            base.TestSortedSetTwoFields();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsMerged()
+        {
+            base.TestSortedSetTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoValues()
+        {
+            base.TestSortedSetTwoValues();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoValuesUnordered()
+        {
+            base.TestSortedSetTwoValuesUnordered();
+        }
+
+        [Test]
+        public override void TestSortedSetThreeValuesTwoDocs()
+        {
+            base.TestSortedSetThreeValuesTwoDocs();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsLastMissing()
+        {
+            base.TestSortedSetTwoDocumentsLastMissing();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsLastMissingMerge()
+        {
+            base.TestSortedSetTwoDocumentsLastMissingMerge();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsFirstMissing()
+        {
+            base.TestSortedSetTwoDocumentsFirstMissing();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsFirstMissingMerge()
+        {
+            base.TestSortedSetTwoDocumentsFirstMissingMerge();
+        }
+
+        [Test]
+        public override void TestSortedSetMergeAwayAllValues()
+        {
+            base.TestSortedSetMergeAwayAllValues();
+        }
+
+        [Test]
+        public override void TestSortedSetTermsEnum()
+        {
+            base.TestSortedSetTermsEnum();
+        }
+
+        [Test]
+        public override void TestSortedSetFixedLengthVsStoredFields()
+        {
+            base.TestSortedSetFixedLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetVariableLengthVsStoredFields()
+        {
+            base.TestSortedSetVariableLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetFixedLengthSingleValuedVsStoredFields()
+        {
+            base.TestSortedSetFixedLengthSingleValuedVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetVariableLengthSingleValuedVsStoredFields()
+        {
+            base.TestSortedSetVariableLengthSingleValuedVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetFixedLengthVsUninvertedField()
+        {
+            base.TestSortedSetFixedLengthVsUninvertedField();
+        }
+
+        [Test]
+        public override void TestSortedSetVariableLengthVsUninvertedField()
+        {
+            base.TestSortedSetVariableLengthVsUninvertedField();
+        }
+
+        [Test]
+        public override void TestGCDCompression()
+        {
+            base.TestGCDCompression();
+        }
+
+        [Test]
+        public override void TestZeros()
+        {
+            base.TestZeros();
+        }
+
+        [Test]
+        public override void TestZeroOrMin()
+        {
+            base.TestZeroOrMin();
+        }
+
+        [Test]
+        public override void TestTwoNumbersOneMissing()
+        {
+            base.TestTwoNumbersOneMissing();
+        }
+
+        [Test]
+        public override void TestTwoNumbersOneMissingWithMerging()
+        {
+            base.TestTwoNumbersOneMissingWithMerging();
+        }
+
+        [Test]
+        public override void TestThreeNumbersOneMissingWithMerging()
+        {
+            base.TestThreeNumbersOneMissingWithMerging();
+        }
+
+        [Test]
+        public override void TestTwoBytesOneMissing()
+        {
+            base.TestTwoBytesOneMissing();
+        }
+
+        [Test]
+        public override void TestTwoBytesOneMissingWithMerging()
+        {
+            base.TestTwoBytesOneMissingWithMerging();
+        }
+
+        [Test]
+        public override void TestThreeBytesOneMissingWithMerging()
+        {
+            base.TestThreeBytesOneMissingWithMerging();
+        }
+
+        // LUCENE-4853
+        [Test]
+        public override void TestHugeBinaryValues()
+        {
+            base.TestHugeBinaryValues();
+        }
+
+        // TODO: get this out of here and into the deprecated codecs (4.0, 4.2)
+        [Test]
+        public override void TestHugeBinaryValueLimit()
+        {
+            base.TestHugeBinaryValueLimit();
+        }
+
+        /// <summary>
+        /// Tests dv against stored fields with threads (binary/numeric/sorted, no missing)
+        /// </summary>
+        [Test]
+        public override void TestThreads()
+        {
+            base.TestThreads();
+        }
+
+        /// <summary>
+        /// Tests dv against stored fields with threads (all types + missing)
+        /// </summary>
+        [Test]
+        public override void TestThreads2()
+        {
+            base.TestThreads2();
+        }
+
+        // LUCENE-5218
+        [Test]
+        public override void TestEmptyBinaryValueOnPageSizes()
+        {
+            base.TestEmptyBinaryValueOnPageSizes();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs b/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs
new file mode 100644
index 0000000..ddb4527
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldDocValuesFormat.cs
@@ -0,0 +1,658 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Codecs.PerField
+{
+    using Lucene.Net.Index;
+    using NUnit.Framework;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using BaseDocValuesFormatTestCase = Lucene.Net.Index.BaseDocValuesFormatTestCase;
+    using BinaryDocValues = Lucene.Net.Index.BinaryDocValues;
+    using BinaryDocValuesField = BinaryDocValuesField;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using Lucene46Codec = Lucene.Net.Codecs.Lucene46.Lucene46Codec;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using NumericDocValues = Lucene.Net.Index.NumericDocValues;
+    using NumericDocValuesField = NumericDocValuesField;
+    using Query = Lucene.Net.Search.Query;
+    using RandomCodec = Lucene.Net.Index.RandomCodec;
+    using Term = Lucene.Net.Index.Term;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TopDocs = Lucene.Net.Search.TopDocs;
+
+    /// <summary>
+    /// Basic tests of PerFieldDocValuesFormat
+    /// </summary>
+    [TestFixture]
+    public class TestPerFieldDocValuesFormat : BaseDocValuesFormatTestCase
+    {
+        private Codec Codec_Renamed;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            Codec_Renamed = new RandomCodec(new Random(Random().Next()), new HashSet<string>());
+            base.SetUp();
+        }
+
+        protected override Codec Codec
+        {
+            get
+            {
+                return Codec_Renamed;
+            }
+        }
+
+        protected internal override bool CodecAcceptsHugeBinaryValues(string field)
+        {
+            return TestUtil.FieldSupportsHugeBinaryDocValues(field);
+        }
+
+        // just a simple trivial test
+        // TODO: we should come up with a test that somehow checks that segment suffix
+        // is respected by all codec apis (not just docvalues and postings)
+        [Test]
+        public virtual void TestTwoFieldsTwoFormats()
+        {
+            Analyzer analyzer = new MockAnalyzer(Random());
+
+            Directory directory = NewDirectory();
+            // we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            DocValuesFormat fast = DocValuesFormat.ForName("Lucene45");
+            DocValuesFormat slow = DocValuesFormat.ForName("Lucene45");
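+            // both lookups resolve to the Lucene45 format; two handles are needed only to exercise the per-field routing below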
+            iwc.SetCodec(new Lucene46CodecAnonymousInnerClassHelper(this, fast, slow));
+            IndexWriter iwriter = new IndexWriter(directory, iwc);
+            Document doc = new Document();
+            string longTerm = "longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm";
+            string text = "this is the text to be indexed. " + longTerm;
+            doc.Add(NewTextField("fieldname", text, Field.Store.YES));
+            doc.Add(new NumericDocValuesField("dv1", 5));
+            doc.Add(new BinaryDocValuesField("dv2", new BytesRef("hello world")));
+            iwriter.AddDocument(doc);
+            iwriter.Dispose();
+
+            // Now search the index:
+            IndexReader ireader = DirectoryReader.Open(directory); // read-only=true
+            IndexSearcher isearcher = NewSearcher(ireader);
+
+            Assert.AreEqual(1, isearcher.Search(new TermQuery(new Term("fieldname", longTerm)), 1).TotalHits);
+            Query query = new TermQuery(new Term("fieldname", "text"));
+            TopDocs hits = isearcher.Search(query, null, 1);
+            Assert.AreEqual(1, hits.TotalHits);
+            BytesRef scratch = new BytesRef();
+            // Iterate through the results:
+            for (int i = 0; i < hits.ScoreDocs.Length; i++)
+            {
+                Document hitDoc = isearcher.Doc(hits.ScoreDocs[i].Doc);
+                Assert.AreEqual(text, hitDoc.Get("fieldname"));
+                Debug.Assert(ireader.Leaves.Count == 1);
+                NumericDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetNumericDocValues("dv1");
+                Assert.AreEqual(5, dv.Get(hits.ScoreDocs[i].Doc));
+                BinaryDocValues dv2 = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv2");
+                dv2.Get(hits.ScoreDocs[i].Doc, scratch);
+                Assert.AreEqual(new BytesRef("hello world"), scratch);
+            }
+
+            ireader.Dispose();
+            directory.Dispose();
+        }
+
+        private class Lucene46CodecAnonymousInnerClassHelper : Lucene46Codec
+        {
+            private readonly TestPerFieldDocValuesFormat OuterInstance;
+
+            private DocValuesFormat Fast;
+            private DocValuesFormat Slow;
+
+            public Lucene46CodecAnonymousInnerClassHelper(TestPerFieldDocValuesFormat outerInstance, DocValuesFormat fast, DocValuesFormat slow)
+            {
+                this.OuterInstance = outerInstance;
+                this.Fast = fast;
+                this.Slow = slow;
+            }
+
+            public override DocValuesFormat GetDocValuesFormatForField(string field)
+            {
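+                // route the "dv1" field to the "fast" format and every other field to "slow"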
+                if ("dv1".Equals(field))
+                {
+                    return Fast;
+                }
+                else
+                {
+                    return Slow;
+                }
+            }
+        }
+
+
+        #region BaseDocValuesFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestOneNumber()
+        {
+            base.TestOneNumber();
+        }
+
+        [Test]
+        public override void TestOneFloat()
+        {
+            base.TestOneFloat();
+        }
+
+        [Test]
+        public override void TestTwoNumbers()
+        {
+            base.TestTwoNumbers();
+        }
+
+        [Test]
+        public override void TestTwoBinaryValues()
+        {
+            base.TestTwoBinaryValues();
+        }
+
+        [Test]
+        public override void TestTwoFieldsMixed()
+        {
+            base.TestTwoFieldsMixed();
+        }
+
+        [Test]
+        public override void TestThreeFieldsMixed()
+        {
+            base.TestThreeFieldsMixed();
+        }
+
+        [Test]
+        public override void TestThreeFieldsMixed2()
+        {
+            base.TestThreeFieldsMixed2();
+        }
+
+        [Test]
+        public override void TestTwoDocumentsNumeric()
+        {
+            base.TestTwoDocumentsNumeric();
+        }
+
+        [Test]
+        public override void TestTwoDocumentsMerged()
+        {
+            base.TestTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestBigNumericRange()
+        {
+            base.TestBigNumericRange();
+        }
+
+        [Test]
+        public override void TestBigNumericRange2()
+        {
+            base.TestBigNumericRange2();
+        }
+
+        [Test]
+        public override void TestBytes()
+        {
+            base.TestBytes();
+        }
+
+        [Test]
+        public override void TestBytesTwoDocumentsMerged()
+        {
+            base.TestBytesTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestSortedBytes()
+        {
+            base.TestSortedBytes();
+        }
+
+        [Test]
+        public override void TestSortedBytesTwoDocuments()
+        {
+            base.TestSortedBytesTwoDocuments();
+        }
+
+        [Test]
+        public override void TestSortedBytesThreeDocuments()
+        {
+            base.TestSortedBytesThreeDocuments();
+        }
+
+        [Test]
+        public override void TestSortedBytesTwoDocumentsMerged()
+        {
+            base.TestSortedBytesTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestSortedMergeAwayAllValues()
+        {
+            base.TestSortedMergeAwayAllValues();
+        }
+
+        [Test]
+        public override void TestBytesWithNewline()
+        {
+            base.TestBytesWithNewline();
+        }
+
+        [Test]
+        public override void TestMissingSortedBytes()
+        {
+            base.TestMissingSortedBytes();
+        }
+
+        [Test]
+        public override void TestSortedTermsEnum()
+        {
+            base.TestSortedTermsEnum();
+        }
+
+        [Test]
+        public override void TestEmptySortedBytes()
+        {
+            base.TestEmptySortedBytes();
+        }
+
+        [Test]
+        public override void TestEmptyBytes()
+        {
+            base.TestEmptyBytes();
+        }
+
+        [Test]
+        public override void TestVeryLargeButLegalBytes()
+        {
+            base.TestVeryLargeButLegalBytes();
+        }
+
+        [Test]
+        public override void TestVeryLargeButLegalSortedBytes()
+        {
+            base.TestVeryLargeButLegalSortedBytes();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnBytes()
+        {
+            base.TestCodecUsesOwnBytes();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnSortedBytes()
+        {
+            base.TestCodecUsesOwnSortedBytes();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnBytesEachTime()
+        {
+            base.TestCodecUsesOwnBytesEachTime();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnSortedBytesEachTime()
+        {
+            base.TestCodecUsesOwnSortedBytesEachTime();
+        }
+
+        /*
+         * Simple test case to show how to use the API
+         */
+        [Test]
+        public override void TestDocValuesSimple()
+        {
+            base.TestDocValuesSimple();
+        }
+
+        [Test]
+        public override void TestRandomSortedBytes()
+        {
+            base.TestRandomSortedBytes();
+        }
+
+        [Test]
+        public override void TestBooleanNumericsVsStoredFields()
+        {
+            base.TestBooleanNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestByteNumericsVsStoredFields()
+        {
+            base.TestByteNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestByteMissingVsFieldCache()
+        {
+            base.TestByteMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestShortNumericsVsStoredFields()
+        {
+            base.TestShortNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestShortMissingVsFieldCache()
+        {
+            base.TestShortMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestIntNumericsVsStoredFields()
+        {
+            base.TestIntNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestIntMissingVsFieldCache()
+        {
+            base.TestIntMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestLongNumericsVsStoredFields()
+        {
+            base.TestLongNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestLongMissingVsFieldCache()
+        {
+            base.TestLongMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestBinaryFixedLengthVsStoredFields()
+        {
+            base.TestBinaryFixedLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestBinaryVariableLengthVsStoredFields()
+        {
+            base.TestBinaryVariableLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedFixedLengthVsStoredFields()
+        {
+            base.TestSortedFixedLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedFixedLengthVsFieldCache()
+        {
+            base.TestSortedFixedLengthVsFieldCache();
+        }
+
+        [Test]
+        public override void TestSortedVariableLengthVsFieldCache()
+        {
+            base.TestSortedVariableLengthVsFieldCache();
+        }
+
+        [Test]
+        public override void TestSortedVariableLengthVsStoredFields()
+        {
+            base.TestSortedVariableLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetOneValue()
+        {
+            base.TestSortedSetOneValue();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoFields()
+        {
+            base.TestSortedSetTwoFields();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsMerged()
+        {
+            base.TestSortedSetTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoValues()
+        {
+            base.TestSortedSetTwoValues();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoValuesUnordered()
+        {
+            base.TestSortedSetTwoValuesUnordered();
+        }
+
+        [Test]
+        public override void TestSortedSetThreeValuesTwoDocs()
+        {
+            base.TestSortedSetThreeValuesTwoDocs();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsLastMissing()
+        {
+            base.TestSortedSetTwoDocumentsLastMissing();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsLastMissingMerge()
+        {
+            base.TestSortedSetTwoDocumentsLastMissingMerge();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsFirstMissing()
+        {
+            base.TestSortedSetTwoDocumentsFirstMissing();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsFirstMissingMerge()
+        {
+            base.TestSortedSetTwoDocumentsFirstMissingMerge();
+        }
+
+        [Test]
+        public override void TestSortedSetMergeAwayAllValues()
+        {
+            base.TestSortedSetMergeAwayAllValues();
+        }
+
+        [Test]
+        public override void TestSortedSetTermsEnum()
+        {
+            base.TestSortedSetTermsEnum();
+        }
+
+        [Test]
+        public override void TestSortedSetFixedLengthVsStoredFields()
+        {
+            base.TestSortedSetFixedLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetVariableLengthVsStoredFields()
+        {
+            base.TestSortedSetVariableLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetFixedLengthSingleValuedVsStoredFields()
+        {
+            base.TestSortedSetFixedLengthSingleValuedVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetVariableLengthSingleValuedVsStoredFields()
+        {
+            base.TestSortedSetVariableLengthSingleValuedVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetFixedLengthVsUninvertedField()
+        {
+            base.TestSortedSetFixedLengthVsUninvertedField();
+        }
+
+        [Test]
+        public override void TestSortedSetVariableLengthVsUninvertedField()
+        {
+            base.TestSortedSetVariableLengthVsUninvertedField();
+        }
+
+        [Test]
+        public override void TestGCDCompression()
+        {
+            base.TestGCDCompression();
+        }
+
+        [Test]
+        public override void TestZeros()
+        {
+            base.TestZeros();
+        }
+
+        [Test]
+        public override void TestZeroOrMin()
+        {
+            base.TestZeroOrMin();
+        }
+
+        [Test]
+        public override void TestTwoNumbersOneMissing()
+        {
+            base.TestTwoNumbersOneMissing();
+        }
+
+        [Test]
+        public override void TestTwoNumbersOneMissingWithMerging()
+        {
+            base.TestTwoNumbersOneMissingWithMerging();
+        }
+
+        [Test]
+        public override void TestThreeNumbersOneMissingWithMerging()
+        {
+            base.TestThreeNumbersOneMissingWithMerging();
+        }
+
+        [Test]
+        public override void TestTwoBytesOneMissing()
+        {
+            base.TestTwoBytesOneMissing();
+        }
+
+        [Test]
+        public override void TestTwoBytesOneMissingWithMerging()
+        {
+            base.TestTwoBytesOneMissingWithMerging();
+        }
+
+        [Test]
+        public override void TestThreeBytesOneMissingWithMerging()
+        {
+            base.TestThreeBytesOneMissingWithMerging();
+        }
+
+        // LUCENE-4853
+        [Test]
+        public override void TestHugeBinaryValues()
+        {
+            base.TestHugeBinaryValues();
+        }
+
+        // TODO: get this out of here and into the deprecated codecs (4.0, 4.2)
+        [Test]
+        public override void TestHugeBinaryValueLimit()
+        {
+            base.TestHugeBinaryValueLimit();
+        }
+
+        /// <summary>
+        /// Tests dv against stored fields with threads (binary/numeric/sorted, no missing)
+        /// </summary>
+        [Test]
+        public override void TestThreads()
+        {
+            base.TestThreads();
+        }
+
+        /// <summary>
+        /// Tests dv against stored fields with threads (all types + missing)
+        /// </summary>
+        [Test]
+        public override void TestThreads2()
+        {
+            base.TestThreads2();
+        }
+
+        // LUCENE-5218
+        [Test]
+        public override void TestEmptyBinaryValueOnPageSizes()
+        {
+            base.TestEmptyBinaryValueOnPageSizes();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldPostingsFormat.cs b/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldPostingsFormat.cs
new file mode 100644
index 0000000..a6d5dd2
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldPostingsFormat.cs
@@ -0,0 +1,100 @@
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.Codecs.PerField
+{
+    using NUnit.Framework;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using BasePostingsFormatTestCase = Lucene.Net.Index.BasePostingsFormatTestCase;
+    using RandomCodec = Lucene.Net.Index.RandomCodec;
+
+    /// <summary>
+    /// Basic tests of PerFieldPostingsFormat
+    /// </summary>
+    [TestFixture]
+    public class TestPerFieldPostingsFormat : BasePostingsFormatTestCase
+    {
+        protected override Codec Codec
+        {
+            get
+            {
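+                // RandomCodec assigns a random postings format to each field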
+                return new RandomCodec(new Random(Random().Next()), new HashSet<string>());
+            }
+        }
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            // LUCENE TO-DO
+            AssumeTrue("The MockRandom PF randomizes content on the fly, so we can't check it", false);
+        }
+
+
+        #region BasePostingsFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestDocsOnly()
+        {
+            base.TestDocsOnly();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqs()
+        {
+            base.TestDocsAndFreqs();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositions()
+        {
+            base.TestDocsAndFreqsAndPositions();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndPayloads()
+        {
+            base.TestDocsAndFreqsAndPositionsAndPayloads();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndOffsets()
+        {
+            base.TestDocsAndFreqsAndPositionsAndOffsets();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndOffsetsAndPayloads()
+        {
+            base.TestDocsAndFreqsAndPositionsAndOffsetsAndPayloads();
+        }
+
+        [Test]
+        public override void TestRandom()
+        {
+            base.TestRandom();
+        }
+
+        #endregion
+
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldPostingsFormat2.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldPostingsFormat2.cs b/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldPostingsFormat2.cs
new file mode 100644
index 0000000..f758532
--- /dev/null
+++ b/src/Lucene.Net.Tests/Codecs/PerField/TestPerFieldPostingsFormat2.cs
@@ -0,0 +1,372 @@
+using System;
+using Lucene.Net.Documents;
+using Lucene.Net.Codecs.Lucene41;
+using Lucene.Net.Codecs.Lucene46;
+using Lucene.Net.Codecs.SimpleText;
+using Lucene.Net.Codecs.Pulsing;
+using Lucene.Net.Codecs.MockSep;
+using Lucene.Net.Util;
+using Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Analysis;
+using Lucene.Net.Randomized.Generators;
+using NUnit.Framework;
+using Lucene.Net.Store;
+
+namespace Lucene.Net.Codecs.PerField
+{
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+    using Document = Documents.Document;
+
+    // TODO: it would be better in this test to pull TermsEnums and check their
+    // concrete types, so we can verify PFPF is routing each field to the right
+    // format (see the hypothetical AssertFieldUsesFormat sketch below).
+    // For now we do term queries.
+    [TestFixture]
+    public class TestPerFieldPostingsFormat2 : LuceneTestCase
+    {
+        private IndexWriter NewWriter(Directory dir, IndexWriterConfig conf)
+        {
+            LogDocMergePolicy logDocMergePolicy = new LogDocMergePolicy();
+            logDocMergePolicy.NoCFSRatio = 0.0; // make sure we use plain files
+            conf.SetMergePolicy(logDocMergePolicy);
+
+            IndexWriter writer = new IndexWriter(dir, conf);
+            return writer;
+        }
+
+        private void AddDocs(IndexWriter writer, int numDocs)
+        {
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+                writer.AddDocument(doc);
+            }
+        }
+
+        private void AddDocs2(IndexWriter writer, int numDocs)
+        {
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField("content", "bbb", Field.Store.NO));
+                writer.AddDocument(doc);
+            }
+        }
+
+        private void AddDocs3(IndexWriter writer, int numDocs)
+        {
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField("content", "ccc", Field.Store.NO));
+                doc.Add(NewStringField("id", "" + i, Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+        }
+
+        /// <summary>
+        /// Test that heterogeneous index segments are merged successfully
+        /// </summary>
+        [Test]
+        public virtual void TestMergeUnusedPerFieldCodec()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwconf = NewIndexWriterConfig(TEST_VERSION_CURRENT, 
+                new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetCodec(new MockCodec());
+            IndexWriter writer = NewWriter(dir, iwconf);
+            AddDocs(writer, 10);
+            writer.Commit();
+            AddDocs3(writer, 10);
+            writer.Commit();
+            AddDocs2(writer, 10);
+            writer.Commit();
+            Assert.AreEqual(30, writer.MaxDoc);
+            TestUtil.CheckIndex(dir);
+            writer.ForceMerge(1);
+            Assert.AreEqual(30, writer.MaxDoc);
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// Test that heterogeneous index segments are merged successfully
+        /// </summary>
+        // TODO: not sure this test is that great, we should probably peek inside PerFieldPostingsFormat or something?!
+        [Test]
+        public virtual void TestChangeCodecAndMerge()
+        {
+            Directory dir = NewDirectory();
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: make new index");
+            }
+            IndexWriterConfig iwconf = NewIndexWriterConfig(TEST_VERSION_CURRENT, 
+                new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetCodec(new MockCodec());
+            iwconf.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+            // ((LogMergePolicy)iwconf.getMergePolicy()).setMergeFactor(10);
+            IndexWriter writer = NewWriter(dir, iwconf);
+
+            AddDocs(writer, 10);
+            writer.Commit();
+            AssertQuery(new Term("content", "aaa"), dir, 10);
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: addDocs3");
+            }
+            AddDocs3(writer, 10);
+            writer.Commit();
+            writer.Dispose();
+
+            AssertQuery(new Term("content", "ccc"), dir, 10);
+            AssertQuery(new Term("content", "aaa"), dir, 10);
+            Codec codec = iwconf.Codec;
+
+            iwconf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                .SetOpenMode(OpenMode.APPEND).SetCodec(codec);
+            // ((LogMergePolicy)iwconf.getMergePolicy()).setNoCFSRatio(0.0);
+            // ((LogMergePolicy)iwconf.getMergePolicy()).setMergeFactor(10);
+            iwconf.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+
+            iwconf.SetCodec(new MockCodec2()); // uses standard for field content
+            writer = NewWriter(dir, iwconf);
+            // swap in new codec for currently written segments
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: add docs w/ Standard codec for content field");
+            }
+            AddDocs2(writer, 10);
+            writer.Commit();
+            codec = iwconf.Codec;
+            Assert.AreEqual(30, writer.MaxDoc);
+            AssertQuery(new Term("content", "bbb"), dir, 10);
+            AssertQuery(new Term("content", "ccc"), dir, 10); ////
+            AssertQuery(new Term("content", "aaa"), dir, 10);
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: add more docs w/ new codec");
+            }
+            AddDocs2(writer, 10);
+            writer.Commit();
+            AssertQuery(new Term("content", "ccc"), dir, 10);
+            AssertQuery(new Term("content", "bbb"), dir, 20);
+            AssertQuery(new Term("content", "aaa"), dir, 10);
+            Assert.AreEqual(40, writer.MaxDoc);
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: now optimize");
+            }
+            writer.ForceMerge(1);
+            Assert.AreEqual(40, writer.MaxDoc);
+            writer.Dispose();
+            AssertQuery(new Term("content", "ccc"), dir, 10);
+            AssertQuery(new Term("content", "bbb"), dir, 20);
+            AssertQuery(new Term("content", "aaa"), dir, 10);
+
+            dir.Dispose();
+        }
+
+        public virtual void AssertQuery(Term t, Directory dir, int num)
+        {
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: assertQuery " + t);
+            }
+            IndexReader reader = DirectoryReader.Open(dir, 1); // termInfosIndexDivisor = 1
+            IndexSearcher searcher = NewSearcher(reader);
+            TopDocs search = searcher.Search(new TermQuery(t), num + 10);
+            Assert.AreEqual(num, search.TotalHits);
+            reader.Dispose();
+        }
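+
+        // LUCENENET editorial note: a minimal, hypothetical sketch (not part of the
+        // original port) of one way to act on the TODO at the top of this class -
+        // verify that PerFieldPostingsFormat routed a field to the expected format
+        // by inspecting the runtime type of its TermsEnum. The expectedTypeFragment
+        // values (e.g. "SimpleText") are assumptions about concrete enum type names.
+        private void AssertFieldUsesFormat(Directory dir, string field, string expectedTypeFragment)
+        {
+            using (IndexReader reader = DirectoryReader.Open(dir))
+            {
+                Terms terms = MultiFields.GetTerms(reader, field);
+                Assert.IsNotNull(terms, "no terms for field: " + field);
+                TermsEnum termsEnum = terms.GetIterator(null);
+                Assert.IsTrue(termsEnum.GetType().FullName.Contains(expectedTypeFragment),
+                    "unexpected TermsEnum type: " + termsEnum.GetType().FullName);
+            }
+        }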
+
+        private class MockCodec : Lucene46Codec
+        {
+            internal readonly PostingsFormat Lucene41 = new Lucene41PostingsFormat();
+            internal readonly PostingsFormat SimpleText = new SimpleTextPostingsFormat();
+            internal readonly PostingsFormat MockSep = new MockSepPostingsFormat();
+
+            public override PostingsFormat GetPostingsFormatForField(string field)
+            {
+                if (field.Equals("id"))
+                {
+                    return SimpleText;
+                }
+                else if (field.Equals("content"))
+                {
+                    return MockSep;
+                }
+                else
+                {
+                    return Lucene41;
+                }
+            }
+        }
+
+        private class MockCodec2 : Lucene46Codec
+        {
+            internal readonly PostingsFormat Lucene41 = new Lucene41PostingsFormat();
+            internal readonly PostingsFormat SimpleText = new SimpleTextPostingsFormat();
+
+            public override PostingsFormat GetPostingsFormatForField(string field)
+            {
+                if (field.Equals("id"))
+                {
+                    return SimpleText;
+                }
+                else
+                {
+                    return Lucene41;
+                }
+            }
+        }
+
+        /// <summary>
+        /// Test per field codec support - adding fields with random codecs
+        /// </summary>
+        [Test]
+        public virtual void TestStressPerFieldCodec()
+        {
+            Directory dir = NewDirectory(Random());
+            const int docsPerRound = 97;
+            int numRounds = AtLeast(1);
+            for (int i = 0; i < numRounds; i++)
+            {
+                int num = TestUtil.NextInt(Random(), 30, 60);
+                IndexWriterConfig config = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+                config.SetOpenMode(OpenMode.CREATE_OR_APPEND);
+                IndexWriter writer = NewWriter(dir, config);
+                for (int j = 0; j < docsPerRound; j++)
+                {
+                    Document doc = new Document();
+                    for (int k = 0; k < num; k++)
+                    {
+                        FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+                        customType.IsTokenized = Random().NextBoolean();
+                        customType.OmitNorms = Random().NextBoolean();
+                        Field field = NewField("" + k, TestUtil.RandomRealisticUnicodeString(Random(), 128), customType);
+                        doc.Add(field);
+                    }
+                    writer.AddDocument(doc);
+                }
+                if (Random().NextBoolean())
+                {
+                    writer.ForceMerge(1);
+                }
+                writer.Commit();
+                Assert.AreEqual((i + 1) * docsPerRound, writer.MaxDoc);
+                writer.Dispose();
+            }
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSameCodecDifferentInstance()
+        {
+            Codec codec = new Lucene46CodecAnonymousInnerClassHelper(this);
+            DoTestMixedPostings(codec);
+        }
+
+        private class Lucene46CodecAnonymousInnerClassHelper : Lucene46Codec
+        {
+            private readonly TestPerFieldPostingsFormat2 OuterInstance;
+
+            public Lucene46CodecAnonymousInnerClassHelper(TestPerFieldPostingsFormat2 outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override PostingsFormat GetPostingsFormatForField(string field)
+            {
+                if ("id".Equals(field))
+                {
+                    return new Pulsing41PostingsFormat(1);
+                }
+                else if ("date".Equals(field))
+                {
+                    return new Pulsing41PostingsFormat(1);
+                }
+                else
+                {
+                    return base.GetPostingsFormatForField(field);
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestSameCodecDifferentParams()
+        {
+            Codec codec = new Lucene46CodecAnonymousInnerClassHelper2(this);
+            DoTestMixedPostings(codec);
+        }
+
+        private class Lucene46CodecAnonymousInnerClassHelper2 : Lucene46Codec
+        {
+            private readonly TestPerFieldPostingsFormat2 OuterInstance;
+
+            public Lucene46CodecAnonymousInnerClassHelper2(TestPerFieldPostingsFormat2 outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override PostingsFormat GetPostingsFormatForField(string field)
+            {
+                if ("id".Equals(field))
+                {
+                    return new Pulsing41PostingsFormat(1);
+                }
+                else if ("date".Equals(field))
+                {
+                    return new Pulsing41PostingsFormat(2);
+                }
+                else
+                {
+                    return base.GetPostingsFormatForField(field);
+                }
+            }
+        }
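+
+        // Editorial note (an assumption from the Pulsing format's documented behavior,
+        // not part of the original port): the Pulsing41PostingsFormat constructor
+        // argument is the freq cutoff - postings for terms whose document frequency is
+        // at or below it are inlined into the term dictionary. The two helpers above
+        // thus cover "same params, different instances" and "different params".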
+
+        private void DoTestMixedPostings(Codec codec)
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetCodec(codec);
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);
+            Document doc = new Document();
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            // turn on vectors for the checkindex cross-check
+            ft.StoreTermVectors = true;
+            ft.StoreTermVectorOffsets = true;
+            ft.StoreTermVectorPositions = true;
+            Field idField = new Field("id", "", ft);
+            Field dateField = new Field("date", "", ft);
+            doc.Add(idField);
+            doc.Add(dateField);
+            for (int i = 0; i < 100; i++)
+            {
+                idField.SetStringValue(Convert.ToString(Random().Next(50)));
+                dateField.SetStringValue(Convert.ToString(Random().Next(100)));
+                iw.AddDocument(doc);
+            }
+            iw.Dispose();
+            dir.Dispose(); // the NewDirectory() wrapper runs CheckIndex on dispose
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Document/TestBinaryDocument.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Document/TestBinaryDocument.cs b/src/Lucene.Net.Tests/Document/TestBinaryDocument.cs
new file mode 100644
index 0000000..839fa30
--- /dev/null
+++ b/src/Lucene.Net.Tests/Document/TestBinaryDocument.cs
@@ -0,0 +1,122 @@
+using Lucene.Net.Support;
+using NUnit.Framework;
+using System;
+using System.Text;
+
+namespace Lucene.Net.Documents
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using IIndexableField = Lucene.Net.Index.IIndexableField;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+
+    /// <summary>
+    /// Tests <seealso cref="Document"/> class.
+    /// </summary>
+    [TestFixture]
+    public class TestBinaryDocument : LuceneTestCase
+    {
+        internal string BinaryValStored = "this text will be stored as a byte array in the index";
+        internal string BinaryValCompressed = "this text will be also stored and compressed as a byte array in the index";
+
+        [Test]
+        public virtual void TestBinaryFieldInIndex()
+        {
+            FieldType ft = new FieldType();
+            ft.IsStored = true;
+            IIndexableField binaryFldStored = new StoredField("binaryStored", Encoding.UTF8.GetBytes(BinaryValStored));
+            IIndexableField stringFldStored = new Field("stringStored", BinaryValStored, ft);
+
+            Documents.Document doc = new Documents.Document();
+
+            doc.Add(binaryFldStored);
+
+            doc.Add(stringFldStored);
+
+            // test for field count
+            Assert.AreEqual(2, doc.Fields.Count);
+
+            // add the doc to a ram index
+            Directory dir = NewDirectory();
+            Random r = Random();
+            RandomIndexWriter writer = new RandomIndexWriter(r, dir, Similarity, TimeZone);
+            writer.AddDocument(doc);
+
+            // open a reader and fetch the document
+            IndexReader reader = writer.Reader;
+            Documents.Document docFromReader = reader.Document(0);
+            Assert.IsNotNull(docFromReader);
+
+            // fetch the binary stored field and compare its content with the original one
+            BytesRef bytes = docFromReader.GetBinaryValue("binaryStored");
+            Assert.IsNotNull(bytes);
+
+            // decode using the stored offset/length so multi-byte UTF-8 content round-trips correctly
+            string binaryFldStoredTest = Encoding.UTF8.GetString(bytes.Bytes, bytes.Offset, bytes.Length);
+            Assert.AreEqual(BinaryValStored, binaryFldStoredTest);
+
+            // fetch the string field and compare its content with the original one
+            string stringFldStoredTest = docFromReader.Get("stringStored");
+            Assert.AreEqual(BinaryValStored, stringFldStoredTest);
+
+            writer.Dispose();
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestCompressionTools()
+        {
+            IIndexableField binaryFldCompressed = new StoredField("binaryCompressed", CompressionTools.Compress(BinaryValCompressed.GetBytes(Encoding.UTF8)));
+            IIndexableField stringFldCompressed = new StoredField("stringCompressed", CompressionTools.CompressString(BinaryValCompressed));
+
+            var doc = new Documents.Document {binaryFldCompressed, stringFldCompressed};
+
+            using (Directory dir = NewDirectory())
+            using (RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone))
+            {
+                writer.AddDocument(doc);
+
+                using (IndexReader reader = writer.Reader)
+                {
+                    Documents.Document docFromReader = reader.Document(0);
+                    Assert.IsNotNull(docFromReader);
+
+                    string binaryFldCompressedTest =
+                        Encoding.UTF8.GetString(
+                            CompressionTools.Decompress(docFromReader.GetBinaryValue("binaryCompressed")));
+                    Assert.AreEqual(BinaryValCompressed, binaryFldCompressedTest);
+                    Assert.AreEqual(BinaryValCompressed,
+                        CompressionTools.DecompressString(docFromReader.GetBinaryValue("stringCompressed")));
+                }
+
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Document/TestDateTools.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Document/TestDateTools.cs b/src/Lucene.Net.Tests/Document/TestDateTools.cs
new file mode 100644
index 0000000..8a4d823
--- /dev/null
+++ b/src/Lucene.Net.Tests/Document/TestDateTools.cs
@@ -0,0 +1,244 @@
+using Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+using System.Globalization;
+
+namespace Lucene.Net.Documents
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class TestDateTools : LuceneTestCase
+    {
+        //public TestRule TestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule());
+
+        [Test]
+        public virtual void TestStringToDate()
+        {
+            DateTime d = default(DateTime);
+            d = DateTools.StringToDate("2004");
+            Assert.AreEqual("2004-01-01 00:00:00:000", IsoFormat(d));
+            d = DateTools.StringToDate("20040705");
+            Assert.AreEqual("2004-07-05 00:00:00:000", IsoFormat(d));
+            d = DateTools.StringToDate("200407050910");
+            Assert.AreEqual("2004-07-05 09:10:00:000", IsoFormat(d));
+            d = DateTools.StringToDate("20040705091055990");
+            Assert.AreEqual("2004-07-05 09:10:55:990", IsoFormat(d));
+
+            try
+            {
+                d = DateTools.StringToDate("97"); // no date
+                Assert.Fail();
+            }
+            catch (Exception) // expected exception
+            {
+            }
+            try
+            {
+                d = DateTools.StringToDate("200401011235009999"); // date string too long
+                Assert.Fail();
+            }
+            catch (Exception) // expected exception
+            {
+            }
+            try
+            {
+                d = DateTools.StringToDate("aaaa"); // not a date
+                Assert.Fail();
+            }
+            catch (Exception) // expected exception
+            {
+            }
+        }
+
+        [Test]
+        public virtual void TestStringtoTime()
+        {
+            long time = DateTools.StringToTime("197001010000");
+
+            // we use default locale since LuceneTestCase randomizes it
+            //Calendar cal = new GregorianCalendar(TimeZone.GetTimeZone("GMT"), Locale.Default);
+            //cal.Clear();
+           
+            DateTime cal = new GregorianCalendar().ToDateTime(1970, 1, 1, 0, 0, 0, 0); // hour, minute, second -  year=1970, month=january, day=1
+            //cal.set(DateTime.MILLISECOND, 0);
+            Assert.AreEqual(cal.Ticks, time);
+
+            cal = new GregorianCalendar().ToDateTime(1980, 2, 2, 11, 5, 0, 0); // hour, minute, second -  year=1980, month=february, day=2
+            //cal.set(DateTime.MILLISECOND, 0);
+            time = DateTools.StringToTime("198002021105");
+            Assert.AreEqual(cal.Ticks, time);
+        }
+
+        [Test]
+        public virtual void TestDateAndTimetoString()
+        {
+            // we use default locale since LuceneTestCase randomizes it
+            //Calendar cal = new GregorianCalendar(TimeZone.getTimeZone("GMT"), Locale.Default);
+            DateTime cal = new GregorianCalendar().ToDateTime(2004, 2, 3, 22, 8, 56, 333);
+
+            /*cal.clear();
+            cal = new DateTime(2004, 1, 3, 22, 8, 56); // hour, minute, second -  year=2004, month=february(!), day=3
+            cal.set(DateTime.MILLISECOND, 333);*/
+
+            string dateString = DateTools.DateToString(cal, DateTools.Resolution.YEAR);
+            Assert.AreEqual("2004", dateString);
+            Assert.AreEqual("2004-01-01 00:00:00:000", IsoFormat(DateTools.StringToDate(dateString)));
+
+            dateString = DateTools.DateToString(cal, DateTools.Resolution.MONTH);
+            Assert.AreEqual("200402", dateString);
+            Assert.AreEqual("2004-02-01 00:00:00:000", IsoFormat(DateTools.StringToDate(dateString)));
+
+            dateString = DateTools.DateToString(cal, DateTools.Resolution.DAY);
+            Assert.AreEqual("20040203", dateString);
+            Assert.AreEqual("2004-02-03 00:00:00:000", IsoFormat(DateTools.StringToDate(dateString)));
+
+            dateString = DateTools.DateToString(cal, DateTools.Resolution.HOUR);
+            Assert.AreEqual("2004020322", dateString);
+            Assert.AreEqual("2004-02-03 22:00:00:000", IsoFormat(DateTools.StringToDate(dateString)));
+
+            dateString = DateTools.DateToString(cal, DateTools.Resolution.MINUTE);
+            Assert.AreEqual("200402032208", dateString);
+            Assert.AreEqual("2004-02-03 22:08:00:000", IsoFormat(DateTools.StringToDate(dateString)));
+
+            dateString = DateTools.DateToString(cal, DateTools.Resolution.SECOND);
+            Assert.AreEqual("20040203220856", dateString);
+            Assert.AreEqual("2004-02-03 22:08:56:000", IsoFormat(DateTools.StringToDate(dateString)));
+
+            dateString = DateTools.DateToString(cal, DateTools.Resolution.MILLISECOND);
+            Assert.AreEqual("20040203220856333", dateString);
+            Assert.AreEqual("2004-02-03 22:08:56:333", IsoFormat(DateTools.StringToDate(dateString)));
+
+            // date before 1970:
+            cal = new GregorianCalendar().ToDateTime(1961, 3, 5, 23, 9, 51, 444); // hour, minute, second -  year=1961, month=march(!), day=5
+            //cal.set(DateTime.MILLISECOND, 444);
+            dateString = DateTools.DateToString(cal, DateTools.Resolution.MILLISECOND);
+            Assert.AreEqual("19610305230951444", dateString);
+            Assert.AreEqual("1961-03-05 23:09:51:444", IsoFormat(DateTools.StringToDate(dateString)));
+
+            dateString = DateTools.DateToString(cal, DateTools.Resolution.HOUR);
+            Assert.AreEqual("1961030523", dateString);
+            Assert.AreEqual("1961-03-05 23:00:00:000", IsoFormat(DateTools.StringToDate(dateString)));
+
+            // timeToString:
+            cal = new GregorianCalendar().ToDateTime(1970, 1, 1, 0, 0, 0, 0); // hour, minute, second -  year=1970, month=january, day=1
+            //cal.set(DateTime.MILLISECOND, 0);
+            dateString = DateTools.TimeToString(cal.Ticks / TimeSpan.TicksPerMillisecond, DateTools.Resolution.MILLISECOND);
+            Assert.AreEqual("19700101000000000", dateString);
+
+            cal = new GregorianCalendar().ToDateTime(1970, 1, 1, 1, 2, 3, 0); // hour, minute, second -  year=1970, month=january, day=1
+            //cal.set(DateTime.MILLISECOND, 0);
+            dateString = DateTools.TimeToString(cal.Ticks / TimeSpan.TicksPerMillisecond, DateTools.Resolution.MILLISECOND);
+            Assert.AreEqual("19700101010203000", dateString);
+        }
+
+        [Test]
+        public virtual void TestRound()
+        {
+            // we use default locale since LuceneTestCase randomizes it
+            //Calendar cal = new GregorianCalendar(TimeZone.getTimeZone("GMT"), Locale.Default);
+            //cal.clear();
+            DateTime cal = new GregorianCalendar().ToDateTime(2004, 2, 3, 22, 8, 56, 333); // hour, minute, second -  year=2004, month=february(!), day=3
+            //cal.set(DateTime.MILLISECOND, 333);
+            DateTime date = cal;
+            Assert.AreEqual("2004-02-03 22:08:56:333", IsoFormat(date));
+
+            DateTime dateYear = DateTools.Round(date, DateTools.Resolution.YEAR);
+            Assert.AreEqual("2004-01-01 00:00:00:000", IsoFormat(dateYear));
+
+            DateTime dateMonth = DateTools.Round(date, DateTools.Resolution.MONTH);
+            Assert.AreEqual("2004-02-01 00:00:00:000", IsoFormat(dateMonth));
+
+            DateTime dateDay = DateTools.Round(date, DateTools.Resolution.DAY);
+            Assert.AreEqual("2004-02-03 00:00:00:000", IsoFormat(dateDay));
+
+            DateTime dateHour = DateTools.Round(date, DateTools.Resolution.HOUR);
+            Assert.AreEqual("2004-02-03 22:00:00:000", IsoFormat(dateHour));
+
+            DateTime dateMinute = DateTools.Round(date, DateTools.Resolution.MINUTE);
+            Assert.AreEqual("2004-02-03 22:08:00:000", IsoFormat(dateMinute));
+
+            DateTime dateSecond = DateTools.Round(date, DateTools.Resolution.SECOND);
+            Assert.AreEqual("2004-02-03 22:08:56:000", IsoFormat(dateSecond));
+
+            DateTime dateMillisecond = DateTools.Round(date, DateTools.Resolution.MILLISECOND);
+            Assert.AreEqual("2004-02-03 22:08:56:333", IsoFormat(dateMillisecond));
+
+            // long parameter:
+            long dateYearLong = DateTools.Round(date.Ticks / TimeSpan.TicksPerMillisecond, DateTools.Resolution.YEAR);
+            Assert.AreEqual("2004-01-01 00:00:00:000", IsoFormat(new DateTime(dateYearLong)));
+
+            long dateMillisecondLong = DateTools.Round(date.Ticks / TimeSpan.TicksPerMillisecond, DateTools.Resolution.MILLISECOND);
+            Assert.AreEqual("2004-02-03 22:08:56:333", IsoFormat(new DateTime(dateMillisecondLong)));
+        }
+
+        private string IsoFormat(DateTime date)
+        {
+            /*SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss:SSS", Locale.ROOT);
+            sdf.TimeZone = TimeZone.getTimeZone("GMT");
+            return sdf.Format(date);*/
+            return date.ToString("yyyy-MM-dd HH:mm:ss:fff", System.Globalization.CultureInfo.InvariantCulture);
+        }
+
+        [Test]
+        public virtual void TestDateToolsUTC()
+        {
+            // Sun, 30 Oct 2005 00:00:00 +0000 -- the last second of 2005's DST in Europe/London
+            //long time = 1130630400;
+            DateTime time1 = new DateTime(2005, 10, 30);
+            DateTime time2 = time1.AddHours(1);
+            try
+            {
+                //TimeZone.setDefault(TimeZone.getTimeZone("Europe/London")); // {{Aroush-2.0}} need porting 'java.util.TimeZone.getTimeZone'
+                string d1 = DateTools.DateToString(time1, DateTools.Resolution.MINUTE);
+                string d2 = DateTools.DateToString(time2, DateTools.Resolution.MINUTE);
+                Assert.IsFalse(d1.Equals(d2), "different times");
+                Assert.AreEqual(DateTools.StringToTime(d1), time1.Ticks, "midnight");
+                Assert.AreEqual(DateTools.StringToTime(d2), time2.Ticks, "later");
+            }
+            finally
+            {
+                //TimeZone.SetDefault(null);    // {{Aroush-2.0}} need porting 'java.util.TimeZone.setDefault'
+            }
+        }
+    }
+}
\ No newline at end of file


[71/72] [abbrv] lucenenet git commit: Lucene.Net.Tests + Lucene.Net.Codecs: Added LevelOfParallelism attribute at the assembly level to prevent codecs/index related tests from running in parallel (and screwing up the shared static variable for determinin

Posted by ni...@apache.org.
Lucene.Net.Tests + Lucene.Net.Codecs: Added LevelOfParallelism attribute at the assembly level to prevent codecs/index related tests from running in parallel (and screwing up the shared static variable for determining whether old format impersonation is active).


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/9616324b
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/9616324b
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/9616324b

Branch: refs/heads/api-work
Commit: 9616324b2d35cfa7fe34d9b73e0aaf9a24cb3d0e
Parents: 4b0fa13
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Mon Feb 27 06:11:55 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:18:03 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Tests.Codecs/Properties/AssemblyInfo.cs | 8 +++++++-
 src/Lucene.Net.Tests/AssemblyInfo.cs                   | 7 ++++++-
 2 files changed, 13 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9616324b/src/Lucene.Net.Tests.Codecs/Properties/AssemblyInfo.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Codecs/Properties/AssemblyInfo.cs b/src/Lucene.Net.Tests.Codecs/Properties/AssemblyInfo.cs
index a54794e..726a4db 100644
--- a/src/Lucene.Net.Tests.Codecs/Properties/AssemblyInfo.cs
+++ b/src/Lucene.Net.Tests.Codecs/Properties/AssemblyInfo.cs
@@ -1,4 +1,5 @@
-\ufeffusing System.Reflection;
+\ufeffusing NUnit.Framework;
+using System.Reflection;
 using System.Runtime.CompilerServices;
 using System.Runtime.InteropServices;
 
@@ -34,3 +35,8 @@ using System.Runtime.InteropServices;
 // [assembly: AssemblyVersion("1.0.*")]
 [assembly: AssemblyVersion("1.0.0.0")]
 [assembly: AssemblyFileVersion("1.0.0.0")]
+
+
+// LUCENENET specific - only allow tests in this assembly to run one at a time
+// to prevent polluting shared state.
+[assembly: LevelOfParallelism(1)]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9616324b/src/Lucene.Net.Tests/AssemblyInfo.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/AssemblyInfo.cs b/src/Lucene.Net.Tests/AssemblyInfo.cs
index 46d6fb9..186be6b 100644
--- a/src/Lucene.Net.Tests/AssemblyInfo.cs
+++ b/src/Lucene.Net.Tests/AssemblyInfo.cs
@@ -84,4 +84,9 @@ using System.Runtime.CompilerServices;
 
 #if !NETSTANDARD
 [assembly: Timeout(20000)]
-#endif 
\ No newline at end of file
+#endif 
+
+
+// LUCENENET specific - only allow tests in this assembly to run one at a time
+// to prevent polluting shared state.
+[assembly: LevelOfParallelism(1)]
\ No newline at end of file


[44/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs b/src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs
new file mode 100644
index 0000000..8ad5aa9
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs
@@ -0,0 +1,168 @@
+using Lucene.Net.Documents;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.Index
+{
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+
+    using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Document = Documents.Document;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using SortedDocValuesField = SortedDocValuesField;
+
+    [SuppressCodecs("Lucene3x")]
+    [Ignore("very slow")]
+    [TestFixture]
+    public class Test2BSortedDocValues : LuceneTestCase
+    {
+        // indexes Integer.MAX_VALUE docs with a fixed binary field
+        [Test]
+        public virtual void TestFixedSorted([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BFixedSorted"));
+            if (dir is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
+            }
+
+            IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                                .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+                                .SetRAMBufferSizeMB(256.0)
+                                .SetMergeScheduler(scheduler)
+                                .SetMergePolicy(NewLogMergePolicy(false, 10))
+                                .SetOpenMode(OpenMode.CREATE));
+
+            Document doc = new Document();
+            var bytes = new byte[2];
+            BytesRef data = new BytesRef(bytes);
+            SortedDocValuesField dvField = new SortedDocValuesField("dv", data);
+            doc.Add(dvField);
+
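+            // Only the low 16 bits of the counter fit in the 2-byte value, so the
+            // stored values wrap around; the verification loop below recomputes the
+            // same truncated big-endian bytes per document, so the round-trip matches.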
+            for (int i = 0; i < int.MaxValue; i++)
+            {
+                bytes[0] = (byte)(i >> 8);
+                bytes[1] = (byte)i;
+                w.AddDocument(doc);
+                if (i % 100000 == 0)
+                {
+                    Console.WriteLine("indexed: " + i);
+                    Console.Out.Flush();
+                }
+            }
+
+            w.ForceMerge(1);
+            w.Dispose();
+
+            Console.WriteLine("verifying...");
+            Console.Out.Flush();
+
+            DirectoryReader r = DirectoryReader.Open(dir);
+            int expectedValue = 0;
+            foreach (AtomicReaderContext context in r.Leaves)
+            {
+                AtomicReader reader = context.AtomicReader;
+                BytesRef scratch = new BytesRef();
+                BinaryDocValues dv = reader.GetSortedDocValues("dv");
+                for (int i = 0; i < reader.MaxDoc; i++)
+                {
+                    bytes[0] = (byte)(expectedValue >> 8);
+                    bytes[1] = (byte)expectedValue;
+                    dv.Get(i, scratch);
+                    Assert.AreEqual(data, scratch);
+                    expectedValue++;
+                }
+            }
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        // indexes Integer.MAX_VALUE docs with a fixed 4-byte binary field, producing over 2B distinct ords
+        [Test]
+        public virtual void Test2BOrds([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BOrds"));
+            if (dir is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
+            }
+
+            var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                            .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+                            .SetRAMBufferSizeMB(256.0)
+                            .SetMergeScheduler(scheduler)
+                            .SetMergePolicy(NewLogMergePolicy(false, 10))
+                            .SetOpenMode(OpenMode.CREATE);
+            IndexWriter w = new IndexWriter(dir, config);
+
+            Document doc = new Document();
+            var bytes = new byte[4];
+            BytesRef data = new BytesRef(bytes);
+            SortedDocValuesField dvField = new SortedDocValuesField("dv", data);
+            doc.Add(dvField);
+
+            for (int i = 0; i < int.MaxValue; i++)
+            {
+                bytes[0] = (byte)(i >> 24);
+                bytes[1] = (byte)(i >> 16);
+                bytes[2] = (byte)(i >> 8);
+                bytes[3] = (byte)i;
+                w.AddDocument(doc);
+                if (i % 100000 == 0)
+                {
+                    Console.WriteLine("indexed: " + i);
+                    Console.Out.Flush();
+                }
+            }
+
+            w.ForceMerge(1);
+            w.Dispose();
+
+            Console.WriteLine("verifying...");
+            Console.Out.Flush();
+
+            DirectoryReader r = DirectoryReader.Open(dir);
+            int counter = 0;
+            foreach (AtomicReaderContext context in r.Leaves)
+            {
+                AtomicReader reader = context.AtomicReader;
+                BytesRef scratch = new BytesRef();
+                BinaryDocValues dv = reader.GetSortedDocValues("dv");
+                for (int i = 0; i < reader.MaxDoc; i++)
+                {
+                    bytes[0] = (byte)(counter >> 24);
+                    bytes[1] = (byte)(counter >> 16);
+                    bytes[2] = (byte)(counter >> 8);
+                    bytes[3] = (byte)counter;
+                    counter++;
+                    dv.Get(i, scratch);
+                    Assert.AreEqual(data, scratch);
+                }
+            }
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        // TODO: variable
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/Test2BTerms.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/Test2BTerms.cs b/src/Lucene.Net.Tests/Index/Test2BTerms.cs
new file mode 100644
index 0000000..97da141
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/Test2BTerms.cs
@@ -0,0 +1,317 @@
+using System;
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Analysis;
+    using Lucene.Net.Analysis.TokenAttributes;
+
+    using Lucene.Net.Search;
+    using Lucene.Net.Store;
+    using Lucene.Net.Support;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Util;
+    using NUnit.Framework;
+    using System.Reflection;
+    using System.Runtime.CompilerServices;
+    using Codec = Lucene.Net.Codecs.Codec;
+
+    // NOTE: this test will fail w/ PreFlexRW codec!  (Because
+    // this test uses the full binary term space, but PreFlex cannot
+    // handle this since it requires that terms be valid UTF-8 bytes.)
+    //
+    // Also, SimpleText codec will consume very large amounts of
+    // disk (but, should run successfully).  Best to run w/
+    // -Dtests.codec=Standard, and w/ plenty of RAM, eg:
+    //
+    //   ant test -Dtest.slow=true -Dtests.heapsize=8g
+    //
+    //   java -server -Xmx8g -d64 -cp .:lib/junit-4.10.jar:./build/classes/test:./build/classes/test-framework:./build/classes/java -Dlucene.version=4.0-dev -Dtests.directory=MMapDirectory -DtempDir=build -ea org.junit.runner.JUnitCore Lucene.Net.Index.Test2BTerms
+    //
+    [SuppressCodecs("SimpleText", "Memory", "Direct")]
+    [Ignore("SimpleText codec will consume very large amounts of memory.")]
+    [TestFixture]
+    public class Test2BTerms : LuceneTestCase
+    {
+        private const int TOKEN_LEN = 5;
+
+        private static readonly BytesRef Bytes = new BytesRef(TOKEN_LEN);
+
+        private sealed class MyTokenStream : TokenStream
+        {
+            internal readonly int TokensPerDoc;
+            internal int TokenCount;
+            public readonly IList<BytesRef> SavedTerms = new List<BytesRef>();
+            internal int NextSave;
+            internal long TermCounter;
+            internal readonly Random Random;
+
+            public MyTokenStream(Random random, int tokensPerDoc)
+                : base(new MyAttributeFactory(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY))
+            {
+                this.TokensPerDoc = tokensPerDoc;
+                AddAttribute<ITermToBytesRefAttribute>();
+                Bytes.Length = TOKEN_LEN;
+                this.Random = random;
+                NextSave = TestUtil.NextInt(random, 500000, 1000000);
+            }
+
+            public override bool IncrementToken()
+            {
+                ClearAttributes();
+                if (TokenCount >= TokensPerDoc)
+                {
+                    return false;
+                }
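+                // Encode the running 40-bit term counter as 5 big-endian bytes
+                // (TOKEN_LEN), so every generated term is unique and sorts in
+                // counter order.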
+                int shift = 32;
+                for (int i = 0; i < 5; i++)
+                {
+                    Bytes.Bytes[i] = unchecked((byte)((TermCounter >> shift) & 0xFF));
+                    shift -= 8;
+                }
+                TermCounter++;
+                TokenCount++;
+                if (--NextSave == 0)
+                {
+                    SavedTerms.Add(BytesRef.DeepCopyOf(Bytes));
+                    Console.WriteLine("TEST: save term=" + Bytes);
+                    NextSave = TestUtil.NextInt(Random, 500000, 1000000);
+                }
+                return true;
+            }
+
+            public override void Reset()
+            {
+                TokenCount = 0;
+            }
+
+            private sealed class MyTermAttributeImpl : Attribute, ITermToBytesRefAttribute
+            {
+                public void FillBytesRef()
+                {
+                    // no-op: the bytes were already filled by our owner's IncrementToken
+                }
+
+                public BytesRef BytesRef
+                {
+                    get
+                    {
+                        return Bytes;
+                    }
+                }
+
+                public override void Clear()
+                {
+                }
+
+                public override bool Equals(object other)
+                {
+                    return other == this;
+                }
+
+                public override int GetHashCode()
+                {
+                    return RuntimeHelpers.GetHashCode(this);
+                }
+
+                public override void CopyTo(IAttribute target)
+                {
+                }
+
+                public override object Clone()
+                {
+                    throw new System.NotSupportedException();
+                }
+            }
+
+            private sealed class MyAttributeFactory : AttributeFactory
+            {
+                internal readonly AttributeFactory @delegate;
+
+                public MyAttributeFactory(AttributeFactory @delegate)
+                {
+                    this.@delegate = @delegate;
+                }
+
+                public override Attribute CreateAttributeInstance<T>()
+                {
+                    var attClass = typeof(T);
+                    if (attClass == typeof(ITermToBytesRefAttribute))
+                    {
+                        return new MyTermAttributeImpl();
+                    }
+                    if (attClass.GetTypeInfo().IsSubclassOf(typeof(CharTermAttribute)))
+                    {
+                        throw new System.ArgumentException("no");
+                    }
+                    return @delegate.CreateAttributeInstance<T>();
+                }
+            }
+        }
+
+        [Ignore("Very slow. Enable manually by removing Ignore.")]
+        [Test]
+        public virtual void Test2BTerms_Mem([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            if ("Lucene3x".Equals(Codec.Default.Name))
+            {
+                throw new Exception("this test cannot run with PreFlex codec");
+            }
+            Console.WriteLine("Starting Test2B");
+            long TERM_COUNT = ((long)int.MaxValue) + 100000000;
+
+            int TERMS_PER_DOC = TestUtil.NextInt(Random(), 100000, 1000000);
+
+            IList<BytesRef> savedTerms = null;
+
+            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BTerms"));
+            //MockDirectoryWrapper dir = NewFSDirectory(new File("/p/lucene/indices/2bindex"));
+            if (dir is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
+            }
+            dir.CheckIndexOnClose = false; // don't double-checkindex
+
+            if (true)
+            {
+                IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                                           .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+                                           .SetRAMBufferSizeMB(256.0)
+                                           .SetMergeScheduler(scheduler)
+                                           .SetMergePolicy(NewLogMergePolicy(false, 10))
+                                           .SetOpenMode(OpenMode.CREATE));
+
+                MergePolicy mp = w.Config.MergePolicy;
+                if (mp is LogByteSizeMergePolicy)
+                {
+                    // 1 petabyte:
+                    ((LogByteSizeMergePolicy)mp).MaxMergeMB = 1024 * 1024 * 1024;
+                }
+
+                Documents.Document doc = new Documents.Document();
+                MyTokenStream ts = new MyTokenStream(Random(), TERMS_PER_DOC);
+
+                FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+                customType.IndexOptions = IndexOptions.DOCS_ONLY;
+                customType.OmitNorms = true;
+                Field field = new Field("field", ts, customType);
+                doc.Add(field);
+                //w.setInfoStream(System.out);
+                int numDocs = (int)(TERM_COUNT / TERMS_PER_DOC);
+
+                Console.WriteLine("TERMS_PER_DOC=" + TERMS_PER_DOC);
+                Console.WriteLine("numDocs=" + numDocs);
+
+                for (int i = 0; i < numDocs; i++)
+                {
+                    long t0 = Environment.TickCount;
+                    w.AddDocument(doc);
+                    Console.WriteLine(i + " of " + numDocs + " " + (Environment.TickCount - t0) + " msec");
+                }
+                savedTerms = ts.SavedTerms;
+
+                Console.WriteLine("TEST: full merge");
+                w.ForceMerge(1);
+                Console.WriteLine("TEST: close writer");
+                w.Dispose();
+            }
+
+            Console.WriteLine("TEST: open reader");
+            IndexReader r = DirectoryReader.Open(dir);
+            if (savedTerms == null)
+            {
+                savedTerms = FindTerms(r);
+            }
+            int numSavedTerms = savedTerms.Count;
+            IList<BytesRef> bigOrdTerms = new List<BytesRef>(savedTerms.SubList(numSavedTerms - 10, numSavedTerms));
+            Console.WriteLine("TEST: test big ord terms...");
+            TestSavedTerms(r, bigOrdTerms);
+            Console.WriteLine("TEST: test all saved terms...");
+            TestSavedTerms(r, savedTerms);
+            r.Dispose();
+
+            Console.WriteLine("TEST: now CheckIndex...");
+            CheckIndex.Status status = TestUtil.CheckIndex(dir);
+            long tc = status.SegmentInfos[0].TermIndexStatus.TermCount;
+            Assert.IsTrue(tc > int.MaxValue, "count " + tc + " is not > " + int.MaxValue);
+
+            dir.Dispose();
+            Console.WriteLine("TEST: done!");
+        }
+
+        private IList<BytesRef> FindTerms(IndexReader r)
+        {
+            Console.WriteLine("TEST: findTerms");
+            TermsEnum termsEnum = MultiFields.GetTerms(r, "field").GetIterator(null);
+            IList<BytesRef> savedTerms = new List<BytesRef>();
+            int nextSave = TestUtil.NextInt(Random(), 500000, 1000000);
+            BytesRef term;
+            while ((term = termsEnum.Next()) != null)
+            {
+                if (--nextSave == 0)
+                {
+                    savedTerms.Add(BytesRef.DeepCopyOf(term));
+                    Console.WriteLine("TEST: add " + term);
+                    nextSave = TestUtil.NextInt(Random(), 500000, 1000000);
+                }
+            }
+            return savedTerms;
+        }
+
+        private void TestSavedTerms(IndexReader r, IList<BytesRef> terms)
+        {
+            Console.WriteLine("TEST: run " + terms.Count + " terms on reader=" + r);
+            IndexSearcher s = NewSearcher(r);
+            Collections.Shuffle(terms);
+            TermsEnum termsEnum = MultiFields.GetTerms(r, "field").GetIterator(null);
+            bool failed = false;
+            for (int iter = 0; iter < 10 * terms.Count; iter++)
+            {
+                BytesRef term = terms[Random().Next(terms.Count)];
+                Console.WriteLine("TEST: search " + term);
+                long t0 = Environment.TickCount;
+                int count = s.Search(new TermQuery(new Term("field", term)), 1).TotalHits;
+                if (count <= 0)
+                {
+                    Console.WriteLine("  FAILED: count=" + count);
+                    failed = true;
+                }
+                long t1 = Environment.TickCount;
+                Console.WriteLine("  took " + (t1 - t0) + " millis");
+
+                TermsEnum.SeekStatus result = termsEnum.SeekCeil(term);
+                if (result != TermsEnum.SeekStatus.FOUND)
+                {
+                    if (result == TermsEnum.SeekStatus.END)
+                    {
+                        Console.WriteLine("  FAILED: got END");
+                    }
+                    else
+                    {
+                        Console.WriteLine("  FAILED: wrong term: got " + termsEnum.Term);
+                    }
+                    failed = true;
+                }
+            }
+            Assert.IsFalse(failed);
+        }
+    }
+}
\ No newline at end of file
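
The saved-terms check above leans on TermsEnum.SeekCeil's three-way result:
FOUND for an exact match, NOT_FOUND when the enum lands on the next greater
term, and END when the sought term sorts past the last term. A minimal sketch
of that contract using the same Lucene.Net APIs as the test (HasExactTerm is a
hypothetical helper, not part of this patch):

    using Lucene.Net.Index;
    using Lucene.Net.Util;

    internal static class SeekCeilSketch
    {
        // True only when the exact term exists in the field.
        // Assumes the field exists; MultiFields.GetTerms returns null otherwise.
        public static bool HasExactTerm(IndexReader reader, string field, BytesRef term)
        {
            TermsEnum te = MultiFields.GetTerms(reader, field).GetIterator(null);
            return te.SeekCeil(term) == TermsEnum.SeekStatus.FOUND;
        }
    }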

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs b/src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs
new file mode 100644
index 0000000..212eca2
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs
@@ -0,0 +1,123 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MMapDirectory = Lucene.Net.Store.MMapDirectory;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+
+    /// <summary>
+    /// This test creates an index with one segment that is a little larger than 4GB.
+    /// </summary>
+    [SuppressCodecs("SimpleText")]
+    [TestFixture]
+    public class Test4GBStoredFields : LuceneTestCase
+    {
+        [Ignore("//LUCENENET NOTE: This was marked Nightly in Java")]
+        [Test]
+        public virtual void Test([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            MockDirectoryWrapper dir = new MockDirectoryWrapper(Random(), new MMapDirectory(CreateTempDir("4GBStoredFields")));
+            dir.Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
+
+            var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                            .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+                            .SetRAMBufferSizeMB(256.0)
+                            .SetMergeScheduler(scheduler)
+                            .SetMergePolicy(NewLogMergePolicy(false, 10))
+                            .SetOpenMode(OpenMode.CREATE);
+            IndexWriter w = new IndexWriter(dir, config);
+
+            MergePolicy mp = w.Config.MergePolicy;
+            if (mp is LogByteSizeMergePolicy)
+            {
+                // 1 petabyte:
+                ((LogByteSizeMergePolicy)mp).MaxMergeMB = 1024 * 1024 * 1024;
+            }
+
+            Document doc = new Document();
+            FieldType ft = new FieldType();
+            ft.IsIndexed = false;
+            ft.IsStored = true;
+            ft.Freeze();
+            int valueLength = RandomInts.NextIntBetween(Random(), 1 << 13, 1 << 20);
+            var value = new byte[valueLength];
+            for (int i = 0; i < valueLength; ++i)
+            {
+                // random so that even compressing codecs can't compress it
+                value[i] = (byte)Random().Next(256);
+            }
+            Field f = new Field("fld", value, ft);
+            doc.Add(f);
+
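+            // (1L << 32) / valueLength docs of valueLength random bytes each totals
+            // ~4 GiB of stored data; the extra 100 docs push the merged segment past 4 GB.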
+            int numDocs = (int)((1L << 32) / valueLength + 100);
+            for (int i = 0; i < numDocs; ++i)
+            {
+                w.AddDocument(doc);
+                if (VERBOSE && i % (numDocs / 10) == 0)
+                {
+                    Console.WriteLine(i + " of " + numDocs + "...");
+                }
+            }
+            w.ForceMerge(1);
+            w.Dispose();
+            if (VERBOSE)
+            {
+                bool found = false;
+                foreach (string file in dir.ListAll())
+                {
+                    if (file.EndsWith(".fdt"))
+                    {
+                        long fileLength = dir.FileLength(file);
+                        if (fileLength >= 1L << 32)
+                        {
+                            found = true;
+                        }
+                        Console.WriteLine("File length of " + file + " : " + fileLength);
+                    }
+                }
+                if (!found)
+                {
+                    Console.WriteLine("No .fdt file larger than 4GB, test bug?");
+                }
+            }
+
+            DirectoryReader rd = DirectoryReader.Open(dir);
+            Document sd = rd.Document(numDocs - 1);
+            Assert.IsNotNull(sd);
+            Assert.AreEqual(1, sd.Fields.Count);
+            BytesRef valueRef = sd.GetBinaryValue("fld");
+            Assert.IsNotNull(valueRef);
+            Assert.AreEqual(new BytesRef(value), valueRef);
+            rd.Dispose();
+
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file
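
The stored-only field pattern above (IsIndexed = false, IsStored = true, then
Freeze) generalizes to any opaque payload that should be retrievable but never
searchable. A small sketch under those assumptions (the "blob" field name and
the helper are illustrative, not part of this patch):

    using Lucene.Net.Documents;

    internal static class StoredOnlySketch
    {
        // Builds a document carrying a stored-only binary payload.
        public static Document NewBlobDoc(byte[] payload)
        {
            FieldType ft = new FieldType();
            ft.IsIndexed = false;  // never added to the inverted index
            ft.IsStored = true;    // written to the stored-fields (.fdt) file
            ft.Freeze();           // lock the type before reuse across documents

            var doc = new Document();
            doc.Add(new Field("blob", payload, ft));
            return doc;
        }
    }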

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestAddIndexes.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestAddIndexes.cs b/src/Lucene.Net.Tests/Index/TestAddIndexes.cs
new file mode 100644
index 0000000..5389e74
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestAddIndexes.cs
@@ -0,0 +1,1396 @@
+using Lucene.Net.Codecs;
+using Lucene.Net.Documents;
+using Lucene.Net.Support;
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Threading;
+
+namespace Lucene.Net.Index
+{
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+
+    using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
+    using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper;
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using FilterCodec = Lucene.Net.Codecs.FilterCodec;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LockObtainFailedException = Lucene.Net.Store.LockObtainFailedException;
+    using Lucene46Codec = Lucene.Net.Codecs.Lucene46.Lucene46Codec;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using PhraseQuery = Lucene.Net.Search.PhraseQuery;
+    using PostingsFormat = Lucene.Net.Codecs.PostingsFormat;
+    using Pulsing41PostingsFormat = Lucene.Net.Codecs.Pulsing.Pulsing41PostingsFormat;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using StringField = StringField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestAddIndexes : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestSimpleCase()
+        {
+            // main directory
+            Directory dir = NewDirectory();
+            // two auxiliary directories
+            Directory aux = NewDirectory();
+            Directory aux2 = NewDirectory();
+
+            IndexWriter writer = null;
+
+            writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE));
+            // add 100 documents
+            AddDocs(writer, 100);
+            Assert.AreEqual(100, writer.MaxDoc);
+            writer.Dispose();
+            TestUtil.CheckIndex(dir);
+
+            writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMergePolicy(NewLogMergePolicy(false)));
+            // add 40 documents in separate files
+            AddDocs(writer, 40);
+            Assert.AreEqual(40, writer.MaxDoc);
+            writer.Dispose();
+
+            writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE));
+            // add 50 documents in compound files
+            AddDocs2(writer, 50);
+            Assert.AreEqual(50, writer.MaxDoc);
+            writer.Dispose();
+
+            // test doc count before segments are merged
+            writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
+            Assert.AreEqual(100, writer.MaxDoc);
+            writer.AddIndexes(aux, aux2);
+            Assert.AreEqual(190, writer.MaxDoc);
+            writer.Dispose();
+            TestUtil.CheckIndex(dir);
+
+            // make sure the old index is correct
+            VerifyNumDocs(aux, 40);
+
+            // make sure the new index is correct
+            VerifyNumDocs(dir, 190);
+
+            // now add another set in.
+            Directory aux3 = NewDirectory();
+            writer = NewWriter(aux3, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            // add 40 documents
+            AddDocs(writer, 40);
+            Assert.AreEqual(40, writer.MaxDoc);
+            writer.Dispose();
+
+            // test doc count before segments are merged
+            writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
+            Assert.AreEqual(190, writer.MaxDoc);
+            writer.AddIndexes(aux3);
+            Assert.AreEqual(230, writer.MaxDoc);
+            writer.Dispose();
+
+            // make sure the new index is correct
+            VerifyNumDocs(dir, 230);
+
+            VerifyTermDocs(dir, new Term("content", "aaa"), 180);
+
+            VerifyTermDocs(dir, new Term("content", "bbb"), 50);
+
+            // now fully merge it.
+            writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            // make sure the new index is correct
+            VerifyNumDocs(dir, 230);
+
+            VerifyTermDocs(dir, new Term("content", "aaa"), 180);
+
+            VerifyTermDocs(dir, new Term("content", "bbb"), 50);
+
+            // now add a single document
+            Directory aux4 = NewDirectory();
+            writer = NewWriter(aux4, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            AddDocs2(writer, 1);
+            writer.Dispose();
+
+            writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
+            Assert.AreEqual(230, writer.MaxDoc);
+            writer.AddIndexes(aux4);
+            Assert.AreEqual(231, writer.MaxDoc);
+            writer.Dispose();
+
+            VerifyNumDocs(dir, 231);
+
+            VerifyTermDocs(dir, new Term("content", "bbb"), 51);
+            dir.Dispose();
+            aux.Dispose();
+            aux2.Dispose();
+            aux3.Dispose();
+            aux4.Dispose();
+        }
+
+        [Test]
+        public virtual void TestWithPendingDeletes()
+        {
+            // main directory
+            Directory dir = NewDirectory();
+            // auxiliary directory
+            Directory aux = NewDirectory();
+
+            SetUpDirs(dir, aux);
+            IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
+            writer.AddIndexes(aux);
+
+            // Adds 10 docs, then replaces them with another 10
+            // docs, so 10 pending deletes:
+            for (int i = 0; i < 20; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("id", "" + (i % 10), Field.Store.NO));
+                doc.Add(NewTextField("content", "bbb " + i, Field.Store.NO));
+                writer.UpdateDocument(new Term("id", "" + (i % 10)), doc);
+            }
+            // Deletes one of the 10 added docs, leaving 9:
+            PhraseQuery q = new PhraseQuery();
+            q.Add(new Term("content", "bbb"));
+            q.Add(new Term("content", "14"));
+            writer.DeleteDocuments(q);
+
+            writer.ForceMerge(1);
+            writer.Commit();
+
+            VerifyNumDocs(dir, 1039);
+            VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
+            VerifyTermDocs(dir, new Term("content", "bbb"), 9);
+
+            writer.Dispose();
+            dir.Dispose();
+            aux.Dispose();
+        }
+
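+        // Same scenario as TestWithPendingDeletes, except AddIndexes runs after the
+        // buffered updates instead of before them.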
+        [Test]
+        public virtual void TestWithPendingDeletes2()
+        {
+            // main directory
+            Directory dir = NewDirectory();
+            // auxiliary directory
+            Directory aux = NewDirectory();
+
+            SetUpDirs(dir, aux);
+            IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
+
+            // Adds 10 docs, then replaces them with another 10
+            // docs, so 10 pending deletes:
+            for (int i = 0; i < 20; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("id", "" + (i % 10), Field.Store.NO));
+                doc.Add(NewTextField("content", "bbb " + i, Field.Store.NO));
+                writer.UpdateDocument(new Term("id", "" + (i % 10)), doc);
+            }
+
+            writer.AddIndexes(aux);
+
+            // Deletes one of the 10 added docs, leaving 9:
+            PhraseQuery q = new PhraseQuery();
+            q.Add(new Term("content", "bbb"));
+            q.Add(new Term("content", "14"));
+            writer.DeleteDocuments(q);
+
+            writer.ForceMerge(1);
+            writer.Commit();
+
+            VerifyNumDocs(dir, 1039);
+            VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
+            VerifyTermDocs(dir, new Term("content", "bbb"), 9);
+
+            writer.Dispose();
+            dir.Dispose();
+            aux.Dispose();
+        }
+
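+        // Same scenario once more, except AddIndexes runs after the delete-by-query
+        // and before the full merge.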
+        [Test]
+        public virtual void TestWithPendingDeletes3()
+        {
+            // main directory
+            Directory dir = NewDirectory();
+            // auxiliary directory
+            Directory aux = NewDirectory();
+
+            SetUpDirs(dir, aux);
+            IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
+
+            // Adds 10 docs, then replaces them with another 10
+            // docs, so 10 pending deletes:
+            for (int i = 0; i < 20; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("id", "" + (i % 10), Field.Store.NO));
+                doc.Add(NewTextField("content", "bbb " + i, Field.Store.NO));
+                writer.UpdateDocument(new Term("id", "" + (i % 10)), doc);
+            }
+
+            // Deletes one of the 10 added docs, leaving 9:
+            PhraseQuery q = new PhraseQuery();
+            q.Add(new Term("content", "bbb"));
+            q.Add(new Term("content", "14"));
+            writer.DeleteDocuments(q);
+
+            writer.AddIndexes(aux);
+
+            writer.ForceMerge(1);
+            writer.Commit();
+
+            VerifyNumDocs(dir, 1039);
+            VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
+            VerifyTermDocs(dir, new Term("content", "bbb"), 9);
+
+            writer.Dispose();
+            dir.Dispose();
+            aux.Dispose();
+        }
+
+        // case 0: add self or exceed maxMergeDocs, expect exception
+        [Test]
+        public virtual void TestAddSelf()
+        {
+            // main directory
+            Directory dir = NewDirectory();
+            // auxiliary directory
+            Directory aux = NewDirectory();
+
+            IndexWriter writer = null;
+
+            writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            // add 100 documents
+            AddDocs(writer, 100);
+            Assert.AreEqual(100, writer.MaxDoc);
+            writer.Dispose();
+
+            writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false)));
+            // add 40 documents in separate files, then re-create aux with 100 documents
+            AddDocs(writer, 40);
+            writer.Dispose();
+            writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false)));
+            AddDocs(writer, 100);
+            writer.Dispose();
+
+            writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
+            try
+            {
+                // cannot add self
+                writer.AddIndexes(aux, dir);
+                Assert.Fail("did not hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                Assert.AreEqual(100, writer.MaxDoc);
+            }
+            writer.Dispose();
+
+            // make sure the index is correct
+            VerifyNumDocs(dir, 100);
+            dir.Dispose();
+            aux.Dispose();
+        }
+
+        // in all the remaining tests, make the doc count of the oldest segment
+        // in dir large so that it is never merged in addIndexes()
+        // case 1: no tail segments
+        [Test]
+        public virtual void TestNoTailSegments()
+        {
+            // main directory
+            Directory dir = NewDirectory();
+            // auxiliary directory
+            Directory aux = NewDirectory();
+
+            SetUpDirs(dir, aux);
+
+            IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(4)));
+            AddDocs(writer, 10);
+
+            writer.AddIndexes(aux);
+            Assert.AreEqual(1040, writer.MaxDoc);
+            Assert.AreEqual(1000, writer.GetDocCount(0));
+            writer.Dispose();
+
+            // make sure the index is correct
+            VerifyNumDocs(dir, 1040);
+            dir.Dispose();
+            aux.Dispose();
+        }
+
+        // case 2: tail segments, invariants hold, no copy
+        [Test]
+        public virtual void TestNoCopySegments()
+        {
+            // main directory
+            Directory dir = NewDirectory();
+            // auxiliary directory
+            Directory aux = NewDirectory();
+
+            SetUpDirs(dir, aux);
+
+            IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(9).SetMergePolicy(NewLogMergePolicy(4)));
+            AddDocs(writer, 2);
+
+            writer.AddIndexes(aux);
+            Assert.AreEqual(1032, writer.MaxDoc);
+            Assert.AreEqual(1000, writer.GetDocCount(0));
+            writer.Dispose();
+
+            // make sure the index is correct
+            VerifyNumDocs(dir, 1032);
+            dir.Dispose();
+            aux.Dispose();
+        }
+
+        // case 3: tail segments, invariants hold, copy, invariants hold
+        [Test]
+        public virtual void TestNoMergeAfterCopy()
+        {
+            // main directory
+            Directory dir = NewDirectory();
+            // auxiliary directory
+            Directory aux = NewDirectory();
+
+            SetUpDirs(dir, aux);
+
+            IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(4)));
+
+            writer.AddIndexes(aux, new MockDirectoryWrapper(Random(), new RAMDirectory(aux, NewIOContext(Random()))));
+            Assert.AreEqual(1060, writer.MaxDoc);
+            Assert.AreEqual(1000, writer.GetDocCount(0));
+            writer.Dispose();
+
+            // make sure the index is correct
+            VerifyNumDocs(dir, 1060);
+            dir.Dispose();
+            aux.Dispose();
+        }
+
+        // case 4: tail segments, invariants hold, copy, invariants do not hold
+        [Test]
+        public virtual void TestMergeAfterCopy()
+        {
+            // main directory
+            Directory dir = NewDirectory();
+            // auxiliary directory
+            Directory aux = NewDirectory();
+
+            SetUpDirs(dir, aux, true);
+
+            IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+            IndexWriter writer = new IndexWriter(aux, dontMergeConfig);
+            for (int i = 0; i < 20; i++)
+            {
+                writer.DeleteDocuments(new Term("id", "" + i));
+            }
+            writer.Dispose();
+            IndexReader reader = DirectoryReader.Open(aux);
+            Assert.AreEqual(10, reader.NumDocs);
+            reader.Dispose();
+
+            writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(4).SetMergePolicy(NewLogMergePolicy(4)));
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: now addIndexes");
+            }
+            writer.AddIndexes(aux, new MockDirectoryWrapper(Random(), new RAMDirectory(aux, NewIOContext(Random()))));
+            Assert.AreEqual(1020, writer.MaxDoc);
+            Assert.AreEqual(1000, writer.GetDocCount(0));
+            writer.Dispose();
+            dir.Dispose();
+            aux.Dispose();
+        }
+
+        // case 5: tail segments, invariants do not hold
+        [Test]
+        public virtual void TestMoreMerges()
+        {
+            // main directory
+            Directory dir = NewDirectory();
+            // auxiliary directory
+            Directory aux = NewDirectory();
+            Directory aux2 = NewDirectory();
+
+            SetUpDirs(dir, aux, true);
+
+            IndexWriter writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(100).SetMergePolicy(NewLogMergePolicy(10)));
+            writer.AddIndexes(aux);
+            Assert.AreEqual(30, writer.MaxDoc);
+            Assert.AreEqual(3, writer.SegmentCount);
+            writer.Dispose();
+
+            IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+            writer = new IndexWriter(aux, dontMergeConfig);
+            for (int i = 0; i < 27; i++)
+            {
+                writer.DeleteDocuments(new Term("id", "" + i));
+            }
+            writer.Dispose();
+            IndexReader reader = DirectoryReader.Open(aux);
+            Assert.AreEqual(3, reader.NumDocs);
+            reader.Dispose();
+
+            dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+            writer = new IndexWriter(aux2, dontMergeConfig);
+            for (int i = 0; i < 8; i++)
+            {
+                writer.DeleteDocuments(new Term("id", "" + i));
+            }
+            writer.Dispose();
+            reader = DirectoryReader.Open(aux2);
+            Assert.AreEqual(22, reader.NumDocs);
+            reader.Dispose();
+
+            writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(6).SetMergePolicy(NewLogMergePolicy(4)));
+
+            writer.AddIndexes(aux, aux2);
+            Assert.AreEqual(1040, writer.MaxDoc);
+            Assert.AreEqual(1000, writer.GetDocCount(0));
+            writer.Dispose();
+            dir.Dispose();
+            aux.Dispose();
+            aux2.Dispose();
+        }
+
+        private IndexWriter NewWriter(Directory dir, IndexWriterConfig conf)
+        {
+            conf.SetMergePolicy(new LogDocMergePolicy());
+            IndexWriter writer = new IndexWriter(dir, conf);
+            return writer;
+        }
+
+        private void AddDocs(IndexWriter writer, int numDocs)
+        {
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+                writer.AddDocument(doc);
+            }
+        }
+
+        private void AddDocs2(IndexWriter writer, int numDocs)
+        {
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField("content", "bbb", Field.Store.NO));
+                writer.AddDocument(doc);
+            }
+        }
+
+        private void VerifyNumDocs(Directory dir, int numDocs)
+        {
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(numDocs, reader.MaxDoc);
+            Assert.AreEqual(numDocs, reader.NumDocs);
+            reader.Dispose();
+        }
+
+        private void VerifyTermDocs(Directory dir, Term term, int numDocs)
+        {
+            IndexReader reader = DirectoryReader.Open(dir);
+            DocsEnum docsEnum = TestUtil.Docs(Random(), reader, term.Field, term.Bytes, null, null, DocsEnum.FLAG_NONE);
+            int count = 0;
+            while (docsEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                count++;
+            }
+            Assert.AreEqual(numDocs, count);
+            reader.Dispose();
+        }
+
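+        // Seeds dir with 1000 docs in a single segment and aux with 30 docs spread
+        // across 3 segments, optionally tagging docs with an "id" field for deletes.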
+        private void SetUpDirs(Directory dir, Directory aux)
+        {
+            SetUpDirs(dir, aux, false);
+        }
+
+        private void SetUpDirs(Directory dir, Directory aux, bool withID)
+        {
+            IndexWriter writer = null;
+
+            writer = NewWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000));
+            // add 1000 documents in 1 segment
+            if (withID)
+            {
+                AddDocsWithID(writer, 1000, 0);
+            }
+            else
+            {
+                AddDocs(writer, 1000);
+            }
+            Assert.AreEqual(1000, writer.MaxDoc);
+            Assert.AreEqual(1, writer.SegmentCount);
+            writer.Dispose();
+
+            writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false, 10)));
+            // add 30 documents in 3 segments
+            for (int i = 0; i < 3; i++)
+            {
+                if (withID)
+                {
+                    AddDocsWithID(writer, 10, 10 * i);
+                }
+                else
+                {
+                    AddDocs(writer, 10);
+                }
+                writer.Dispose();
+                writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false, 10)));
+            }
+            Assert.AreEqual(30, writer.MaxDoc);
+            Assert.AreEqual(3, writer.SegmentCount);
+            writer.Dispose();
+        }
+
+        // LUCENE-1270
+        [Test]
+        public virtual void TestHangOnClose()
+        {
+            Directory dir = NewDirectory();
+            LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy();
+            lmp.NoCFSRatio = 0.0;
+            lmp.MergeFactor = 100;
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(5).SetMergePolicy(lmp));
+
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+            doc.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType));
+            for (int i = 0; i < 60; i++)
+            {
+                writer.AddDocument(doc);
+            }
+
+            Document doc2 = new Document();
+            FieldType customType2 = new FieldType();
+            customType2.IsStored = true;
+            doc2.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType2));
+            doc2.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType2));
+            doc2.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType2));
+            doc2.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType2));
+            for (int i = 0; i < 10; i++)
+            {
+                writer.AddDocument(doc2);
+            }
+            writer.Dispose();
+
+            Directory dir2 = NewDirectory();
+            lmp = new LogByteSizeMergePolicy();
+            lmp.MinMergeMB = 0.0001;
+            lmp.NoCFSRatio = 0.0;
+            lmp.MergeFactor = 4;
+            writer = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(lmp));
+            writer.AddIndexes(dir);
+            writer.Dispose();
+            dir.Dispose();
+            dir2.Dispose();
+        }
+
+        // TODO: these are also in TestIndexWriter... add a simple doc-writing method
+        // like this to LuceneTestCase?
+        private void AddDoc(IndexWriter writer)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+            writer.AddDocument(doc);
+        }
+
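+        // Shared harness for the LUCENE-1335 tests: NUM_THREADS threads repeatedly
+        // run DoBody() against copies of a 17-doc source index while the main thread
+        // commits/closes Writer2; Handle() decides which exceptions count as failures.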
+        private abstract class RunAddIndexesThreads
+        {
+            private readonly TestAddIndexes OuterInstance;
+
+            internal Directory Dir, Dir2;
+            internal const int NUM_INIT_DOCS = 17;
+            internal IndexWriter Writer2;
+            internal readonly IList<Exception> Failures = new List<Exception>();
+            internal volatile bool DidClose;
+            internal readonly IndexReader[] Readers;
+            internal readonly int NUM_COPY;
+            internal const int NUM_THREADS = 5;
+            internal readonly ThreadClass[] Threads = new ThreadClass[NUM_THREADS];
+
+            public RunAddIndexesThreads(TestAddIndexes outerInstance, int numCopy)
+            {
+                this.OuterInstance = outerInstance;
+                NUM_COPY = numCopy;
+                Dir = new MockDirectoryWrapper(Random(), new RAMDirectory());
+                IndexWriter writer = new IndexWriter(Dir, (IndexWriterConfig)new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
+                for (int i = 0; i < NUM_INIT_DOCS; i++)
+                {
+                    outerInstance.AddDoc(writer);
+                }
+                writer.Dispose();
+
+                Dir2 = NewDirectory();
+                Writer2 = new IndexWriter(Dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                Writer2.Commit();
+
+                Readers = new IndexReader[NUM_COPY];
+                for (int i = 0; i < NUM_COPY; i++)
+                {
+                    Readers[i] = DirectoryReader.Open(Dir);
+                }
+            }
+
+            internal virtual void LaunchThreads(int numIter)
+            {
+                for (int i = 0; i < NUM_THREADS; i++)
+                {
+                    Threads[i] = new ThreadAnonymousInnerClassHelper(this, numIter);
+                }
+
+                for (int i = 0; i < NUM_THREADS; i++)
+                {
+                    Threads[i].Start();
+                }
+            }
+
+            private class ThreadAnonymousInnerClassHelper : ThreadClass
+            {
+                private readonly RunAddIndexesThreads OuterInstance;
+
+                private int NumIter;
+
+                public ThreadAnonymousInnerClassHelper(RunAddIndexesThreads outerInstance, int numIter)
+                {
+                    this.OuterInstance = outerInstance;
+                    this.NumIter = numIter;
+                }
+
+                public override void Run()
+                {
+                    try
+                    {
+                        Directory[] dirs = new Directory[OuterInstance.NUM_COPY];
+                        for (int k = 0; k < OuterInstance.NUM_COPY; k++)
+                        {
+                            dirs[k] = new MockDirectoryWrapper(Random(), new RAMDirectory(OuterInstance.Dir, NewIOContext(Random())));
+                        }
+
+                        int j = 0;
+
+                        while (true)
+                        {
+                            // System.out.println(Thread.currentThread().getName() + ": iter j=" + j);
+                            if (NumIter > 0 && j == NumIter)
+                            {
+                                break;
+                            }
+                            OuterInstance.DoBody(j++, dirs);
+                        }
+                    }
+                    catch (Exception t)
+                    {
+                        OuterInstance.Handle(t);
+                    }
+                }
+            }
+
+            internal virtual void JoinThreads()
+            {
+                for (int i = 0; i < NUM_THREADS; i++)
+                {
+                    Threads[i].Join();
+                }
+            }
+
+            internal virtual void Close(bool doWait)
+            {
+                DidClose = true;
+                Writer2.Dispose(doWait);
+            }
+
+            internal virtual void CloseDir()
+            {
+                for (int i = 0; i < NUM_COPY; i++)
+                {
+                    Readers[i].Dispose();
+                }
+                Dir2.Dispose();
+            }
+
+            internal abstract void DoBody(int j, Directory[] dirs);
+
+            internal abstract void Handle(Exception t);
+        }
+
+        private class CommitAndAddIndexes : RunAddIndexesThreads
+        {
+            private readonly TestAddIndexes OuterInstance;
+
+            public CommitAndAddIndexes(TestAddIndexes outerInstance, int numCopy)
+                : base(outerInstance, numCopy)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            internal override void Handle(Exception t)
+            {
+                Console.Error.WriteLine(t.StackTrace);
+                lock (Failures)
+                {
+                    Failures.Add(t);
+                }
+            }
+
+            internal override void DoBody(int j, Directory[] dirs)
+            {
+                switch (j % 5)
+                {
+                    case 0:
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine(Thread.CurrentThread.Name + ": TEST: addIndexes(Dir[]) then full merge");
+                        }
+                        Writer2.AddIndexes(dirs);
+                        Writer2.ForceMerge(1);
+                        break;
+
+                    case 1:
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine(Thread.CurrentThread.Name + ": TEST: addIndexes(Dir[])");
+                        }
+                        Writer2.AddIndexes(dirs);
+                        break;
+
+                    case 2:
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine(Thread.CurrentThread.Name + ": TEST: addIndexes(IndexReader[])");
+                        }
+                        Writer2.AddIndexes(Readers);
+                        break;
+
+                    case 3:
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine(Thread.CurrentThread.Name + ": TEST: addIndexes(Dir[]) then maybeMerge");
+                        }
+                        Writer2.AddIndexes(dirs);
+                        Writer2.MaybeMerge();
+                        break;
+
+                    case 4:
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine(Thread.CurrentThread.Name + ": TEST: commit");
+                        }
+                        Writer2.Commit();
+                        break;
+                }
+            }
+        }
+
+        // LUCENE-1335: test simultaneous addIndexes & commits
+        // from multiple threads
+        [Test]
+        public virtual void TestAddIndexesWithThreads()
+        {
+            int NUM_ITER = TEST_NIGHTLY ? 15 : 5;
+            const int NUM_COPY = 3;
+            CommitAndAddIndexes c = new CommitAndAddIndexes(this, NUM_COPY);
+            c.LaunchThreads(NUM_ITER);
+
+            for (int i = 0; i < 100; i++)
+            {
+                AddDoc(c.Writer2);
+            }
+
+            c.JoinThreads();
+
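+            // Cases 0-3 of DoBody each add NUM_COPY sources of NUM_INIT_DOCS docs;
+            // case 4 only commits, hence the 4/5 factor per thread per iteration.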
+            int expectedNumDocs = 100 + NUM_COPY * (4 * NUM_ITER / 5) * RunAddIndexesThreads.NUM_THREADS * RunAddIndexesThreads.NUM_INIT_DOCS;
+            Assert.AreEqual(expectedNumDocs, c.Writer2.NumDocs, "expected num docs don't match - failures: " + Environment.NewLine
+                + string.Join(Environment.NewLine, c.Failures.Select(x => x.ToString())));
+
+            c.Close(true);
+
+            Assert.IsTrue(c.Failures.Count == 0, "found unexpected failures: " + c.Failures);
+
+            IndexReader reader = DirectoryReader.Open(c.Dir2);
+            Assert.AreEqual(expectedNumDocs, reader.NumDocs);
+            reader.Dispose();
+
+            c.CloseDir();
+        }
+
+        private class CommitAndAddIndexes2 : CommitAndAddIndexes
+        {
+            private readonly TestAddIndexes OuterInstance;
+
+            public CommitAndAddIndexes2(TestAddIndexes outerInstance, int numCopy)
+                : base(outerInstance, numCopy)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            internal override void Handle(Exception t)
+            {
+                if (!(t is AlreadyClosedException) && !(t is System.NullReferenceException))
+                {
+                    Console.Error.WriteLine(t.StackTrace);
+                    lock (Failures)
+                    {
+                        Failures.Add(t);
+                    }
+                }
+            }
+        }
+
+        // LUCENE-1335: test simultaneous addIndexes & close
+        [Test]
+        public virtual void TestAddIndexesWithClose()
+        {
+            const int NUM_COPY = 3;
+            CommitAndAddIndexes2 c = new CommitAndAddIndexes2(this, NUM_COPY);
+            //c.writer2.setInfoStream(System.out);
+            c.LaunchThreads(-1);
+
+            // Close w/o first stopping/joining the threads
+            c.Close(true);
+            //c.writer2.Dispose();
+
+            c.JoinThreads();
+
+            c.CloseDir();
+
+            Assert.IsTrue(c.Failures.Count == 0);
+        }
+
+        private class CommitAndAddIndexes3 : RunAddIndexesThreads
+        {
+            private readonly TestAddIndexes OuterInstance;
+
+            public CommitAndAddIndexes3(TestAddIndexes outerInstance, int numCopy)
+                : base(outerInstance, numCopy)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            internal override void DoBody(int j, Directory[] dirs)
+            {
+                switch (j % 5)
+                {
+                    case 0:
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": addIndexes + full merge");
+                        }
+                        Writer2.AddIndexes(dirs);
+                        Writer2.ForceMerge(1);
+                        break;
+
+                    case 1:
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": addIndexes");
+                        }
+                        Writer2.AddIndexes(dirs);
+                        break;
+
+                    case 2:
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": addIndexes(IR[])");
+                        }
+                        Writer2.AddIndexes(Readers);
+                        break;
+
+                    case 3:
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": full merge");
+                        }
+                        Writer2.ForceMerge(1);
+                        break;
+
+                    case 4:
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": commit");
+                        }
+                        Writer2.Commit();
+                        break;
+                }
+            }
+
+            internal override void Handle(Exception t)
+            {
+                bool report = true;
+
+                if (t is AlreadyClosedException || t is MergePolicy.MergeAbortedException || t is System.NullReferenceException)
+                {
+                    report = !DidClose;
+                }
+                else if (t is FileNotFoundException/* || t is NoSuchFileException*/)
+                {
+                    report = !DidClose;
+                }
+                else if (t is IOException)
+                {
+                    Exception t2 = t.InnerException;
+                    if (t2 is MergePolicy.MergeAbortedException)
+                    {
+                        report = !DidClose;
+                    }
+                }
+                if (report)
+                {
+                    Console.Out.WriteLine(t.StackTrace);
+                    lock (Failures)
+                    {
+                        Failures.Add(t);
+                    }
+                }
+            }
+        }
+
+        // LUCENE-1335: test simultaneous addIndexes & close
+        [Test]
+        public virtual void TestAddIndexesWithCloseNoWait()
+        {
+            const int NUM_COPY = 50;
+            CommitAndAddIndexes3 c = new CommitAndAddIndexes3(this, NUM_COPY);
+            c.LaunchThreads(-1);
+
+            Thread.Sleep(TestUtil.NextInt(Random(), 10, 500));
+
+            // Close w/o first stopping/joining the threads
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: now close(false)");
+            }
+            c.Close(false);
+
+            c.JoinThreads();
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: done join threads");
+            }
+            c.CloseDir();
+
+            Assert.IsTrue(c.Failures.Count == 0);
+        }
+
+        // LUCENE-1335: test simultaneous addIndexes & close
+        [Test]
+        public virtual void TestAddIndexesWithRollback()
+        {
+            int NUM_COPY = TEST_NIGHTLY ? 50 : 5;
+            CommitAndAddIndexes3 c = new CommitAndAddIndexes3(this, NUM_COPY);
+            c.LaunchThreads(-1);
+
+            Thread.Sleep(TestUtil.NextInt(Random(), 10, 500));
+
+            // Close w/o first stopping/joining the threads
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: now force rollback");
+            }
+            c.DidClose = true;
+            c.Writer2.Rollback();
+
+            c.JoinThreads();
+
+            c.CloseDir();
+
+            Assert.IsTrue(c.Failures.Count == 0);
+        }
+
+        // LUCENE-2996: tests that addIndexes(IndexReader) applies existing deletes correctly.
+        [Test]
+        public virtual void TestExistingDeletes()
+        {
+            Directory[] dirs = new Directory[2];
+            for (int i = 0; i < dirs.Length; i++)
+            {
+                dirs[i] = NewDirectory();
+                IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+                IndexWriter writer = new IndexWriter(dirs[i], conf);
+                Document doc = new Document();
+                doc.Add(new StringField("id", "myid", Field.Store.NO));
+                writer.AddDocument(doc);
+                writer.Dispose();
+            }
+
+            IndexWriterConfig conf_ = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer_ = new IndexWriter(dirs[0], conf_);
+
+            // Now delete the document
+            writer_.DeleteDocuments(new Term("id", "myid"));
+            IndexReader r = DirectoryReader.Open(dirs[1]);
+            try
+            {
+                writer_.AddIndexes(r);
+            }
+            finally
+            {
+                r.Dispose();
+            }
+            writer_.Commit();
+            Assert.AreEqual(1, writer_.NumDocs, "Documents from the incoming index should not have been deleted");
+            writer_.Dispose();
+
+            foreach (Directory dir in dirs)
+            {
+                dir.Dispose();
+            }
+        }
+
+        // just like addDocs but with ID, starting from docStart
+        private void AddDocsWithID(IndexWriter writer, int numDocs, int docStart)
+        {
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+                doc.Add(NewTextField("id", "" + (docStart + i), Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+        }
+
+        [Test]
+        public virtual void TestSimpleCaseCustomCodec()
+        {
+            // main directory
+            Directory dir = NewDirectory();
+            // two auxiliary directories
+            Directory aux = NewDirectory();
+            Directory aux2 = NewDirectory();
+            Codec codec = new CustomPerFieldCodec();
+            IndexWriter writer = null;
+
+            writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetCodec(codec));
+            // add 100 documents
+            AddDocsWithID(writer, 100, 0);
+            Assert.AreEqual(100, writer.MaxDoc);
+            writer.Commit();
+            writer.Dispose();
+            TestUtil.CheckIndex(dir);
+
+            writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetCodec(codec).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(false)));
+            // add 40 documents in separate files
+            AddDocs(writer, 40);
+            Assert.AreEqual(40, writer.MaxDoc);
+            writer.Commit();
+            writer.Dispose();
+
+            writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetCodec(codec));
+            // add 50 documents in compound files
+            AddDocs2(writer, 50);
+            Assert.AreEqual(50, writer.MaxDoc);
+            writer.Commit();
+            writer.Dispose();
+
+            // test doc count before segments are merged
+            writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetCodec(codec));
+            Assert.AreEqual(100, writer.MaxDoc);
+            writer.AddIndexes(aux, aux2);
+            Assert.AreEqual(190, writer.MaxDoc);
+            writer.Dispose();
+
+            dir.Dispose();
+            aux.Dispose();
+            aux2.Dispose();
+        }
+
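+        // Routes the "id" field to SimpleText postings, "content" to MockSep, and
+        // every other field to the default Lucene41 format.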
+        private sealed class CustomPerFieldCodec : Lucene46Codec
+        {
+            internal readonly PostingsFormat SimpleTextFormat;
+            internal readonly PostingsFormat DefaultFormat;
+            internal readonly PostingsFormat MockSepFormat;
+
+            public CustomPerFieldCodec()
+            {
+                SimpleTextFormat = Codecs.PostingsFormat.ForName("SimpleText");
+                DefaultFormat = Codecs.PostingsFormat.ForName("Lucene41");
+                MockSepFormat = Codecs.PostingsFormat.ForName("MockSep");
+            }
+
+            public override PostingsFormat GetPostingsFormatForField(string field)
+            {
+                if (field.Equals("id"))
+                {
+                    return SimpleTextFormat;
+                }
+                else if (field.Equals("content"))
+                {
+                    return MockSepFormat;
+                }
+                else
+                {
+                    return DefaultFormat;
+                }
+            }
+        }
+
+        // LUCENE-2790: tests that the non-CFS files were deleted by addIndexes
+        [Test]
+        public virtual void TestNonCFSLeftovers()
+        {
+            Directory[] dirs = new Directory[2];
+            for (int i = 0; i < dirs.Length; i++)
+            {
+                dirs[i] = new RAMDirectory();
+                IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                Document d = new Document();
+                FieldType customType = new FieldType(TextField.TYPE_STORED);
+                customType.StoreTermVectors = true;
+                d.Add(new Field("c", "v", customType));
+                w.AddDocument(d);
+                w.Dispose();
+            }
+
+            IndexReader[] readers = new IndexReader[] { DirectoryReader.Open(dirs[0]), DirectoryReader.Open(dirs[1]) };
+
+            Directory dir = new MockDirectoryWrapper(Random(), new RAMDirectory());
+            IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NewLogMergePolicy(true));
+            MergePolicy lmp = conf.MergePolicy;
+            // Force creation of CFS:
+            lmp.NoCFSRatio = 1.0;
+            lmp.MaxCFSSegmentSizeMB = double.PositiveInfinity;
+            IndexWriter w3 = new IndexWriter(dir, conf);
+            w3.AddIndexes(readers);
+            w3.Dispose();
+            // we should now see segments_X,
+            // segments.gen, _Y.cfs, _Y.cfe, _Z.si
+            Assert.AreEqual(5, dir.ListAll().Length, "Only one compound segment should exist, but got: " + Arrays.ToString(dir.ListAll()));
+            dir.Dispose();
+        }
+
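+        // A codec published under a name that is never registered, so the test below
+        // expects ArgumentException when reading or importing an index written with it.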
+        [CodecName("NotRegistered")]
+        private sealed class UnRegisteredCodec : FilterCodec
+        {
+            public UnRegisteredCodec()
+                : base(new Lucene46Codec())
+            {
+            }
+        }
+
+        /*
+         * simple test that ensures we get the expected exceptions
+         */
+        [Test]
+        public virtual void TestAddIndexMissingCodec()
+        {
+            BaseDirectoryWrapper toAdd = NewDirectory();
+            // Disable checkIndex, else we get an exception because
+            // of the unregistered codec:
+            toAdd.CheckIndexOnClose = false;
+            {
+                IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+                conf.SetCodec(new UnRegisteredCodec());
+                using (var w = new IndexWriter(toAdd, conf))
+                {
+                    Document doc = new Document();
+                    FieldType customType = new FieldType();
+                    customType.IsIndexed = true;
+                    doc.Add(NewField("foo", "bar", customType));
+                    w.AddDocument(doc);
+                }
+            }
+
+            {
+                using (Directory dir = NewDirectory())
+                {
+                    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+                    conf.SetCodec(TestUtil.AlwaysPostingsFormat(new Pulsing41PostingsFormat(1 + Random().Next(20))));
+                    IndexWriter w = new IndexWriter(dir, conf);
+                    try
+                    {
+                        w.AddIndexes(toAdd);
+                        Assert.Fail("no such codec");
+                    }
+#pragma warning disable 168
+                    catch (System.ArgumentException ex)
+#pragma warning restore 168
+                    {
+                        // expected
+                    }
+                    finally
+                    {
+                        w.Dispose();
+                    }
+                    using (IndexReader open = DirectoryReader.Open(dir))
+                    {
+                        Assert.AreEqual(0, open.NumDocs);
+                    }
+                }
+            }
+
+            try
+            {
+                DirectoryReader.Open(toAdd);
+                Assert.Fail("no such codec");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException ex)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            toAdd.Dispose();
+        }
+
+        // LUCENE-3575
+        [Test]
+        public virtual void TestFieldNamesChanged()
+        {
+            Directory d1 = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d1, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewStringField("f1", "doc1 field1", Field.Store.YES));
+            doc.Add(NewStringField("id", "1", Field.Store.YES));
+            w.AddDocument(doc);
+            IndexReader r1 = w.Reader;
+            w.Dispose();
+
+            Directory d2 = NewDirectory();
+            w = new RandomIndexWriter(Random(), d2, Similarity, TimeZone);
+            doc = new Document();
+            doc.Add(NewStringField("f2", "doc2 field2", Field.Store.YES));
+            doc.Add(NewStringField("id", "2", Field.Store.YES));
+            w.AddDocument(doc);
+            IndexReader r2 = w.Reader;
+            w.Dispose();
+
+            Directory d3 = NewDirectory();
+            w = new RandomIndexWriter(Random(), d3, Similarity, TimeZone);
+            w.AddIndexes(r1, r2);
+            r1.Dispose();
+            d1.Dispose();
+            r2.Dispose();
+            d2.Dispose();
+
+            IndexReader r3 = w.Reader;
+            w.Dispose();
+            Assert.AreEqual(2, r3.NumDocs);
+            for (int docID = 0; docID < 2; docID++)
+            {
+                Document d = r3.Document(docID);
+                if (d.Get("id").Equals("1"))
+                {
+                    Assert.AreEqual("doc1 field1", d.Get("f1"));
+                }
+                else
+                {
+                    Assert.AreEqual("doc2 field2", d.Get("f2"));
+                }
+            }
+            r3.Dispose();
+            d3.Dispose();
+        }
+
+        [Test]
+        public virtual void TestAddEmpty()
+        {
+            Directory d1 = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d1, Similarity, TimeZone);
+            MultiReader empty = new MultiReader();
+            w.AddIndexes(empty);
+            w.Dispose();
+            DirectoryReader dr = DirectoryReader.Open(d1);
+            foreach (AtomicReaderContext ctx in dr.Leaves)
+            {
+                Assert.IsTrue(ctx.Reader.MaxDoc > 0, "empty segments should be dropped by addIndexes");
+            }
+            dr.Dispose();
+            d1.Dispose();
+        }
+
+        // Currently it's impossible to end up with a segment with all documents
+        // deleted, as such segments are dropped. Still, to validate that addIndexes
+        // works with such segments, or readers that end up in such a state, we fake
+        // an all-deleted segment.
+        [Test]
+        public virtual void TestFakeAllDeleted()
+        {
+            Directory src = NewDirectory(), dest = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), src, Similarity, TimeZone);
+            w.AddDocument(new Document());
+            IndexReader allDeletedReader = new AllDeletedFilterReader((AtomicReader)w.Reader.Leaves[0].Reader);
+            w.Dispose();
+
+            w = new RandomIndexWriter(Random(), dest, Similarity, TimeZone);
+            w.AddIndexes(allDeletedReader);
+            w.Dispose();
+            DirectoryReader dr = DirectoryReader.Open(src);
+            foreach (AtomicReaderContext ctx in dr.Leaves)
+            {
+                Assert.IsTrue(ctx.Reader.MaxDoc > 0, "empty segments should be dropped by addIndexes");
+            }
+            dr.Dispose();
+            allDeletedReader.Dispose();
+            src.Dispose();
+            dest.Dispose();
+        }
+
+        /// <summary>
+        /// Make sure an open IndexWriter on an incoming Directory
+        ///  causes a LockObtainFailedException
+        /// </summary>
+        [Test]
+        public virtual void TestLocksBlock()
+        {
+            Directory src = NewDirectory();
+            RandomIndexWriter w1 = new RandomIndexWriter(Random(), src, Similarity, TimeZone);
+            w1.AddDocument(new Document());
+            w1.Commit();
+
+            Directory dest = NewDirectory();
+
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetWriteLockTimeout(1);
+            RandomIndexWriter w2 = new RandomIndexWriter(Random(), dest, iwc);
+
+            try
+            {
+                w2.AddIndexes(src);
+                Assert.Fail("did not hit expected exception");
+            }
+#pragma warning disable 168
+            catch (LockObtainFailedException lofe)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            IOUtils.Close(w1, w2, src, dest);
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestAllFilesHaveChecksumFooter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestAllFilesHaveChecksumFooter.cs b/src/Lucene.Net.Tests/Index/TestAllFilesHaveChecksumFooter.cs
new file mode 100644
index 0000000..8d71391
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestAllFilesHaveChecksumFooter.cs
@@ -0,0 +1,114 @@
+using Lucene.Net.Documents;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.Index
+{
+    using CodecUtil = Lucene.Net.Codecs.CodecUtil;
+    using CompoundFileDirectory = Lucene.Net.Store.CompoundFileDirectory;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexInput = Lucene.Net.Store.IndexInput;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using Lucene46Codec = Lucene.Net.Codecs.Lucene46.Lucene46Codec;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using NumericDocValuesField = NumericDocValuesField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// Test that a plain default codec puts CRC32 footers in all files.
+    /// </summary>
+    [TestFixture]
+    public class TestAllFilesHaveChecksumFooter : LuceneTestCase
+    {
+        [Test]
+        public virtual void Test()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetCodec(new Lucene46Codec());
+            RandomIndexWriter riw = new RandomIndexWriter(Random(), dir, conf);
+            Document doc = new Document();
+            // these fields should sometimes get term vectors, etc
+            Field idField = NewStringField("id", "", Field.Store.NO);
+            Field bodyField = NewTextField("body", "", Field.Store.NO);
+            Field dvField = new NumericDocValuesField("dv", 5);
+            doc.Add(idField);
+            doc.Add(bodyField);
+            doc.Add(dvField);
+            for (int i = 0; i < 100; i++)
+            {
+                idField.SetStringValue(Convert.ToString(i));
+                bodyField.SetStringValue(TestUtil.RandomUnicodeString(Random()));
+                riw.AddDocument(doc);
+                if (Random().Next(7) == 0)
+                {
+                    riw.Commit();
+                }
+                if (Random().Next(20) == 0)
+                {
+                    riw.DeleteDocuments(new Term("id", Convert.ToString(i)));
+                }
+            }
+            riw.Dispose();
+            CheckHeaders(dir);
+            dir.Dispose();
+        }
+
+        private void CheckHeaders(Directory dir)
+        {
+            foreach (string file in dir.ListAll())
+            {
+                if (file.Equals(IndexWriter.WRITE_LOCK_NAME))
+                {
+                    continue; // write.lock has no footer, that's ok
+                }
+                if (file.EndsWith(IndexFileNames.COMPOUND_FILE_EXTENSION))
+                {
+                    CompoundFileDirectory cfsDir = new CompoundFileDirectory(dir, file, NewIOContext(Random()), false);
+                    CheckHeaders(cfsDir); // recurse into cfs
+                    cfsDir.Dispose();
+                }
+                IndexInput @in = null;
+                bool success = false;
+                try
+                {
+                    @in = dir.OpenInput(file, NewIOContext(Random()));
+                    CodecUtil.ChecksumEntireFile(@in);
+                    success = true;
+                }
+                finally
+                {
+                    if (success)
+                    {
+                        IOUtils.Close(@in);
+                    }
+                    else
+                    {
+                        IOUtils.CloseWhileHandlingException(@in);
+                    }
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
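
The core of the check above is CodecUtil.ChecksumEntireFile, which reads a file
through to the end and validates its CRC32 footer. A minimal sketch of that
per-file check, assuming a Directory 'dir', an IOContext 'context', and a
'fileName' -- all names taken from the CheckHeaders method above:

    IndexInput input = null;
    bool success = false;
    try
    {
        input = dir.OpenInput(fileName, context);
        CodecUtil.ChecksumEntireFile(input); // throws if the footer is missing or corrupt
        success = true;
    }
    finally
    {
        // Close normally on success; suppress secondary exceptions otherwise.
        if (success)
            IOUtils.Close(input);
        else
            IOUtils.CloseWhileHandlingException(input);
    }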


[72/72] [abbrv] lucenenet git commit: Changed all BeforeClass() methods to override LuceneTestCase and call the base class (since we now have global setup there).

Posted by ni...@apache.org.
Changed all BeforeClass() methods to override LuceneTestCase and call the base class (since we now have global setup there).


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/7a494001
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/7a494001
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/7a494001

Branch: refs/heads/api-work
Commit: 7a4940010df346744e2131cea9860b0b4bcfa6ca
Parents: 9616324
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Mon Feb 27 06:35:13 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:35:13 2017 +0700

----------------------------------------------------------------------
 .../Search/SearchEquivalenceTestBase.cs                      | 8 +++++---
 .../Analysis/Core/TestRandomChains.cs                        | 4 +++-
 .../Analysis/Hunspell/TestCaseInsensitive.cs                 | 3 ++-
 .../Analysis/Hunspell/TestCircumfix.cs                       | 3 ++-
 .../Analysis/Hunspell/TestComplexPrefix.cs                   | 3 ++-
 .../Analysis/Hunspell/TestCondition.cs                       | 3 ++-
 .../Analysis/Hunspell/TestConv.cs                            | 3 ++-
 .../Analysis/Hunspell/TestDependencies.cs                    | 3 ++-
 .../Analysis/Hunspell/TestEscaped.cs                         | 3 ++-
 .../Analysis/Hunspell/TestFlagLong.cs                        | 3 ++-
 .../Analysis/Hunspell/TestFlagNum.cs                         | 3 ++-
 .../Analysis/Hunspell/TestHomonyms.cs                        | 3 ++-
 .../Analysis/Hunspell/TestHunspellStemFilter.cs              | 5 +++--
 .../Analysis/Hunspell/TestIgnore.cs                          | 3 ++-
 .../Analysis/Hunspell/TestMorph.cs                           | 3 ++-
 .../Analysis/Hunspell/TestOptionalCondition.cs               | 3 ++-
 .../Analysis/Hunspell/TestStemmer.cs                         | 3 ++-
 .../Analysis/Hunspell/TestTwoFold.cs                         | 3 ++-
 .../Analysis/Hunspell/TestTwoSuffixes.cs                     | 3 ++-
 .../Taxonomy/TestTaxonomyFacetAssociations.cs                | 4 +++-
 .../Flexible/Standard/TestNumericQueryParser.cs              | 4 +++-
 .../Flexible/Standard/TestQPHelper.cs                        | 3 ++-
 src/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs | 3 ++-
 src/Lucene.Net.Tests.QueryParser/Xml/TestParser.cs           | 4 +++-
 24 files changed, 56 insertions(+), 27 deletions(-)
----------------------------------------------------------------------
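
For reference, the pattern these changes apply is sketched below -- a minimal,
hypothetical fixture, assuming only the LuceneTestCase base class and the NUnit
attributes that appear in the diffs that follow:

    [TestFixture]
    public class MyFixture : LuceneTestCase
    {
        // Was: a [SetUp] or static [OneTimeSetUp] method that shadowed the base class.
        // Now: an instance override that chains to the global setup in
        // LuceneTestCase before doing any per-fixture initialization.
        [OneTimeSetUp]
        public override void BeforeClass()
        {
            base.BeforeClass();
            // per-fixture setup goes here
        }
    }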


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.TestFramework/Search/SearchEquivalenceTestBase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Search/SearchEquivalenceTestBase.cs b/src/Lucene.Net.TestFramework/Search/SearchEquivalenceTestBase.cs
index b1b5999..76c929a 100644
--- a/src/Lucene.Net.TestFramework/Search/SearchEquivalenceTestBase.cs
+++ b/src/Lucene.Net.TestFramework/Search/SearchEquivalenceTestBase.cs
@@ -61,9 +61,11 @@ namespace Lucene.Net.Search
         /// LUCENENET specific
         /// Is non-static because ClassEnvRule is no longer static.
         /// </summary>
-        [SetUp]
-        public void BeforeClass()
+        [OneTimeSetUp]
+        public override void BeforeClass()
         {
+            base.BeforeClass();
+
             Random random = Random();
             Directory = NewDirectory();
             Stopword = "" + RandomChar();
@@ -106,7 +108,7 @@ namespace Lucene.Net.Search
             iw.Dispose();
         }
 
-        [TearDown]
+        [OneTimeTearDown]
         public void AfterClass()
         {
             Reader.Dispose();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
index fbeb2f9..2f7ccb3 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Core/TestRandomChains.cs
@@ -181,8 +181,10 @@ namespace Lucene.Net.Analysis.Core
         }
 
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
+
             IEnumerable<Type> analysisClasses = typeof(StandardAnalyzer).GetTypeInfo().Assembly.GetTypes()
                 .Where(c => {
                     var typeInfo = c.GetTypeInfo();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestCaseInsensitive.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestCaseInsensitive.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestCaseInsensitive.cs
index be73de9..7721983 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestCaseInsensitive.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestCaseInsensitive.cs
@@ -22,8 +22,9 @@ namespace Lucene.Net.Analysis.Hunspell
     public class TestCaseInsensitive : StemmerTestBase
     {
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             Init(true, "simple.aff", "mixedcase.dic");
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestCircumfix.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestCircumfix.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestCircumfix.cs
index 86387a6..7342f2f 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestCircumfix.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestCircumfix.cs
@@ -22,8 +22,9 @@ namespace Lucene.Net.Analysis.Hunspell
     public class TestCircumfix_ : StemmerTestBase
     {
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             Init("circumfix.aff", "circumfix.dic");
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestComplexPrefix.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestComplexPrefix.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestComplexPrefix.cs
index e0f228c..73007ce 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestComplexPrefix.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestComplexPrefix.cs
@@ -22,8 +22,9 @@ namespace Lucene.Net.Analysis.Hunspell
     public class TestComplexPrefix : StemmerTestBase
     {
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             Init("complexprefix.aff", "complexprefix.dic");
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestCondition.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestCondition.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestCondition.cs
index 888c755..f65a6d7 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestCondition.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestCondition.cs
@@ -22,8 +22,9 @@ namespace Lucene.Net.Analysis.Hunspell
     public class TestCondition : StemmerTestBase
     {
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             Init("condition.aff", "condition.dic");
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestConv.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestConv.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestConv.cs
index e0b1a83..968b354 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestConv.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestConv.cs
@@ -22,8 +22,9 @@ namespace Lucene.Net.Analysis.Hunspell
     public class TestConv : StemmerTestBase
     {
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             Init("conv.aff", "conv.dic");
         }
         [Test]

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestDependencies.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestDependencies.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestDependencies.cs
index ce34433..e9ae993 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestDependencies.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestDependencies.cs
@@ -22,8 +22,9 @@ namespace Lucene.Net.Analysis.Hunspell
     public class TestDependencies_ : StemmerTestBase
     {
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             Init("dependencies.aff", "dependencies.dic");
         }
         [Test]

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestEscaped.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestEscaped.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestEscaped.cs
index a4608b6..ea0f586 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestEscaped.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestEscaped.cs
@@ -22,8 +22,9 @@ namespace Lucene.Net.Analysis.Hunspell
     public class TestEscaped : StemmerTestBase
     {
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             Init("escaped.aff", "escaped.dic");
         }
         [Test]

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestFlagLong.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestFlagLong.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestFlagLong.cs
index 0f84119..8464c9c 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestFlagLong.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestFlagLong.cs
@@ -22,8 +22,9 @@ namespace Lucene.Net.Analysis.Hunspell
     public class TestFlagLong : StemmerTestBase
     {
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             Init("flaglong.aff", "flaglong.dic");
         }
         [Test]

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestFlagNum.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestFlagNum.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestFlagNum.cs
index cc9a33e..9cdfacb 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestFlagNum.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestFlagNum.cs
@@ -22,8 +22,9 @@ namespace Lucene.Net.Analysis.Hunspell
     public class TestFlagNum : StemmerTestBase
     {
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             Init("flagnum.aff", "flagnum.dic");
         }
         [Test]

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestHomonyms.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestHomonyms.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestHomonyms.cs
index 716b61e..235a92c 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestHomonyms.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestHomonyms.cs
@@ -23,8 +23,9 @@ namespace Lucene.Net.Analysis.Hunspell
     {
 
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             Init("homonyms.aff", "homonyms.dic");
         }
         [Test]

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestHunspellStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestHunspellStemFilter.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestHunspellStemFilter.cs
index d1e2c20..799544d 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestHunspellStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestHunspellStemFilter.cs
@@ -30,8 +30,9 @@ namespace Lucene.Net.Analysis.Hunspell
         private static Dictionary dictionary;
 
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             System.IO.Stream affixStream = typeof(TestStemmer).getResourceAsStream("simple.aff");
             System.IO.Stream dictStream = typeof(TestStemmer).getResourceAsStream("simple.dic");
             try
@@ -45,7 +46,7 @@ namespace Lucene.Net.Analysis.Hunspell
         }
 
         [OneTimeTearDown]
-        public static void afterClass()
+        public static void AfterClass()
         {
             dictionary = null;
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestIgnore.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestIgnore.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestIgnore.cs
index c026f73..e0bbf63 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestIgnore.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestIgnore.cs
@@ -23,8 +23,9 @@ namespace Lucene.Net.Analysis.Hunspell
     {
 
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             Init("ignore.aff", "ignore.dic");
         }
         [Test]

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestMorph.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestMorph.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestMorph.cs
index 0cc6cae..7a022b0 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestMorph.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestMorph.cs
@@ -23,8 +23,9 @@ namespace Lucene.Net.Analysis.Hunspell
     {
 
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             Init("morph.aff", "morph.dic");
         }
         [Test]

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestOptionalCondition.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestOptionalCondition.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestOptionalCondition.cs
index 05028cb..58af52a 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestOptionalCondition.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestOptionalCondition.cs
@@ -22,8 +22,9 @@ namespace Lucene.Net.Analysis.Hunspell
     public class TestOptionalCondition : StemmerTestBase
     {
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             Init("optional-condition.aff", "condition.dic");
         }
         [Test]

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestStemmer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestStemmer.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestStemmer.cs
index cf8e4d9..f544a0c 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestStemmer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestStemmer.cs
@@ -23,8 +23,9 @@ namespace Lucene.Net.Analysis.Hunspell
     {
 
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             Init("simple.aff", "simple.dic");
         }
         [Test]

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestTwoFold.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestTwoFold.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestTwoFold.cs
index 93cbc4f..1d62a50 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestTwoFold.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestTwoFold.cs
@@ -23,8 +23,9 @@ namespace Lucene.Net.Analysis.Hunspell
     {
 
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             Init("twofold.aff", "morph.dic");
         }
         [Test]

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestTwoSuffixes.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestTwoSuffixes.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestTwoSuffixes.cs
index 7f51873..79ee1ec 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestTwoSuffixes.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Hunspell/TestTwoSuffixes.cs
@@ -23,8 +23,9 @@ namespace Lucene.Net.Analysis.Hunspell
     {
 
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             Init("twosuffixes.aff", "twosuffixes.dic");
         }
         [Test]

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetAssociations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetAssociations.cs b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetAssociations.cs
index c7e4c32..d67e1f5 100644
--- a/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetAssociations.cs
+++ b/src/Lucene.Net.Tests.Facet/Taxonomy/TestTaxonomyFacetAssociations.cs
@@ -50,8 +50,10 @@ namespace Lucene.Net.Facet.Taxonomy
         /// Is non-static because Similarity and TimeZone are not static.
         /// </summary>
         [OneTimeSetUp]
-        public void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
+
             dir = NewDirectory();
             taxoDir = NewDirectory();
             // preparations - index, taxonomy, content

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestNumericQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestNumericQueryParser.cs b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestNumericQueryParser.cs
index 2d48c3e..179cf27 100644
--- a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestNumericQueryParser.cs
+++ b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestNumericQueryParser.cs
@@ -74,8 +74,10 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard
         }
 
         [OneTimeSetUp]
-        public void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
+
             ANALYZER = new MockAnalyzer(Random());
 
             qp = new StandardQueryParser(ANALYZER);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestQPHelper.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestQPHelper.cs b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestQPHelper.cs
index 4246b8b..e157111 100644
--- a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestQPHelper.cs
+++ b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestQPHelper.cs
@@ -50,8 +50,9 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard
         public static Analyzer qpAnalyzer;
 
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             qpAnalyzer = new QPTestAnalyzer();
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs b/src/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
index a0604d1..f4471ab 100644
--- a/src/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
+++ b/src/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
@@ -26,8 +26,9 @@ namespace Lucene.Net.QueryParsers.Util
         public static Analyzer qpAnalyzer;
 
         [OneTimeSetUp]
-        public static void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
             qpAnalyzer = new QPTestAnalyzer();
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7a494001/src/Lucene.Net.Tests.QueryParser/Xml/TestParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Xml/TestParser.cs b/src/Lucene.Net.Tests.QueryParser/Xml/TestParser.cs
index 7f8290a..f8de85a 100644
--- a/src/Lucene.Net.Tests.QueryParser/Xml/TestParser.cs
+++ b/src/Lucene.Net.Tests.QueryParser/Xml/TestParser.cs
@@ -39,8 +39,10 @@ namespace Lucene.Net.QueryParsers.Xml
         private static IndexSearcher searcher;
 
         [OneTimeSetUp]
-        public void BeforeClass()
+        public override void BeforeClass()
         {
+            base.BeforeClass();
+
             // TODO: rewrite test (this needs to set QueryParser.enablePositionIncrements, too, for work with CURRENT):
             Analyzer analyzer = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET);
             //initialize the parser


[64/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Renamed Codecs\ramonly\ to Codecs\RAMOnly\

Posted by ni...@apache.org.
Lucene.Net.TestFramework: Renamed Codecs\ramonly\ to Codecs\RAMOnly\


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/49a0460f
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/49a0460f
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/49a0460f

Branch: refs/heads/api-work
Commit: 49a0460f0f0a75567aa77bbb25ff52df82b787f3
Parents: 63b45ce
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 26 03:35:30 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:17:59 2017 +0700

----------------------------------------------------------------------
 .../Codecs/RAMOnly/RAMOnlyPostingsFormat.cs     | 731 +++++++++++++++++++
 .../Codecs/ramonly/RAMOnlyPostingsFormat.cs     | 731 -------------------
 .../Lucene.Net.TestFramework.csproj             |   2 +-
 3 files changed, 732 insertions(+), 732 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/49a0460f/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
new file mode 100644
index 0000000..1ec6525
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/RAMOnly/RAMOnlyPostingsFormat.cs
@@ -0,0 +1,731 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Linq;
+
+namespace Lucene.Net.Codecs.RAMOnly
+{
+    using Lucene.Net.Support;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum;
+    using DocsEnum = Lucene.Net.Index.DocsEnum;
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
+    using IndexInput = Lucene.Net.Store.IndexInput;
+    using IndexOptions = Lucene.Net.Index.IndexOptions;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using RamUsageEstimator = Lucene.Net.Util.RamUsageEstimator;
+    using SegmentReadState = Lucene.Net.Index.SegmentReadState;
+    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
+    using Terms = Lucene.Net.Index.Terms;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+
+    /// <summary>
+    /// Stores all postings data in RAM, but writes a small
+    /// token (header + single int) to identify which "slot" the
+    /// index is using in the RAM HashMap.
+    ///
+    /// NOTE: this codec sorts terms in reverse unicode code point order!
+    /// </summary>
+    [PostingsFormatName("RAMOnly")] // LUCENENET specific - using PostingsFormatName attribute to ensure the default name passed from subclasses is the same as this class name
+    public sealed class RAMOnlyPostingsFormat : PostingsFormat
+    {
+        // For fun, test that we can override how terms are
+        // sorted, and basic things still work -- this comparer
+        // sorts in reversed unicode code point order:
+        private static readonly IComparer<BytesRef> reverseUnicodeComparer = new ComparerAnonymousInnerClassHelper();
+
+#pragma warning disable 659 // LUCENENET: Overrides Equals but not GetHashCode
+        private class ComparerAnonymousInnerClassHelper : IComparer<BytesRef>
+#pragma warning restore 659
+        {
+            public ComparerAnonymousInnerClassHelper()
+            {
+            }
+
+            public virtual int Compare(BytesRef t1, BytesRef t2)
+            {
+                var b1 = t1.Bytes;
+                var b2 = t2.Bytes;
+                int b1Stop;
+                int b1Upto = t1.Offset;
+                int b2Upto = t2.Offset;
+                if (t1.Length < t2.Length)
+                {
+                    b1Stop = t1.Offset + t1.Length;
+                }
+                else
+                {
+                    b1Stop = t1.Offset + t2.Length;
+                }
+                while (b1Upto < b1Stop)
+                {
+                    int bb1 = b1[b1Upto++] & 0xff;
+                    int bb2 = b2[b2Upto++] & 0xff;
+                    if (bb1 != bb2)
+                    {
+                        //System.out.println("cmp 1=" + t1 + " 2=" + t2 + " return " + (bb2-bb1));
+                        return bb2 - bb1;
+                    }
+                }
+
+                // One is prefix of another, or they are equal
+                return t2.Length - t1.Length;
+            }
+
+            public override bool Equals(object other)
+            {
+                return this == other;
+            }
+        }
+
+        public RAMOnlyPostingsFormat()
+            : base()
+        {
+        }
+
+        // Postings state:
+        internal class RAMPostings : FieldsProducer
+        {
+            internal readonly IDictionary<string, RAMField> FieldToTerms = new SortedDictionary<string, RAMField>();
+
+            public override Terms GetTerms(string field)
+            {
+                return FieldToTerms[field];
+            }
+
+            public override int Count
+            {
+                get { return FieldToTerms.Count; }
+            }
+
+            public override IEnumerator<string> GetEnumerator()
+            {
+                return FieldToTerms.Keys.GetEnumerator();
+            }
+
+            public override void Dispose()
+            {
+            }
+
+            public override long RamBytesUsed()
+            {
+                long sizeInBytes = 0;
+                foreach (RAMField field in FieldToTerms.Values)
+                {
+                    sizeInBytes += field.RamBytesUsed();
+                }
+                return sizeInBytes;
+            }
+
+            public override void CheckIntegrity()
+            {
+            }
+        }
+
+        internal class RAMField : Terms
+        {
+            internal readonly string Field;
+            internal readonly SortedDictionary<string, RAMTerm> TermToDocs = new SortedDictionary<string, RAMTerm>();
+            internal long SumTotalTermFreq_Renamed;
+            internal long SumDocFreq_Renamed;
+            internal int DocCount_Renamed;
+            internal readonly FieldInfo Info;
+
+            internal RAMField(string field, FieldInfo info)
+            {
+                this.Field = field;
+                this.Info = info;
+            }
+
+            /// <summary>
+            /// Returns approximate RAM bytes used </summary>
+            public virtual long RamBytesUsed()
+            {
+                long sizeInBytes = 0;
+                foreach (RAMTerm term in TermToDocs.Values)
+                {
+                    sizeInBytes += term.RamBytesUsed();
+                }
+                return sizeInBytes;
+            }
+
+            public override long Count
+            {
+                get { return TermToDocs.Count; }
+            }
+
+            public override long SumTotalTermFreq
+            {
+                get
+                {
+                    return SumTotalTermFreq_Renamed;
+                }
+            }
+
+            public override long SumDocFreq
+            {
+                get
+                {
+                    return SumDocFreq_Renamed;
+                }
+            }
+
+            public override int DocCount
+            {
+                get
+                {
+                    return DocCount_Renamed;
+                }
+            }
+
+            public override TermsEnum GetIterator(TermsEnum reuse)
+            {
+                return new RAMTermsEnum(this);
+            }
+
+            public override IComparer<BytesRef> Comparer
+            {
+                get
+                {
+                    return reverseUnicodeComparer;
+                }
+            }
+
+            public override bool HasFreqs
+            {
+                get { return Info.IndexOptions >= IndexOptions.DOCS_AND_FREQS; }
+            }
+
+            public override bool HasOffsets
+            {
+                get { return Info.IndexOptions >= IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS; }
+            }
+
+            public override bool HasPositions
+            {
+                get { return Info.IndexOptions >= IndexOptions.DOCS_AND_FREQS_AND_POSITIONS; }
+            }
+
+            public override bool HasPayloads
+            {
+                get { return Info.HasPayloads; }
+            }
+        }
+
+        internal class RAMTerm
+        {
+            internal readonly string Term;
+            internal long TotalTermFreq;
+            internal readonly IList<RAMDoc> Docs = new List<RAMDoc>();
+
+            public RAMTerm(string term)
+            {
+                this.Term = term;
+            }
+
+            /// <summary>
+            /// Returns approximate RAM bytes used </summary>
+            public virtual long RamBytesUsed()
+            {
+                long sizeInBytes = 0;
+                foreach (RAMDoc rDoc in Docs)
+                {
+                    sizeInBytes += rDoc.RamBytesUsed();
+                }
+                return sizeInBytes;
+            }
+        }
+
+        internal class RAMDoc
+        {
+            internal readonly int DocID;
+            internal readonly int[] Positions;
+            internal byte[][] Payloads;
+
+            public RAMDoc(int docID, int freq)
+            {
+                this.DocID = docID;
+                Positions = new int[freq];
+            }
+
+            /// <summary>
+            /// Returns approximate RAM bytes used </summary>
+            public virtual long RamBytesUsed()
+            {
+                long sizeInBytes = 0;
+                sizeInBytes += (Positions != null) ? RamUsageEstimator.SizeOf(Positions) : 0;
+
+                if (Payloads != null)
+                {
+                    foreach (var payload in Payloads)
+                    {
+                        sizeInBytes += (payload != null) ? RamUsageEstimator.SizeOf(payload) : 0;
+                    }
+                }
+                return sizeInBytes;
+            }
+        }
+
+        // Classes for writing to the postings state
+        private class RAMFieldsConsumer : FieldsConsumer
+        {
+            internal readonly RAMPostings Postings;
+            internal readonly RAMTermsConsumer TermsConsumer = new RAMTermsConsumer();
+
+            public RAMFieldsConsumer(RAMPostings postings)
+            {
+                this.Postings = postings;
+            }
+
+            public override TermsConsumer AddField(FieldInfo field)
+            {
+                if (field.IndexOptions >= IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
+                {
+                    throw new System.NotSupportedException("this codec cannot index offsets");
+                }
+                RAMField ramField = new RAMField(field.Name, field);
+                Postings.FieldToTerms[field.Name] = ramField;
+                TermsConsumer.Reset(ramField);
+                return TermsConsumer;
+            }
+
+            public override void Dispose()
+            {
+                // TODO: finalize stuff
+            }
+        }
+
+        private class RAMTermsConsumer : TermsConsumer
+        {
+            internal RAMField Field;
+            internal readonly RAMPostingsWriterImpl PostingsWriter = new RAMPostingsWriterImpl();
+            internal RAMTerm Current;
+
+            internal virtual void Reset(RAMField field)
+            {
+                this.Field = field;
+            }
+
+            public override PostingsConsumer StartTerm(BytesRef text)
+            {
+                string term = text.Utf8ToString();
+                Current = new RAMTerm(term);
+                PostingsWriter.Reset(Current);
+                return PostingsWriter;
+            }
+
+            public override IComparer<BytesRef> Comparer
+            {
+                get
+                {
+                    return BytesRef.UTF8SortedAsUnicodeComparer;
+                }
+            }
+
+            public override void FinishTerm(BytesRef text, TermStats stats)
+            {
+                Debug.Assert(stats.DocFreq > 0);
+                Debug.Assert(stats.DocFreq == Current.Docs.Count);
+                Current.TotalTermFreq = stats.TotalTermFreq;
+                Field.TermToDocs[Current.Term] = Current;
+            }
+
+            public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount)
+            {
+                Field.SumTotalTermFreq_Renamed = sumTotalTermFreq;
+                Field.SumDocFreq_Renamed = sumDocFreq;
+                Field.DocCount_Renamed = docCount;
+            }
+        }
+
+        internal class RAMPostingsWriterImpl : PostingsConsumer
+        {
+            internal RAMTerm Term;
+            internal RAMDoc Current;
+            internal int PosUpto = 0;
+
+            public virtual void Reset(RAMTerm term)
+            {
+                this.Term = term;
+            }
+
+            public override void StartDoc(int docID, int freq)
+            {
+                Current = new RAMDoc(docID, freq);
+                Term.Docs.Add(Current);
+                PosUpto = 0;
+            }
+
+            public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset)
+            {
+                Debug.Assert(startOffset == -1);
+                Debug.Assert(endOffset == -1);
+                Current.Positions[PosUpto] = position;
+                if (payload != null && payload.Length > 0)
+                {
+                    if (Current.Payloads == null)
+                    {
+                        Current.Payloads = new byte[Current.Positions.Length][];
+                    }
+                    var bytes = Current.Payloads[PosUpto] = new byte[payload.Length];
+                    Array.Copy(payload.Bytes, payload.Offset, bytes, 0, payload.Length);
+                }
+                PosUpto++;
+            }
+
+            public override void FinishDoc()
+            {
+                Debug.Assert(PosUpto == Current.Positions.Length);
+            }
+        }
+
+        internal class RAMTermsEnum : TermsEnum
+        {
+            internal IEnumerator<string> It;
+            internal string Current;
+            internal readonly RAMField RamField;
+
+            public RAMTermsEnum(RAMField field)
+            {
+                this.RamField = field;
+            }
+
+            public override IComparer<BytesRef> Comparer
+            {
+                get
+                {
+                    return BytesRef.UTF8SortedAsUnicodeComparer;
+                }
+            }
+
+            public override BytesRef Next()
+            {
+                if (It == null)
+                {
+                    if (Current == null)
+                    {
+                        It = RamField.TermToDocs.Keys.GetEnumerator();
+                    }
+                    else
+                    {
+                        //It = RamField.TermToDocs.tailMap(Current).Keys.GetEnumerator();
+                        It = RamField.TermToDocs.Where(kvpair => String.Compare(kvpair.Key, Current) >= 0).ToDictionary(kvpair => kvpair.Key, kvpair => kvpair.Value).Keys.GetEnumerator();
+                    }
+                }
+                if (It.MoveNext())
+                {
+                    Current = It.Current;
+                    return new BytesRef(Current);
+                }
+                else
+                {
+                    return null;
+                }
+            }
+
+            public override SeekStatus SeekCeil(BytesRef term)
+            {
+                Current = term.Utf8ToString();
+                It = null;
+                if (RamField.TermToDocs.ContainsKey(Current))
+                {
+                    return SeekStatus.FOUND;
+                }
+                else
+                {
+                    if (Current.CompareTo(RamField.TermToDocs.Last().Key) > 0)
+                    {
+                        return SeekStatus.END;
+                    }
+                    else
+                    {
+                        return SeekStatus.NOT_FOUND;
+                    }
+                }
+            }
+
+            public override void SeekExact(long ord)
+            {
+                throw new System.NotSupportedException();
+            }
+
+            public override long Ord
+            {
+                get { throw new System.NotSupportedException(); }
+            }
+
+            public override BytesRef Term
+            {
+                get
+                {
+                    // TODO: reuse BytesRef
+                    return new BytesRef(Current);
+                }
+            }
+
+            public override int DocFreq
+            {
+                get { return RamField.TermToDocs[Current].Docs.Count; }
+            }
+
+            public override long TotalTermFreq
+            {
+                get { return RamField.TermToDocs[Current].TotalTermFreq; }
+            }
+
+            public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, int flags)
+            {
+                return new RAMDocsEnum(RamField.TermToDocs[Current], liveDocs);
+            }
+
+            public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, int flags)
+            {
+                return new RAMDocsAndPositionsEnum(RamField.TermToDocs[Current], liveDocs);
+            }
+        }
+
+        private class RAMDocsEnum : DocsEnum
+        {
+            private readonly RAMTerm RamTerm;
+            private readonly IBits LiveDocs;
+            private RAMDoc Current;
+            private int Upto = -1;
+#pragma warning disable 414
+            private int PosUpto = 0; // LUCENENET NOTE: Not used
+#pragma warning restore 414
+
+            public RAMDocsEnum(RAMTerm ramTerm, IBits liveDocs)
+            {
+                this.RamTerm = ramTerm;
+                this.LiveDocs = liveDocs;
+            }
+
+            public override int Advance(int targetDocID)
+            {
+                return SlowAdvance(targetDocID);
+            }
+
+            // TODO: override bulk read, for better perf
+            public override int NextDoc()
+            {
+                while (true)
+                {
+                    Upto++;
+                    if (Upto < RamTerm.Docs.Count)
+                    {
+                        Current = RamTerm.Docs[Upto];
+                        if (LiveDocs == null || LiveDocs.Get(Current.DocID))
+                        {
+                            PosUpto = 0;
+                            return Current.DocID;
+                        }
+                    }
+                    else
+                    {
+                        return NO_MORE_DOCS;
+                    }
+                }
+            }
+
+            public override int Freq
+            {
+                get { return Current.Positions.Length; }
+            }
+
+            public override int DocID
+            {
+                get { return Current.DocID; }
+            }
+
+            public override long GetCost()
+            {
+                return RamTerm.Docs.Count;
+            }
+        }
+
+        private class RAMDocsAndPositionsEnum : DocsAndPositionsEnum
+        {
+            private readonly RAMTerm RamTerm;
+            private readonly IBits LiveDocs;
+            private RAMDoc Current;
+            private int Upto = -1;
+            private int PosUpto = 0;
+
+            public RAMDocsAndPositionsEnum(RAMTerm ramTerm, IBits liveDocs)
+            {
+                this.RamTerm = ramTerm;
+                this.LiveDocs = liveDocs;
+            }
+
+            public override int Advance(int targetDocID)
+            {
+                return SlowAdvance(targetDocID);
+            }
+
+            // TODO: override bulk read, for better perf
+            public override int NextDoc()
+            {
+                while (true)
+                {
+                    Upto++;
+                    if (Upto < RamTerm.Docs.Count)
+                    {
+                        Current = RamTerm.Docs[Upto];
+                        if (LiveDocs == null || LiveDocs.Get(Current.DocID))
+                        {
+                            PosUpto = 0;
+                            return Current.DocID;
+                        }
+                    }
+                    else
+                    {
+                        return NO_MORE_DOCS;
+                    }
+                }
+            }
+
+            public override int Freq
+            {
+                get { return Current.Positions.Length; }
+            }
+
+            public override int DocID
+            {
+                get { return Current.DocID; }
+            }
+
+            public override int NextPosition()
+            {
+                return Current.Positions[PosUpto++];
+            }
+
+            public override int StartOffset
+            {
+                get { return -1; }
+            }
+
+            public override int EndOffset
+            {
+                get { return -1; }
+            }
+
+            public override BytesRef GetPayload()
+            {
+                if (Current.Payloads != null && Current.Payloads[PosUpto - 1] != null)
+                {
+                    return new BytesRef(Current.Payloads[PosUpto - 1]);
+                }
+                else
+                {
+                    return null;
+                }
+            }
+
+            public override long GetCost()
+            {
+                return RamTerm.Docs.Count;
+            }
+        }
+
+        // Holds all indexes created, keyed by the ID assigned in fieldsConsumer
+        private readonly IDictionary<int?, RAMPostings> State = new Dictionary<int?, RAMPostings>();
+
+        private readonly AtomicInt64 NextID = new AtomicInt64();
+
+        private readonly string RAM_ONLY_NAME = "RAMOnly";
+        private const int VERSION_START = 0;
+        private const int VERSION_LATEST = VERSION_START;
+
+        private const string ID_EXTENSION = "id";
+
+        public override FieldsConsumer FieldsConsumer(SegmentWriteState writeState)
+        {
+            int id = (int)NextID.IncrementAndGet();
+
+            // TODO -- ok to do this up front instead of
+            // on close....?  should be ok?
+            // Write our ID:
+            string idFileName = IndexFileNames.SegmentFileName(writeState.SegmentInfo.Name, writeState.SegmentSuffix, ID_EXTENSION);
+            IndexOutput @out = writeState.Directory.CreateOutput(idFileName, writeState.Context);
+            bool success = false;
+            try
+            {
+                CodecUtil.WriteHeader(@out, RAM_ONLY_NAME, VERSION_LATEST);
+                @out.WriteVInt32(id);
+                success = true;
+            }
+            finally
+            {
+                if (!success)
+                {
+                    IOUtils.CloseWhileHandlingException(@out);
+                }
+                else
+                {
+                    IOUtils.Close(@out);
+                }
+            }
+
+            RAMPostings postings = new RAMPostings();
+            RAMFieldsConsumer consumer = new RAMFieldsConsumer(postings);
+
+            lock (State)
+            {
+                State[id] = postings;
+            }
+            return consumer;
+        }
+
+        public override FieldsProducer FieldsProducer(SegmentReadState readState)
+        {
+            // Load our ID:
+            string idFileName = IndexFileNames.SegmentFileName(readState.SegmentInfo.Name, readState.SegmentSuffix, ID_EXTENSION);
+            IndexInput @in = readState.Directory.OpenInput(idFileName, readState.Context);
+            bool success = false;
+            int id;
+            try
+            {
+                CodecUtil.CheckHeader(@in, RAM_ONLY_NAME, VERSION_START, VERSION_LATEST);
+                id = @in.ReadVInt32();
+                success = true;
+            }
+            finally
+            {
+                if (!success)
+                {
+                    IOUtils.CloseWhileHandlingException(@in);
+                }
+                else
+                {
+                    IOUtils.Close(@in);
+                }
+            }
+
+            lock (State)
+            {
+                return State[id];
+            }
+        }
+    }
+}
\ No newline at end of file
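
Worth noting in the file above: FieldsConsumer/FieldsProducer persist only a codec header plus an integer slot ID, while the postings themselves stay in the shared State dictionary. A minimal stand-alone sketch of that handshake (hypothetical names, no Lucene types) could look like this:

    using System;
    using System.Collections.Generic;

    // Hypothetical model of the RAMOnly "slot" handshake: only an integer ID
    // is persisted per segment, while the postings stay in a process-wide
    // map, so an index written this way cannot be opened by another process.
    internal static class SlotRegistryDemo
    {
        private static readonly Dictionary<int, object> state = new Dictionary<int, object>();
        private static int nextId;

        // Write side (cf. FieldsConsumer above): allocate a slot, stash the
        // postings, and return the ID -- the only value written to the .id file.
        internal static int Write(object postings)
        {
            lock (state)
            {
                int id = ++nextId;
                state[id] = postings;
                return id;
            }
        }

        // Read side (cf. FieldsProducer above): the ID read back from the
        // .id file is enough to recover the in-memory postings.
        internal static object Read(int id)
        {
            lock (state)
            {
                return state[id];
            }
        }

        internal static void Main()
        {
            int id = Write("postings for segment _0");
            Console.WriteLine(Read(id)); // postings for segment _0
        }
    }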

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/49a0460f/src/Lucene.Net.TestFramework/Codecs/ramonly/RAMOnlyPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/ramonly/RAMOnlyPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/ramonly/RAMOnlyPostingsFormat.cs
deleted file mode 100644
index b1d9491..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/ramonly/RAMOnlyPostingsFormat.cs
+++ /dev/null
@@ -1,731 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.Linq;
-
-namespace Lucene.Net.Codecs.ramonly
-{
-    using Lucene.Net.Support;
-    using IBits = Lucene.Net.Util.IBits;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum;
-    using DocsEnum = Lucene.Net.Index.DocsEnum;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexInput = Lucene.Net.Store.IndexInput;
-    using IndexOptions = Lucene.Net.Index.IndexOptions;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using RamUsageEstimator = Lucene.Net.Util.RamUsageEstimator;
-    using SegmentReadState = Lucene.Net.Index.SegmentReadState;
-    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
-    using Terms = Lucene.Net.Index.Terms;
-    using TermsEnum = Lucene.Net.Index.TermsEnum;
-
-    /// <summary>
-    /// Stores all postings data in RAM, but writes a small
-    ///  token (header + single int) to identify which "slot" the
-    ///  index is using in RAM HashMap.
-    ///
-    ///  NOTE: this codec sorts terms by reverse-unicode-order!
-    /// </summary>
-    [PostingsFormatName("RAMOnly")] // LUCENENET specific - using PostingsFormatName attribute to ensure the default name passed from subclasses is the same as this class name
-    public sealed class RAMOnlyPostingsFormat : PostingsFormat
-    {
-        // For fun, test that we can override how terms are
-        // sorted, and basic things still work -- this comparer
-        // sorts in reversed unicode code point order:
-        private static readonly IComparer<BytesRef> reverseUnicodeComparer = new ComparerAnonymousInnerClassHelper();
-
-#pragma warning disable 659 // LUCENENET: Overrides Equals but not GetHashCode
-        private class ComparerAnonymousInnerClassHelper : IComparer<BytesRef>
-#pragma warning restore 659
-        {
-            public ComparerAnonymousInnerClassHelper()
-            {
-            }
-
-            public virtual int Compare(BytesRef t1, BytesRef t2)
-            {
-                var b1 = t1.Bytes;
-                var b2 = t2.Bytes;
-                int b1Stop;
-                int b1Upto = t1.Offset;
-                int b2Upto = t2.Offset;
-                if (t1.Length < t2.Length)
-                {
-                    b1Stop = t1.Offset + t1.Length;
-                }
-                else
-                {
-                    b1Stop = t1.Offset + t2.Length;
-                }
-                while (b1Upto < b1Stop)
-                {
-                    int bb1 = b1[b1Upto++] & 0xff;
-                    int bb2 = b2[b2Upto++] & 0xff;
-                    if (bb1 != bb2)
-                    {
-                        //System.out.println("cmp 1=" + t1 + " 2=" + t2 + " return " + (bb2-bb1));
-                        return bb2 - bb1;
-                    }
-                }
-
-                // One is prefix of another, or they are equal
-                return t2.Length - t1.Length;
-            }
-
-            public override bool Equals(object other)
-            {
-                return this == other;
-            }
-        }
-
-        public RAMOnlyPostingsFormat()
-            : base()
-        {
-        }
-
-        // Postings state:
-        internal class RAMPostings : FieldsProducer
-        {
-            internal readonly IDictionary<string, RAMField> FieldToTerms = new SortedDictionary<string, RAMField>();
-
-            public override Terms GetTerms(string field)
-            {
-                return FieldToTerms[field];
-            }
-
-            public override int Count
-            {
-                get { return FieldToTerms.Count; }
-            }
-
-            public override IEnumerator<string> GetEnumerator()
-            {
-                return FieldToTerms.Keys.GetEnumerator();
-            }
-
-            public override void Dispose()
-            {
-            }
-
-            public override long RamBytesUsed()
-            {
-                long sizeInBytes = 0;
-                foreach (RAMField field in FieldToTerms.Values)
-                {
-                    sizeInBytes += field.RamBytesUsed();
-                }
-                return sizeInBytes;
-            }
-
-            public override void CheckIntegrity()
-            {
-            }
-        }
-
-        internal class RAMField : Terms
-        {
-            internal readonly string Field;
-            internal readonly SortedDictionary<string, RAMTerm> TermToDocs = new SortedDictionary<string, RAMTerm>();
-            internal long SumTotalTermFreq_Renamed;
-            internal long SumDocFreq_Renamed;
-            internal int DocCount_Renamed;
-            internal readonly FieldInfo Info;
-
-            internal RAMField(string field, FieldInfo info)
-            {
-                this.Field = field;
-                this.Info = info;
-            }
-
-            /// <summary>
-            /// Returns approximate RAM bytes used </summary>
-            public virtual long RamBytesUsed()
-            {
-                long sizeInBytes = 0;
-                foreach (RAMTerm term in TermToDocs.Values)
-                {
-                    sizeInBytes += term.RamBytesUsed();
-                }
-                return sizeInBytes;
-            }
-
-            public override long Count
-            {
-                get { return TermToDocs.Count; }
-            }
-
-            public override long SumTotalTermFreq
-            {
-                get
-                {
-                    return SumTotalTermFreq_Renamed;
-                }
-            }
-
-            public override long SumDocFreq
-            {
-                get
-                {
-                    return SumDocFreq_Renamed;
-                }
-            }
-
-            public override int DocCount
-            {
-                get
-                {
-                    return DocCount_Renamed;
-                }
-            }
-
-            public override TermsEnum GetIterator(TermsEnum reuse)
-            {
-                return new RAMTermsEnum(this);
-            }
-
-            public override IComparer<BytesRef> Comparer
-            {
-                get
-                {
-                    return reverseUnicodeComparer;
-                }
-            }
-
-            public override bool HasFreqs
-            {
-                get { return Info.IndexOptions >= IndexOptions.DOCS_AND_FREQS; }
-            }
-
-            public override bool HasOffsets
-            {
-                get { return Info.IndexOptions >= IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS; }
-            }
-
-            public override bool HasPositions
-            {
-                get { return Info.IndexOptions >= IndexOptions.DOCS_AND_FREQS_AND_POSITIONS; }
-            }
-
-            public override bool HasPayloads
-            {
-                get { return Info.HasPayloads; }
-            }
-        }
-
-        internal class RAMTerm
-        {
-            internal readonly string Term;
-            internal long TotalTermFreq;
-            internal readonly IList<RAMDoc> Docs = new List<RAMDoc>();
-
-            public RAMTerm(string term)
-            {
-                this.Term = term;
-            }
-
-            /// <summary>
-            /// Returns approximate RAM bytes used </summary>
-            public virtual long RamBytesUsed()
-            {
-                long sizeInBytes = 0;
-                foreach (RAMDoc rDoc in Docs)
-                {
-                    sizeInBytes += rDoc.RamBytesUsed();
-                }
-                return sizeInBytes;
-            }
-        }
-
-        internal class RAMDoc
-        {
-            internal readonly int DocID;
-            internal readonly int[] Positions;
-            internal byte[][] Payloads;
-
-            public RAMDoc(int docID, int freq)
-            {
-                this.DocID = docID;
-                Positions = new int[freq];
-            }
-
-            /// <summary>
-            /// Returns approximate RAM bytes used </summary>
-            public virtual long RamBytesUsed()
-            {
-                long sizeInBytes = 0;
-                sizeInBytes += (Positions != null) ? RamUsageEstimator.SizeOf(Positions) : 0;
-
-                if (Payloads != null)
-                {
-                    foreach (var payload in Payloads)
-                    {
-                        sizeInBytes += (payload != null) ? RamUsageEstimator.SizeOf(payload) : 0;
-                    }
-                }
-                return sizeInBytes;
-            }
-        }
-
-        // Classes for writing to the postings state
-        private class RAMFieldsConsumer : FieldsConsumer
-        {
-            internal readonly RAMPostings Postings;
-            internal readonly RAMTermsConsumer TermsConsumer = new RAMTermsConsumer();
-
-            public RAMFieldsConsumer(RAMPostings postings)
-            {
-                this.Postings = postings;
-            }
-
-            public override TermsConsumer AddField(FieldInfo field)
-            {
-                if (field.IndexOptions >= IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
-                {
-                    throw new System.NotSupportedException("this codec cannot index offsets");
-                }
-                RAMField ramField = new RAMField(field.Name, field);
-                Postings.FieldToTerms[field.Name] = ramField;
-                TermsConsumer.Reset(ramField);
-                return TermsConsumer;
-            }
-
-            public override void Dispose()
-            {
-                // TODO: finalize stuff
-            }
-        }
-
-        private class RAMTermsConsumer : TermsConsumer
-        {
-            internal RAMField Field;
-            internal readonly RAMPostingsWriterImpl PostingsWriter = new RAMPostingsWriterImpl();
-            internal RAMTerm Current;
-
-            internal virtual void Reset(RAMField field)
-            {
-                this.Field = field;
-            }
-
-            public override PostingsConsumer StartTerm(BytesRef text)
-            {
-                string term = text.Utf8ToString();
-                Current = new RAMTerm(term);
-                PostingsWriter.Reset(Current);
-                return PostingsWriter;
-            }
-
-            public override IComparer<BytesRef> Comparer
-            {
-                get
-                {
-                    return BytesRef.UTF8SortedAsUnicodeComparer;
-                }
-            }
-
-            public override void FinishTerm(BytesRef text, TermStats stats)
-            {
-                Debug.Assert(stats.DocFreq > 0);
-                Debug.Assert(stats.DocFreq == Current.Docs.Count);
-                Current.TotalTermFreq = stats.TotalTermFreq;
-                Field.TermToDocs[Current.Term] = Current;
-            }
-
-            public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount)
-            {
-                Field.SumTotalTermFreq_Renamed = sumTotalTermFreq;
-                Field.SumDocFreq_Renamed = sumDocFreq;
-                Field.DocCount_Renamed = docCount;
-            }
-        }
-
-        internal class RAMPostingsWriterImpl : PostingsConsumer
-        {
-            internal RAMTerm Term;
-            internal RAMDoc Current;
-            internal int PosUpto = 0;
-
-            public virtual void Reset(RAMTerm term)
-            {
-                this.Term = term;
-            }
-
-            public override void StartDoc(int docID, int freq)
-            {
-                Current = new RAMDoc(docID, freq);
-                Term.Docs.Add(Current);
-                PosUpto = 0;
-            }
-
-            public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset)
-            {
-                Debug.Assert(startOffset == -1);
-                Debug.Assert(endOffset == -1);
-                Current.Positions[PosUpto] = position;
-                if (payload != null && payload.Length > 0)
-                {
-                    if (Current.Payloads == null)
-                    {
-                        Current.Payloads = new byte[Current.Positions.Length][];
-                    }
-                    var bytes = Current.Payloads[PosUpto] = new byte[payload.Length];
-                    Array.Copy(payload.Bytes, payload.Offset, bytes, 0, payload.Length);
-                }
-                PosUpto++;
-            }
-
-            public override void FinishDoc()
-            {
-                Debug.Assert(PosUpto == Current.Positions.Length);
-            }
-        }
-
-        internal class RAMTermsEnum : TermsEnum
-        {
-            internal IEnumerator<string> It;
-            internal string Current;
-            internal readonly RAMField RamField;
-
-            public RAMTermsEnum(RAMField field)
-            {
-                this.RamField = field;
-            }
-
-            public override IComparer<BytesRef> Comparer
-            {
-                get
-                {
-                    return BytesRef.UTF8SortedAsUnicodeComparer;
-                }
-            }
-
-            public override BytesRef Next()
-            {
-                if (It == null)
-                {
-                    if (Current == null)
-                    {
-                        It = RamField.TermToDocs.Keys.GetEnumerator();
-                    }
-                    else
-                    {
-                        //It = RamField.TermToDocs.tailMap(Current).Keys.GetEnumerator();
-                        It = RamField.TermToDocs.Where(kvpair => String.Compare(kvpair.Key, Current) >= 0).ToDictionary(kvpair => kvpair.Key, kvpair => kvpair.Value).Keys.GetEnumerator();
-                    }
-                }
-                if (It.MoveNext())
-                {
-                    Current = It.Current;
-                    return new BytesRef(Current);
-                }
-                else
-                {
-                    return null;
-                }
-            }
-
-            public override SeekStatus SeekCeil(BytesRef term)
-            {
-                Current = term.Utf8ToString();
-                It = null;
-                if (RamField.TermToDocs.ContainsKey(Current))
-                {
-                    return SeekStatus.FOUND;
-                }
-                else
-                {
-                    if (Current.CompareTo(RamField.TermToDocs.Last().Key) > 0)
-                    {
-                        return SeekStatus.END;
-                    }
-                    else
-                    {
-                        return SeekStatus.NOT_FOUND;
-                    }
-                }
-            }
-
-            public override void SeekExact(long ord)
-            {
-                throw new System.NotSupportedException();
-            }
-
-            public override long Ord
-            {
-                get { throw new System.NotSupportedException(); }
-            }
-
-            public override BytesRef Term
-            {
-                get
-                {
-                    // TODO: reuse BytesRef
-                    return new BytesRef(Current);
-                }
-            }
-
-            public override int DocFreq
-            {
-                get { return RamField.TermToDocs[Current].Docs.Count; }
-            }
-
-            public override long TotalTermFreq
-            {
-                get { return RamField.TermToDocs[Current].TotalTermFreq; }
-            }
-
-            public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, int flags)
-            {
-                return new RAMDocsEnum(RamField.TermToDocs[Current], liveDocs);
-            }
-
-            public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, int flags)
-            {
-                return new RAMDocsAndPositionsEnum(RamField.TermToDocs[Current], liveDocs);
-            }
-        }
-
-        private class RAMDocsEnum : DocsEnum
-        {
-            private readonly RAMTerm RamTerm;
-            private readonly IBits LiveDocs;
-            private RAMDoc Current;
-            private int Upto = -1;
-#pragma warning disable 414
-            private int PosUpto = 0; // LUCENENET NOTE: Not used
-#pragma warning restore 414
-
-            public RAMDocsEnum(RAMTerm ramTerm, IBits liveDocs)
-            {
-                this.RamTerm = ramTerm;
-                this.LiveDocs = liveDocs;
-            }
-
-            public override int Advance(int targetDocID)
-            {
-                return SlowAdvance(targetDocID);
-            }
-
-            // TODO: override bulk read, for better perf
-            public override int NextDoc()
-            {
-                while (true)
-                {
-                    Upto++;
-                    if (Upto < RamTerm.Docs.Count)
-                    {
-                        Current = RamTerm.Docs[Upto];
-                        if (LiveDocs == null || LiveDocs.Get(Current.DocID))
-                        {
-                            PosUpto = 0;
-                            return Current.DocID;
-                        }
-                    }
-                    else
-                    {
-                        return NO_MORE_DOCS;
-                    }
-                }
-            }
-
-            public override int Freq
-            {
-                get { return Current.Positions.Length; }
-            }
-
-            public override int DocID
-            {
-                get { return Current.DocID; }
-            }
-
-            public override long GetCost()
-            {
-                return RamTerm.Docs.Count;
-            }
-        }
-
-        private class RAMDocsAndPositionsEnum : DocsAndPositionsEnum
-        {
-            private readonly RAMTerm RamTerm;
-            private readonly IBits LiveDocs;
-            private RAMDoc Current;
-            private int Upto = -1;
-            private int PosUpto = 0;
-
-            public RAMDocsAndPositionsEnum(RAMTerm ramTerm, IBits liveDocs)
-            {
-                this.RamTerm = ramTerm;
-                this.LiveDocs = liveDocs;
-            }
-
-            public override int Advance(int targetDocID)
-            {
-                return SlowAdvance(targetDocID);
-            }
-
-            // TODO: override bulk read, for better perf
-            public override int NextDoc()
-            {
-                while (true)
-                {
-                    Upto++;
-                    if (Upto < RamTerm.Docs.Count)
-                    {
-                        Current = RamTerm.Docs[Upto];
-                        if (LiveDocs == null || LiveDocs.Get(Current.DocID))
-                        {
-                            PosUpto = 0;
-                            return Current.DocID;
-                        }
-                    }
-                    else
-                    {
-                        return NO_MORE_DOCS;
-                    }
-                }
-            }
-
-            public override int Freq
-            {
-                get { return Current.Positions.Length; }
-            }
-
-            public override int DocID
-            {
-                get { return Current.DocID; }
-            }
-
-            public override int NextPosition()
-            {
-                return Current.Positions[PosUpto++];
-            }
-
-            public override int StartOffset
-            {
-                get { return -1; }
-            }
-
-            public override int EndOffset
-            {
-                get { return -1; }
-            }
-
-            public override BytesRef GetPayload()
-            {
-                if (Current.Payloads != null && Current.Payloads[PosUpto - 1] != null)
-                {
-                    return new BytesRef(Current.Payloads[PosUpto - 1]);
-                }
-                else
-                {
-                    return null;
-                }
-            }
-
-            public override long GetCost()
-            {
-                return RamTerm.Docs.Count;
-            }
-        }
-
-        // Holds all indexes created, keyed by the ID assigned in fieldsConsumer
-        private readonly IDictionary<int?, RAMPostings> State = new Dictionary<int?, RAMPostings>();
-
-        private readonly AtomicInt64 NextID = new AtomicInt64();
-
-        private readonly string RAM_ONLY_NAME = "RAMOnly";
-        private const int VERSION_START = 0;
-        private const int VERSION_LATEST = VERSION_START;
-
-        private const string ID_EXTENSION = "id";
-
-        public override FieldsConsumer FieldsConsumer(SegmentWriteState writeState)
-        {
-            int id = (int)NextID.IncrementAndGet();
-
-            // TODO -- ok to do this up front instead of
-            // on close....?  should be ok?
-            // Write our ID:
-            string idFileName = IndexFileNames.SegmentFileName(writeState.SegmentInfo.Name, writeState.SegmentSuffix, ID_EXTENSION);
-            IndexOutput @out = writeState.Directory.CreateOutput(idFileName, writeState.Context);
-            bool success = false;
-            try
-            {
-                CodecUtil.WriteHeader(@out, RAM_ONLY_NAME, VERSION_LATEST);
-                @out.WriteVInt32(id);
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    IOUtils.CloseWhileHandlingException(@out);
-                }
-                else
-                {
-                    IOUtils.Close(@out);
-                }
-            }
-
-            RAMPostings postings = new RAMPostings();
-            RAMFieldsConsumer consumer = new RAMFieldsConsumer(postings);
-
-            lock (State)
-            {
-                State[id] = postings;
-            }
-            return consumer;
-        }
-
-        public override FieldsProducer FieldsProducer(SegmentReadState readState)
-        {
-            // Load our ID:
-            string idFileName = IndexFileNames.SegmentFileName(readState.SegmentInfo.Name, readState.SegmentSuffix, ID_EXTENSION);
-            IndexInput @in = readState.Directory.OpenInput(idFileName, readState.Context);
-            bool success = false;
-            int id;
-            try
-            {
-                CodecUtil.CheckHeader(@in, RAM_ONLY_NAME, VERSION_START, VERSION_LATEST);
-                id = @in.ReadVInt32();
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    IOUtils.CloseWhileHandlingException(@in);
-                }
-                else
-                {
-                    IOUtils.Close(@in);
-                }
-            }
-
-            lock (State)
-            {
-                return State[id];
-            }
-        }
-    }
-}
\ No newline at end of file
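
The reversed comparer above flips both the per-byte comparison and the length tie-break, so lexicographically larger terms sort first and a prefix sorts after the longer term containing it. A small illustrative sketch on raw byte arrays (not part of the codec) makes the ordering concrete:

    using System;

    internal static class ReverseOrderDemo
    {
        // Mirrors the comparer above on plain byte arrays: compare unsigned
        // bytes with the operands swapped, then break ties so the longer
        // input sorts first.
        private static int ReverseCompare(byte[] a, byte[] b)
        {
            int stop = Math.Min(a.Length, b.Length);
            for (int i = 0; i < stop; i++)
            {
                int diff = (b[i] & 0xff) - (a[i] & 0xff);
                if (diff != 0) return diff;
            }
            return b.Length - a.Length; // a prefix sorts after the longer term
        }

        internal static void Main()
        {
            byte[] a = { (byte)'a' }, b = { (byte)'b' }, ab = { (byte)'a', (byte)'b' };
            Console.WriteLine(ReverseCompare(a, b) > 0);  // True: "b" sorts before "a"
            Console.WriteLine(ReverseCompare(a, ab) > 0); // True: "ab" sorts before "a"
        }
    }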

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/49a0460f/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
index 2f32d94..351f632 100644
--- a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
+++ b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
@@ -257,7 +257,7 @@
     <Compile Include="Codecs\MockSep\MockSingleIntIndexInput.cs" />
     <Compile Include="Codecs\MockSep\MockSingleIntIndexOutput.cs" />
     <Compile Include="Codecs\NestedPulsing\NestedPulsingPostingsFormat.cs" />
-    <Compile Include="Codecs\ramonly\RAMOnlyPostingsFormat.cs">
+    <Compile Include="Codecs\RAMOnly\RAMOnlyPostingsFormat.cs">
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Codecs\TestCodecFactory.cs" />


[60/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Renamed Codecs\lucene41\ to Codecs\Lucene41\

Posted by ni...@apache.org.
Lucene.Net.TestFramework: Renamed Codecs\lucene41\ to Codecs\Lucene41\


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/9138d1bf
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/9138d1bf
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/9138d1bf

Branch: refs/heads/api-work
Commit: 9138d1bf47f9c0bf81f6169084a721ceb275f7a1
Parents: c0e9469
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 26 03:14:41 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:17:57 2017 +0700

----------------------------------------------------------------------
 .../Codecs/Lucene41/Lucene41RWCodec.cs          | 113 +++++++++++++++++++
 .../Codecs/lucene41/Lucene41RWCodec.cs          | 113 -------------------
 .../Lucene.Net.TestFramework.csproj             |   2 +-
 3 files changed, 114 insertions(+), 114 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9138d1bf/src/Lucene.Net.TestFramework/Codecs/Lucene41/Lucene41RWCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene41/Lucene41RWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene41/Lucene41RWCodec.cs
new file mode 100644
index 0000000..2c6edef
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene41/Lucene41RWCodec.cs
@@ -0,0 +1,113 @@
+namespace Lucene.Net.Codecs.Lucene41
+{
+    using Lucene40FieldInfosFormat = Lucene.Net.Codecs.Lucene40.Lucene40FieldInfosFormat;
+    using Lucene40FieldInfosWriter = Lucene.Net.Codecs.Lucene40.Lucene40FieldInfosWriter;
+    using Lucene40RWDocValuesFormat = Lucene.Net.Codecs.Lucene40.Lucene40RWDocValuesFormat;
+    using Lucene40RWNormsFormat = Lucene.Net.Codecs.Lucene40.Lucene40RWNormsFormat;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Read-write version of <seealso cref="Lucene41Codec"/> for testing.
+    /// </summary>
+#pragma warning disable 612, 618
+    public class Lucene41RWCodec : Lucene41Codec
+    {
+        private readonly StoredFieldsFormat FieldsFormat = new Lucene41StoredFieldsFormat();
+        private readonly FieldInfosFormat fieldInfos;
+        private readonly DocValuesFormat DocValues;
+        private readonly NormsFormat Norms;
+        private readonly bool _oldFormatImpersonationIsActive;
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Creates the codec with OldFormatImpersonationIsActive = true.
+        /// </summary>
+        /// <remarks>
+        /// Added so that SPIClassIterator can locate this Codec.  The iterator
+        /// only recognises classes that have parameterless constructors.
+        /// </remarks>
+        public Lucene41RWCodec()
+            : this(true)
+        { }
+
+        /// <param name="oldFormatImpersonationIsActive">
+        /// LUCENENET specific
+        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
+        /// </param>
+        public Lucene41RWCodec(bool oldFormatImpersonationIsActive) : base()
+        {
+            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
+
+            Norms = new Lucene40RWNormsFormat(oldFormatImpersonationIsActive);
+            fieldInfos = new Lucene40FieldInfosFormatAnonymousInnerClassHelper(oldFormatImpersonationIsActive);
+            DocValues = new Lucene40RWDocValuesFormat(oldFormatImpersonationIsActive);
+        }
+
+        private class Lucene40FieldInfosFormatAnonymousInnerClassHelper : Lucene40FieldInfosFormat
+        {
+            private readonly bool _oldFormatImpersonationIsActive;
+
+            /// <param name="oldFormatImpersonationIsActive">
+            /// LUCENENET specific
+            /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
+            /// </param>
+            public Lucene40FieldInfosFormatAnonymousInnerClassHelper(bool oldFormatImpersonationIsActive) : base()
+            {
+                _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
+            }
+
+            public override FieldInfosWriter FieldInfosWriter
+            {
+                get
+                {
+                    if (!_oldFormatImpersonationIsActive)
+                    {
+                        return base.FieldInfosWriter;
+                    }
+                    else
+                    {
+                        return new Lucene40FieldInfosWriter();
+                    }
+                }
+            }
+        }
+
+        public override FieldInfosFormat FieldInfosFormat
+        {
+            get { return fieldInfos; }
+        }
+
+        public override StoredFieldsFormat StoredFieldsFormat
+        {
+            get { return FieldsFormat; }
+        }
+
+        public override DocValuesFormat DocValuesFormat
+        {
+            get { return DocValues; }
+        }
+
+        public override NormsFormat NormsFormat
+        {
+            get { return Norms; }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file
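
Note the split above: the parameterless constructor exists only so SPIClassIterator can instantiate the codec, while tests construct it with an explicit impersonation flag. A hedged usage sketch (the helper name is hypothetical):

    using Lucene.Net.Codecs;
    using Lucene.Net.Codecs.Lucene41;

    internal static class RWCodecSketch
    {
        // Illustrative helper (not in the codebase): 'impersonate' controls
        // whether FieldInfosWriter uses the legacy Lucene40FieldInfosWriter
        // (true) or defers to the read-only base implementation (false).
        internal static Codec ForTest(bool impersonate)
        {
            return new Lucene41RWCodec(impersonate);
        }
    }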

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9138d1bf/src/Lucene.Net.TestFramework/Codecs/lucene41/Lucene41RWCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene41/Lucene41RWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/lucene41/Lucene41RWCodec.cs
deleted file mode 100644
index 2c6edef..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene41/Lucene41RWCodec.cs
+++ /dev/null
@@ -1,113 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene41
-{
-    using Lucene40FieldInfosFormat = Lucene.Net.Codecs.Lucene40.Lucene40FieldInfosFormat;
-    using Lucene40FieldInfosWriter = Lucene.Net.Codecs.Lucene40.Lucene40FieldInfosWriter;
-    using Lucene40RWDocValuesFormat = Lucene.Net.Codecs.Lucene40.Lucene40RWDocValuesFormat;
-    using Lucene40RWNormsFormat = Lucene.Net.Codecs.Lucene40.Lucene40RWNormsFormat;
-    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Read-write version of <seealso cref="Lucene41Codec"/> for testing.
-    /// </summary>
-#pragma warning disable 612, 618
-    public class Lucene41RWCodec : Lucene41Codec
-    {
-        private readonly StoredFieldsFormat FieldsFormat = new Lucene41StoredFieldsFormat();
-        private readonly FieldInfosFormat fieldInfos;
-        private readonly DocValuesFormat DocValues;
-        private readonly NormsFormat Norms;
-        private readonly bool _oldFormatImpersonationIsActive;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public Lucene41RWCodec()
-            : this(true)
-        { }
-
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-        /// </param>
-        public Lucene41RWCodec(bool oldFormatImpersonationIsActive) : base()
-        {
-            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-
-            Norms = new Lucene40RWNormsFormat(oldFormatImpersonationIsActive);
-            fieldInfos = new Lucene40FieldInfosFormatAnonymousInnerClassHelper(oldFormatImpersonationIsActive);
-            DocValues = new Lucene40RWDocValuesFormat(oldFormatImpersonationIsActive);
-        }
-
-        private class Lucene40FieldInfosFormatAnonymousInnerClassHelper : Lucene40FieldInfosFormat
-        {
-            private readonly bool _oldFormatImpersonationIsActive;
-
-            /// <param name="oldFormatImpersonationIsActive">
-            /// LUCENENET specific
-            /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-            /// </param>
-            public Lucene40FieldInfosFormatAnonymousInnerClassHelper(bool oldFormatImpersonationIsActive) : base()
-            {
-                _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-            }
-
-            public override FieldInfosWriter FieldInfosWriter
-            {
-                get
-                {
-                    if (!_oldFormatImpersonationIsActive)
-                    {
-                        return base.FieldInfosWriter;
-                    }
-                    else
-                    {
-                        return new Lucene40FieldInfosWriter();
-                    }
-                }
-            }
-        }
-
-        public override FieldInfosFormat FieldInfosFormat
-        {
-            get { return fieldInfos; }
-        }
-
-        public override StoredFieldsFormat StoredFieldsFormat
-        {
-            get { return FieldsFormat; }
-        }
-
-        public override DocValuesFormat DocValuesFormat
-        {
-            get { return DocValues; }
-        }
-
-        public override NormsFormat NormsFormat
-        {
-            get { return Norms; }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9138d1bf/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
index f23c9e2..64cf3e5 100644
--- a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
+++ b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
@@ -229,7 +229,7 @@
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Codecs\Lucene41Ords\Lucene41WithOrds.cs" />
-    <Compile Include="Codecs\lucene41\Lucene41RWCodec.cs">
+    <Compile Include="Codecs\Lucene41\Lucene41RWCodec.cs">
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Codecs\lucene42\Lucene42DocValuesConsumer.cs">


[42/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
new file mode 100644
index 0000000..c5de1e4
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs
@@ -0,0 +1,1053 @@
+using Lucene.Net.Randomized.Generators;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.IO;
+
+namespace Lucene.Net.Index
+{
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using FileInfo = System.IO.FileInfo;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using BinaryDocValuesField = Lucene.Net.Documents.BinaryDocValuesField;
+    using Document = Lucene.Net.Documents.Document;
+    using DoubleDocValuesField = Lucene.Net.Documents.DoubleDocValuesField;
+    using Field = Lucene.Net.Documents.Field;
+    using FieldType = Lucene.Net.Documents.FieldType;
+    using SingleDocValuesField = Lucene.Net.Documents.SingleDocValuesField;
+    using Int32Field = Lucene.Net.Documents.Int32Field;
+    using Int64Field = Lucene.Net.Documents.Int64Field;
+    using NumericDocValuesField = Lucene.Net.Documents.NumericDocValuesField;
+    using SortedDocValuesField = Lucene.Net.Documents.SortedDocValuesField;
+    using StringField = Lucene.Net.Documents.StringField;
+    using TextField = Lucene.Net.Documents.TextField;
+    //using IndexOptions = Lucene.Net.Index.IndexOptions;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using FieldCache = Lucene.Net.Search.FieldCache;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using NumericRangeQuery = Lucene.Net.Search.NumericRangeQuery;
+    using PhraseQuery = Lucene.Net.Search.PhraseQuery;
+    using ScoreDoc = Lucene.Net.Search.ScoreDoc;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TopDocs = Lucene.Net.Search.TopDocs;
+    using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper;
+    using Directory = Lucene.Net.Store.Directory;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Constants = Lucene.Net.Util.Constants;
+    using StringHelper = Lucene.Net.Util.StringHelper;
+    using SuppressCodecs = Lucene.Net.Util.LuceneTestCase.SuppressCodecs;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /*
+      Verify we can read the pre-4.0 file format, do searches
+      against it, and add documents to it.
+    */
+    // don't use 3.x codec, it's unrealistic since it means
+    // we won't even be running the actual code, only the impostor
+    // Sep codec cannot yet handle the offsets we add when changing indexes!
+    [SuppressCodecs("Lucene3x", "MockFixedIntBlock", "MockVariableIntBlock", "MockSep", "MockRandom", "Lucene40", "Lucene41", "Appending", "Lucene42", "Lucene45")]
+    [TestFixture]
+    public class TestBackwardsCompatibility3x : LuceneTestCase
+    {
+
+        // Uncomment these cases & run them on an older Lucene
+        // version, to generate an index to test backwards
+        // compatibility.  Then, cd to build/test/index.cfs and
+        // run "zip index.<VERSION>.cfs.zip *"; cd to
+        // build/test/index.nocfs and run "zip
+        // index.<VERSION>.nocfs.zip *".  Then move those 2 zip
+        // files to your trunk checkout and add them to the
+        // oldNames array.
+
+        /*
+        public void testCreateCFS() throws IOException {
+          createIndex("index.cfs", true, false);
+        }
+	
+        public void testCreateNoCFS() throws IOException {
+          createIndex("index.nocfs", false, false);
+        }
+        */
+
+        /*
+          // These are only needed for the special upgrade test to verify
+          // that single-segment indexes are also correctly upgraded by IndexUpgrader.
+          // You don't need them to be built for non-3.1 versions (the test is happy
+          // with just one "old" segment format; the version is unimportant):
+	  
+          public void testCreateSingleSegmentCFS() throws IOException {
+            createIndex("index.singlesegment.cfs", true, true);
+          }
+	
+          public void testCreateSingleSegmentNoCFS() throws IOException {
+            createIndex("index.singlesegment.nocfs", false, true);
+          }
+	
+        */
+
+        // LUCENENET specific - used to load resources for this type
+        internal const string CURRENT_RESOURCE_DIRECTORY = "Lucene.Net.Tests.Index.";
+
+        internal static readonly string[] OldNames = new string[] {
+            "30.cfs", "30.nocfs", "31.cfs", "31.nocfs", "32.cfs",
+            "32.nocfs", "34.cfs", "34.nocfs"
+        };
+
+        internal readonly string[] UnsupportedNames = new string[] {
+            "19.cfs", "19.nocfs", "20.cfs", "20.nocfs", "21.cfs",
+            "21.nocfs", "22.cfs", "22.nocfs", "23.cfs", "23.nocfs",
+            "24.cfs", "24.nocfs", "29.cfs", "29.nocfs"
+        };
+
+        internal static readonly string[] OldSingleSegmentNames = new string[] {
+            "31.optimized.cfs", "31.optimized.nocfs"
+        };
+
+        internal static IDictionary<string, Directory> OldIndexDirs;
+
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            Assert.IsFalse(OLD_FORMAT_IMPERSONATION_IS_ACTIVE, "test infra is broken!");
+            IList<string> names = new List<string>(OldNames.Length + OldSingleSegmentNames.Length);
+            names.AddRange(Arrays.AsList(OldNames));
+            names.AddRange(Arrays.AsList(OldSingleSegmentNames));
+            OldIndexDirs = new Dictionary<string, Directory>();
+            foreach (string name in names)
+            {
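+                // LUCENENET: each old-format index ships as an embedded-resource zip;
+                // extract it into a temp directory and open it as an FSDirectory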
+                DirectoryInfo dir = CreateTempDir(name);
+                using (Stream zipFileStream = this.GetType().Assembly.GetManifestResourceStream(CURRENT_RESOURCE_DIRECTORY + "index." + name + ".zip"))
+                {
+                    TestUtil.Unzip(zipFileStream, dir);
+                }
+                OldIndexDirs[name] = NewFSDirectory(dir);
+            }
+        }
+
+        [OneTimeTearDown]
+        public void AfterClass()
+        {
+            foreach (Directory d in OldIndexDirs.Values)
+            {
+                d.Dispose();
+            }
+            OldIndexDirs = null;
+            base.TearDown();
+        }
+
+        public override void TearDown()
+        {
+            // LUCENENET: We don't want our temp directory deleted until after
+            // all of the tests in the class run. So we need to override this and
+            // call base.TearDown() manually during [OneTimeTearDown]
+        }
+
+        /// <summary>
+        /// This test checks that *only* IndexFormatTooOldExceptions are thrown when you open and operate on indexes that are too old. </summary>
+        [Test]
+        public virtual void TestUnsupportedOldIndexes()
+        {
+            for (int i = 0; i < UnsupportedNames.Length; i++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: index " + UnsupportedNames[i]);
+                }
+                DirectoryInfo oldIndexDir = CreateTempDir(UnsupportedNames[i]);
+                using (Stream dataFile = this.GetType().Assembly.GetManifestResourceStream(CURRENT_RESOURCE_DIRECTORY + "unsupported." + UnsupportedNames[i] + ".zip"))
+                {
+                    TestUtil.Unzip(dataFile, oldIndexDir);
+                }
+                BaseDirectoryWrapper dir = NewFSDirectory(oldIndexDir);
+                // don't checkindex, these are intentionally not supported
+                dir.CheckIndexOnClose = false;
+
+                IndexReader reader = null;
+                IndexWriter writer = null;
+                try
+                {
+                    reader = DirectoryReader.Open(dir);
+                    Assert.Fail("DirectoryReader.open should not pass for " + UnsupportedNames[i]);
+                }
+#pragma warning disable 168
+                catch (IndexFormatTooOldException e)
+#pragma warning restore 168
+                {
+                    // pass
+                }
+                finally
+                {
+                    if (reader != null)
+                    {
+                        reader.Dispose();
+                    }
+                    reader = null;
+                }
+
+                try
+                {
+                    writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                    Assert.Fail("IndexWriter creation should not pass for " + UnsupportedNames[i]);
+                }
+                catch (IndexFormatTooOldException e)
+                {
+                    // pass
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: got expected exc:");
+                        Console.WriteLine(e.StackTrace);
+                    }
+                    // Make sure exc message includes a path=
+                    Assert.IsTrue(e.Message.IndexOf("path=\"") != -1, "got exc message: " + e.Message);
+                }
+                finally
+                {
+                    // We should fail to open IW, so it should be null when we get here.
+                    // However, if the test fails (i.e., IW did not fail on open), we need
+                    // to close IW. In that case, if merges run, IW may throw
+                    // IndexFormatTooOldException, and we don't want that to mask the
+                    // Assert.Fail() above, so close without waiting for merges.
+                    if (writer != null)
+                    {
+                        writer.Dispose(false);
+                    }
+                    writer = null;
+                }
+
+                MemoryStream bos = new MemoryStream(1024);
+                CheckIndex checker = new CheckIndex(dir);
+#pragma warning disable 612, 618
+                // LUCENENET: wrap the MemoryStream itself; bos.ToString() returns the
+                // type name, not the stream contents, so it is neither a valid path
+                // for StreamWriter nor the captured checker output
+                StreamWriter infoStream = new StreamWriter(bos, IOUtils.CHARSET_UTF_8);
+                checker.InfoStream = infoStream;
+                CheckIndex.Status indexStatus = checker.DoCheckIndex();
+                Assert.IsFalse(indexStatus.Clean);
+                infoStream.Flush();
+                Assert.IsTrue(IOUtils.CHARSET_UTF_8.GetString(bos.ToArray()).Contains(typeof(IndexFormatTooOldException).Name));
+#pragma warning restore 612, 618
+
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestFullyMergeOldIndex()
+        {
+            foreach (string name in OldNames)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: index=" + name);
+                }
+                Directory dir = NewDirectory(OldIndexDirs[name]);
+                IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                w.ForceMerge(1);
+                w.Dispose();
+
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestAddOldIndexes()
+        {
+            foreach (string name in OldNames)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: old index " + name);
+                }
+                Directory targetDir = NewDirectory();
+                IndexWriter w = new IndexWriter(targetDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                w.AddIndexes(OldIndexDirs[name]);
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: done adding indices; now close");
+                }
+                w.Dispose();
+
+                targetDir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestAddOldIndexesReader()
+        {
+            foreach (string name in OldNames)
+            {
+                IndexReader reader = DirectoryReader.Open(OldIndexDirs[name]);
+
+                Directory targetDir = NewDirectory();
+                IndexWriter w = new IndexWriter(targetDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                w.AddIndexes(reader);
+                w.Dispose();
+                reader.Dispose();
+
+                targetDir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestSearchOldIndex()
+        {
+            foreach (string name in OldNames)
+            {
+                SearchIndex(OldIndexDirs[name], name);
+            }
+        }
+
+        [Test]
+        public virtual void TestIndexOldIndexNoAdds()
+        {
+            foreach (string name in OldNames)
+            {
+                Directory dir = NewDirectory(OldIndexDirs[name]);
+                ChangeIndexNoAdds(Random(), dir);
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestIndexOldIndex()
+        {
+            foreach (string name in OldNames)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: oldName=" + name);
+                }
+                Directory dir = NewDirectory(OldIndexDirs[name]);
+                ChangeIndexWithAdds(Random(), dir, name);
+                dir.Dispose();
+            }
+        }
+
+        /// @deprecated 3.x transition mechanism 
+        [Obsolete("3.x transition mechanism")]
+        [Test]
+        public virtual void TestDeleteOldIndex()
+        {
+            foreach (string name in OldNames)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: oldName=" + name);
+                }
+
+                // Try one delete:
+                Directory dir = NewDirectory(OldIndexDirs[name]);
+
+                IndexReader ir = DirectoryReader.Open(dir);
+                Assert.AreEqual(35, ir.NumDocs);
+                ir.Dispose();
+
+                IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
+                iw.DeleteDocuments(new Term("id", "3"));
+                iw.Dispose();
+
+                ir = DirectoryReader.Open(dir);
+                Assert.AreEqual(34, ir.NumDocs);
+                ir.Dispose();
+
+                // Delete all but 1 document:
+                iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
+                for (int i = 0; i < 35; i++)
+                {
+                    iw.DeleteDocuments(new Term("id", "" + i));
+                }
+
+                // Verify NRT reader takes:
+                ir = DirectoryReader.Open(iw, true);
+                iw.Dispose();
+
+                Assert.AreEqual(1, ir.NumDocs, "index " + name);
+                ir.Dispose();
+
+                // Verify non-NRT reader takes:
+                ir = DirectoryReader.Open(dir);
+                Assert.AreEqual(1, ir.NumDocs, "index " + name);
+                ir.Dispose();
+
+                dir.Dispose();
+            }
+        }
+
+        private void DoTestHits(ScoreDoc[] hits, int expectedCount, IndexReader reader)
+        {
+            int hitCount = hits.Length;
+            Assert.AreEqual(expectedCount, hitCount, "wrong number of hits");
+            for (int i = 0; i < hitCount; i++)
+            {
+                reader.Document(hits[i].Doc);
+                reader.GetTermVectors(hits[i].Doc);
+            }
+        }
+
+        public virtual void SearchIndex(Directory dir, string oldName)
+        {
+            //QueryParser parser = new QueryParser("contents", new MockAnalyzer(random));
+            //Query query = parser.parse("handle:1");
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            IndexSearcher searcher = new IndexSearcher(reader);
+
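+            // sanity check: the old-format index must still pass CheckIndex under the current code before we search it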
+            TestUtil.CheckIndex(dir);
+
+            // true if this is a 4.0+ index
+            bool is40Index = MultiFields.GetMergedFieldInfos(reader).FieldInfo("content5") != null;
+
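+            // each back-compat index contains exactly one deletion (doc id 7, see CreateIndex), so liveDocs is non-null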
+            IBits liveDocs = MultiFields.GetLiveDocs(reader);
+
+            for (int i = 0; i < 35; i++)
+            {
+                if (liveDocs.Get(i))
+                {
+                    Document d = reader.Document(i);
+                    IList<IIndexableField> fields = d.Fields;
+                    bool isProxDoc = d.GetField("content3") == null;
+                    if (isProxDoc)
+                    {
+                        int numFields = is40Index ? 7 : 5;
+                        Assert.AreEqual(numFields, fields.Count);
+                        IIndexableField f = d.GetField("id");
+                        Assert.AreEqual("" + i, f.GetStringValue());
+
+                        f = d.GetField("utf8");
+                        Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.GetStringValue());
+
+                        f = d.GetField("autf8");
+                        Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.GetStringValue());
+
+                        f = d.GetField("content2");
+                        Assert.AreEqual("here is more content with aaa aaa aaa", f.GetStringValue());
+
+                        f = d.GetField("fie\u2C77ld");
+                        Assert.AreEqual("field with non-ascii name", f.GetStringValue());
+                    }
+
+                    Fields tfvFields = reader.GetTermVectors(i);
+                    Assert.IsNotNull(tfvFields, "i=" + i);
+                    Terms tfv = tfvFields.GetTerms("utf8");
+                    Assert.IsNotNull(tfv, "docID=" + i + " index=" + oldName);
+                }
+                else
+                {
+                    // Only ID 7 is deleted
+                    Assert.AreEqual(7, i);
+                }
+            }
+
+            if (is40Index)
+            {
+                // check docvalues fields
+                NumericDocValues dvByte = MultiDocValues.GetNumericValues(reader, "dvByte");
+                BinaryDocValues dvBytesDerefFixed = MultiDocValues.GetBinaryValues(reader, "dvBytesDerefFixed");
+                BinaryDocValues dvBytesDerefVar = MultiDocValues.GetBinaryValues(reader, "dvBytesDerefVar");
+                SortedDocValues dvBytesSortedFixed = MultiDocValues.GetSortedValues(reader, "dvBytesSortedFixed");
+                SortedDocValues dvBytesSortedVar = MultiDocValues.GetSortedValues(reader, "dvBytesSortedVar");
+                BinaryDocValues dvBytesStraightFixed = MultiDocValues.GetBinaryValues(reader, "dvBytesStraightFixed");
+                BinaryDocValues dvBytesStraightVar = MultiDocValues.GetBinaryValues(reader, "dvBytesStraightVar");
+                NumericDocValues dvDouble = MultiDocValues.GetNumericValues(reader, "dvDouble");
+                NumericDocValues dvFloat = MultiDocValues.GetNumericValues(reader, "dvFloat");
+                NumericDocValues dvInt = MultiDocValues.GetNumericValues(reader, "dvInt");
+                NumericDocValues dvLong = MultiDocValues.GetNumericValues(reader, "dvLong");
+                NumericDocValues dvPacked = MultiDocValues.GetNumericValues(reader, "dvPacked");
+                NumericDocValues dvShort = MultiDocValues.GetNumericValues(reader, "dvShort");
+
+                for (int i = 0; i < 35; i++)
+                {
+                    int id = Convert.ToInt32(reader.Document(i).Get("id"));
+                    Assert.AreEqual(id, dvByte.Get(i));
+
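+                    // the binary doc values hold the id encoded as 4 big-endian bytes; this must match the encoding in AddDoc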
+                    sbyte[] bytes = new sbyte[] { (sbyte)((int)((uint)id >> 24)), (sbyte)((int)((uint)id >> 16)), (sbyte)((int)((uint)id >> 8)), (sbyte)id };
+                    BytesRef expectedRef = new BytesRef((byte[])(Array)bytes);
+                    BytesRef scratch = new BytesRef();
+
+                    dvBytesDerefFixed.Get(i, scratch);
+                    Assert.AreEqual(expectedRef, scratch);
+                    dvBytesDerefVar.Get(i, scratch);
+                    Assert.AreEqual(expectedRef, scratch);
+                    dvBytesSortedFixed.Get(i, scratch);
+                    Assert.AreEqual(expectedRef, scratch);
+                    dvBytesSortedVar.Get(i, scratch);
+                    Assert.AreEqual(expectedRef, scratch);
+                    dvBytesStraightFixed.Get(i, scratch);
+                    Assert.AreEqual(expectedRef, scratch);
+                    dvBytesStraightVar.Get(i, scratch);
+                    Assert.AreEqual(expectedRef, scratch);
+
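+                    // double/float doc values are stored as raw IEEE 754 bits, so convert back before comparing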
+                    Assert.AreEqual((double)id, BitConverter.Int64BitsToDouble(dvDouble.Get(i)), 0D);
+                    Assert.AreEqual((float)id, Number.Int32BitsToSingle((int)dvFloat.Get(i)), 0F);
+                    Assert.AreEqual(id, dvInt.Get(i));
+                    Assert.AreEqual(id, dvLong.Get(i));
+                    Assert.AreEqual(id, dvPacked.Get(i));
+                    Assert.AreEqual(id, dvShort.Get(i));
+                }
+            }
+
+            ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
+
+            // First document should be #21, since its norm was
+            // increased:
+            Document d_ = searcher.IndexReader.Document(hits[0].Doc);
+            Assert.AreEqual("21", d_.Get("id"), "didn't get the right document first");
+
+            DoTestHits(hits, 34, searcher.IndexReader);
+
+            if (is40Index)
+            {
+                hits = searcher.Search(new TermQuery(new Term("content5", "aaa")), null, 1000).ScoreDocs;
+
+                DoTestHits(hits, 34, searcher.IndexReader);
+
+                hits = searcher.Search(new TermQuery(new Term("content6", "aaa")), null, 1000).ScoreDocs;
+
+                DoTestHits(hits, 34, searcher.IndexReader);
+            }
+
+            hits = searcher.Search(new TermQuery(new Term("utf8", "\u0000")), null, 1000).ScoreDocs;
+            Assert.AreEqual(34, hits.Length);
+            hits = searcher.Search(new TermQuery(new Term("utf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne")), null, 1000).ScoreDocs;
+            Assert.AreEqual(34, hits.Length);
+            hits = searcher.Search(new TermQuery(new Term("utf8", "ab\ud917\udc17cd")), null, 1000).ScoreDocs;
+            Assert.AreEqual(34, hits.Length);
+
+            reader.Dispose();
+        }
+
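+        // Compares the two-digit version prefix of an old index name (e.g. "24.nocfs") against v.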
+        private int Compare(string name, string v)
+        {
+            int v0 = Convert.ToInt32(name.Substring(0, 2));
+            int v1 = Convert.ToInt32(v);
+            return v0 - v1;
+        }
+
+        public virtual void ChangeIndexWithAdds(Random random, Directory dir, string origOldName)
+        {
+            // open writer
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetOpenMode(OpenMode.APPEND));
+            // add 10 docs
+            for (int i = 0; i < 10; i++)
+            {
+                AddDoc(writer, 35 + i);
+            }
+
+            // make sure writer sees right total -- writer seems not to know about deletes in .del?
+            int expected;
+            if (Compare(origOldName, "24") < 0)
+            {
+                expected = 44;
+            }
+            else
+            {
+                expected = 45;
+            }
+            Assert.AreEqual(expected, writer.NumDocs, "wrong doc count");
+            writer.Dispose();
+
+            // make sure searching sees right # hits
+            IndexReader reader = DirectoryReader.Open(dir);
+            IndexSearcher searcher = new IndexSearcher(reader);
+            ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
+            Document d = searcher.IndexReader.Document(hits[0].Doc);
+            assertEquals("wrong first document", "21", d.Get("id"));
+            DoTestHits(hits, 44, searcher.IndexReader);
+            reader.Dispose();
+
+            // fully merge
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetOpenMode(OpenMode.APPEND));
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            reader = DirectoryReader.Open(dir);
+            searcher = new IndexSearcher(reader);
+            hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
+            Assert.AreEqual(44, hits.Length, "wrong number of hits");
+            d = searcher.Doc(hits[0].Doc);
+            DoTestHits(hits, 44, searcher.IndexReader);
+            Assert.AreEqual("wrong first document", "21", d.Get("id"));
+            reader.Dispose();
+        }
+
+        public virtual void ChangeIndexNoAdds(Random random, Directory dir)
+        {
+            // make sure searching sees right # hits
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            IndexSearcher searcher = new IndexSearcher(reader);
+            ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
+            Assert.AreEqual(34, hits.Length, "wrong number of hits");
+            Document d = searcher.Doc(hits[0].Doc);
+            assertEquals("wrong first document", "21", d.Get("id"));
+            reader.Dispose();
+
+            // fully merge
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetOpenMode(OpenMode.APPEND));
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            reader = DirectoryReader.Open(dir);
+            searcher = new IndexSearcher(reader);
+            hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
+            Assert.AreEqual(34, hits.Length, "wrong number of hits");
+            DoTestHits(hits, 34, searcher.IndexReader);
+            reader.Dispose();
+        }
+
+        public virtual DirectoryInfo CreateIndex(string dirName, bool doCFS, bool fullyMerged)
+        {
+            // we use a real directory name that is not cleaned up, because this method is only used to create backwards indexes:
+            DirectoryInfo indexDir = new DirectoryInfo(Path.Combine("/tmp/4x/", dirName));
+            TestUtil.Rm(indexDir);
+            Directory dir = NewFSDirectory(indexDir);
+            LogByteSizeMergePolicy mp = new LogByteSizeMergePolicy();
+            mp.NoCFSRatio = doCFS ? 1.0 : 0.0;
+            mp.MaxCFSSegmentSizeMB = double.PositiveInfinity;
+            // TODO: remove randomness
+            IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMaxBufferedDocs(10).SetMergePolicy(mp).SetUseCompoundFile(doCFS);
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            for (int i = 0; i < 35; i++)
+            {
+                AddDoc(writer, i);
+            }
+            Assert.AreEqual(35, writer.MaxDoc, "wrong doc count");
+            if (fullyMerged)
+            {
+                writer.ForceMerge(1);
+            }
+            writer.Dispose();
+
+            if (!fullyMerged)
+            {
+                // open fresh writer so we get no prx file in the added segment
+                mp = new LogByteSizeMergePolicy();
+                mp.NoCFSRatio = doCFS ? 1.0 : 0.0;
+                // TODO: remove randomness
+                conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMaxBufferedDocs(10).SetMergePolicy(mp).SetUseCompoundFile(doCFS);
+                writer = new IndexWriter(dir, conf);
+                AddNoProxDoc(writer);
+                writer.Dispose();
+
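+                // open with NoMergePolicy so the delete below stays in a .del file and no segments get merged away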
+                writer = new IndexWriter(dir, conf.SetMergePolicy(doCFS ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES));
+                Term searchTerm = new Term("id", "7");
+                writer.DeleteDocuments(searchTerm);
+                writer.Dispose();
+            }
+
+            dir.Dispose();
+
+            return indexDir;
+        }
+
+        private void AddDoc(IndexWriter writer, int id)
+        {
+            Document doc = new Document();
+            doc.Add(new TextField("content", "aaa", Field.Store.NO));
+            doc.Add(new StringField("id", Convert.ToString(id), Field.Store.YES));
+            FieldType customType2 = new FieldType(TextField.TYPE_STORED);
+            customType2.StoreTermVectors = true;
+            customType2.StoreTermVectorPositions = true;
+            customType2.StoreTermVectorOffsets = true;
+            doc.Add(new Field("autf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", customType2));
+            doc.Add(new Field("utf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", customType2));
+            doc.Add(new Field("content2", "here is more content with aaa aaa aaa", customType2));
+            doc.Add(new Field("fie\u2C77ld", "field with non-ascii name", customType2));
+            // add numeric fields, to test if flex preserves encoding
+            doc.Add(new Int32Field("trieInt", id, Field.Store.NO));
+            doc.Add(new Int64Field("trieLong", (long)id, Field.Store.NO));
+            // add docvalues fields
+            doc.Add(new NumericDocValuesField("dvByte", (sbyte)id));
+            sbyte[] bytes = new sbyte[] { (sbyte)((int)((uint)id >> 24)), (sbyte)((int)((uint)id >> 16)), (sbyte)((int)((uint)id >> 8)), (sbyte)id };
+            BytesRef @ref = new BytesRef((byte[])(Array)bytes);
+            doc.Add(new BinaryDocValuesField("dvBytesDerefFixed", @ref));
+            doc.Add(new BinaryDocValuesField("dvBytesDerefVar", @ref));
+            doc.Add(new SortedDocValuesField("dvBytesSortedFixed", @ref));
+            doc.Add(new SortedDocValuesField("dvBytesSortedVar", @ref));
+            doc.Add(new BinaryDocValuesField("dvBytesStraightFixed", @ref));
+            doc.Add(new BinaryDocValuesField("dvBytesStraightVar", @ref));
+            doc.Add(new DoubleDocValuesField("dvDouble", (double)id));
+            doc.Add(new SingleDocValuesField("dvFloat", (float)id));
+            doc.Add(new NumericDocValuesField("dvInt", id));
+            doc.Add(new NumericDocValuesField("dvLong", id));
+            doc.Add(new NumericDocValuesField("dvPacked", id));
+            doc.Add(new NumericDocValuesField("dvShort", (short)id));
+            // a field with both offsets and term vectors for a cross-check
+            FieldType customType3 = new FieldType(TextField.TYPE_STORED);
+            customType3.StoreTermVectors = true;
+            customType3.StoreTermVectorPositions = true;
+            customType3.StoreTermVectorOffsets = true;
+            customType3.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
+            doc.Add(new Field("content5", "here is more content with aaa aaa aaa", customType3));
+            // a field that omits only positions
+            FieldType customType4 = new FieldType(TextField.TYPE_STORED);
+            customType4.StoreTermVectors = true;
+            customType4.StoreTermVectorPositions = false;
+            customType4.StoreTermVectorOffsets = true;
+            customType4.IndexOptions = IndexOptions.DOCS_AND_FREQS;
+            doc.Add(new Field("content6", "here is more content with aaa aaa aaa", customType4));
+            // TODO: 
+            //   index different norms types via similarity (we use a random one currently?!)
+            //   remove any analyzer randomness, explicitly add payloads for certain fields.
+            writer.AddDocument(doc);
+        }
+
+        private void AddNoProxDoc(IndexWriter writer)
+        {
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.IndexOptions = IndexOptions.DOCS_ONLY;
+            Field f = new Field("content3", "aaa", customType);
+            doc.Add(f);
+            FieldType customType2 = new FieldType();
+            customType2.IsStored = true;
+            customType2.IndexOptions = IndexOptions.DOCS_ONLY;
+            f = new Field("content4", "aaa", customType2);
+            doc.Add(f);
+            writer.AddDocument(doc);
+        }
+
+        private int CountDocs(DocsEnum docs)
+        {
+            int count = 0;
+            while ((docs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                count++;
+            }
+            return count;
+        }
+
+        // flex: test basics of TermsEnum api on non-flex index
+        [Test]
+        public virtual void TestNextIntoWrongField()
+        {
+            foreach (string name in OldNames)
+            {
+                Directory dir = OldIndexDirs[name];
+                IndexReader r = DirectoryReader.Open(dir);
+                TermsEnum terms = MultiFields.GetFields(r).GetTerms("content").GetIterator(null);
+                BytesRef t = terms.Next();
+                Assert.IsNotNull(t);
+
+                // content field only has term aaa:
+                Assert.AreEqual("aaa", t.Utf8ToString());
+                Assert.IsNull(terms.Next());
+
+                BytesRef aaaTerm = new BytesRef("aaa");
+
+                // should be found exactly
+                Assert.AreEqual(TermsEnum.SeekStatus.FOUND, terms.SeekCeil(aaaTerm));
+                Assert.AreEqual(35, CountDocs(TestUtil.Docs(Random(), terms, null, null, 0)));
+                Assert.IsNull(terms.Next());
+
+                // should hit end of field
+                Assert.AreEqual(TermsEnum.SeekStatus.END, terms.SeekCeil(new BytesRef("bbb")));
+                Assert.IsNull(terms.Next());
+
+                // should seek to aaa
+                Assert.AreEqual(TermsEnum.SeekStatus.NOT_FOUND, terms.SeekCeil(new BytesRef("a")));
+                Assert.IsTrue(terms.Term.BytesEquals(aaaTerm));
+                Assert.AreEqual(35, CountDocs(TestUtil.Docs(Random(), terms, null, null, 0)));
+                Assert.IsNull(terms.Next());
+
+                Assert.AreEqual(TermsEnum.SeekStatus.FOUND, terms.SeekCeil(aaaTerm));
+                Assert.AreEqual(35, CountDocs(TestUtil.Docs(Random(), terms, null, null, 0)));
+                Assert.IsNull(terms.Next());
+
+                r.Dispose();
+            }
+        }
+
+        /// <summary>
+        /// Test that we didn't forget to bump the current Constants.LUCENE_MAIN_VERSION.
+        /// This is important so that we can determine which version of Lucene wrote the segment.
+        /// </summary>
+        [Test]
+        public virtual void TestOldVersions()
+        {
+            // first create a little index with the current code and get the version
+            Directory currentDir = NewDirectory();
+            RandomIndexWriter riw = new RandomIndexWriter(Random(), currentDir, Similarity, TimeZone);
+            riw.AddDocument(new Document());
+            riw.Dispose();
+            DirectoryReader ir = DirectoryReader.Open(currentDir);
+            SegmentReader air = (SegmentReader)ir.Leaves[0].Reader;
+            string currentVersion = air.SegmentInfo.Info.Version;
+            Assert.IsNotNull(currentVersion); // only 3.0 segments can have a null version
+            ir.Dispose();
+            currentDir.Dispose();
+
+            IComparer<string> comparer = StringHelper.VersionComparer;
+
+            // now check all the old indexes, their version should be < the current version
+            foreach (string name in OldNames)
+            {
+                Directory dir = OldIndexDirs[name];
+                DirectoryReader r = DirectoryReader.Open(dir);
+                foreach (AtomicReaderContext context in r.Leaves)
+                {
+                    air = (SegmentReader)context.Reader;
+                    string oldVersion = air.SegmentInfo.Info.Version;
+                    // TODO: does preflex codec actually set "3.0" here? this is safe to do I think.
+                    // Assert.IsNotNull(oldVersion);
+                    Assert.IsTrue(oldVersion == null || comparer.Compare(oldVersion, currentVersion) < 0, "current Constants.LUCENE_MAIN_VERSION is <= an old index: did you forget to bump it?!");
+                }
+                r.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestNumericFields()
+        {
+            foreach (string name in OldNames)
+            {
+
+                Directory dir = OldIndexDirs[name];
+                IndexReader reader = DirectoryReader.Open(dir);
+                IndexSearcher searcher = new IndexSearcher(reader);
+
+                for (int id = 10; id < 15; id++)
+                {
+                    ScoreDoc[] hits = searcher.Search(NumericRangeQuery.NewInt32Range("trieInt", 4, Convert.ToInt32(id), Convert.ToInt32(id), true, true), 100).ScoreDocs;
+                    Assert.AreEqual(1, hits.Length, "wrong number of hits");
+                    Document d = searcher.Doc(hits[0].Doc);
+                    Assert.AreEqual(Convert.ToString(id), d.Get("id"));
+
+                    hits = searcher.Search(NumericRangeQuery.NewInt64Range("trieLong", 4, Convert.ToInt64(id), Convert.ToInt64(id), true, true), 100).ScoreDocs;
+                    Assert.AreEqual(1, hits.Length, "wrong number of hits");
+                    d = searcher.Doc(hits[0].Doc);
+                    Assert.AreEqual(Convert.ToString(id), d.Get("id"));
+                }
+
+                // check that also lower-precision fields are ok
+                ScoreDoc[] hits_ = searcher.Search(NumericRangeQuery.NewInt32Range("trieInt", 4, int.MinValue, int.MaxValue, false, false), 100).ScoreDocs;
+                Assert.AreEqual(34, hits_.Length, "wrong number of hits");
+
+                hits_ = searcher.Search(NumericRangeQuery.NewInt64Range("trieLong", 4, long.MinValue, long.MaxValue, false, false), 100).ScoreDocs;
+                Assert.AreEqual(34, hits_.Length, "wrong number of hits");
+
+                // check decoding into field cache
+                FieldCache.Int32s fci = FieldCache.DEFAULT.GetInt32s(SlowCompositeReaderWrapper.Wrap(searcher.IndexReader), "trieInt", false);
+                int maxDoc = searcher.IndexReader.MaxDoc;
+                for (int doc = 0; doc < maxDoc; doc++)
+                {
+                    int val = fci.Get(doc);
+                    Assert.IsTrue(val >= 0 && val < 35, "value in id bounds");
+                }
+
+                FieldCache.Int64s fcl = FieldCache.DEFAULT.GetInt64s(SlowCompositeReaderWrapper.Wrap(searcher.IndexReader), "trieLong", false);
+                for (int doc = 0; doc < maxDoc; doc++)
+                {
+                    long val = fcl.Get(doc);
+                    Assert.IsTrue(val >= 0L && val < 35L, "value in id bounds");
+                }
+
+                reader.Dispose();
+            }
+        }
+
+        private int CheckAllSegmentsUpgraded(Directory dir)
+        {
+            SegmentInfos infos = new SegmentInfos();
+            infos.Read(dir);
+            if (VERBOSE)
+            {
+                Console.WriteLine("checkAllSegmentsUpgraded: " + infos);
+            }
+            foreach (SegmentCommitInfo si in infos.Segments)
+            {
+                Assert.AreEqual(Constants.LUCENE_MAIN_VERSION, si.Info.Version);
+            }
+            return infos.Count;
+        }
+
+        private int GetNumberOfSegments(Directory dir)
+        {
+            SegmentInfos infos = new SegmentInfos();
+            infos.Read(dir);
+            return infos.Count;
+        }
+
+        [Test]
+        public virtual void TestUpgradeOldIndex()
+        {
+            IList<string> names = new List<string>(OldNames.Length + OldSingleSegmentNames.Length);
+            names.AddRange(Arrays.AsList(OldNames));
+            names.AddRange(Arrays.AsList(OldSingleSegmentNames));
+            foreach (string name in names)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("testUpgradeOldIndex: index=" + name);
+                }
+                Directory dir = NewDirectory(OldIndexDirs[name]);
+
+                (new IndexUpgrader(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null), false)).Upgrade();
+
+                CheckAllSegmentsUpgraded(dir);
+
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestUpgradeOldSingleSegmentIndexWithAdditions()
+        {
+            foreach (string name in OldSingleSegmentNames)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("testUpgradeOldSingleSegmentIndexWithAdditions: index=" + name);
+                }
+                Directory dir = NewDirectory(OldIndexDirs[name]);
+
+                Assert.AreEqual(1, GetNumberOfSegments(dir), "Original index must be single segment");
+
+                // create a bunch of dummy segments
+                int id = 40;
+                RAMDirectory ramDir = new RAMDirectory();
+                for (int i = 0; i < 3; i++)
+                {
+                    // only use Log- or TieredMergePolicy, to make document addition predictable and not suddenly merge:
+                    MergePolicy mp = Random().NextBoolean() ? (MergePolicy)NewLogMergePolicy() : NewTieredMergePolicy();
+                    IndexWriterConfig iwc = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(mp);
+                    IndexWriter w = new IndexWriter(ramDir, iwc);
+                    // add few more docs:
+                    for (int j = 0; j < RANDOM_MULTIPLIER * Random().Next(30); j++)
+                    {
+                        AddDoc(w, id++);
+                    }
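+                    // Dispose(false): do not wait for pending background merges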
+                    w.Dispose(false);
+                }
+
+                // add dummy segments (which are all in current
+                // version) to single segment index
+                MergePolicy mp_ = Random().NextBoolean() ? (MergePolicy)NewLogMergePolicy() : NewTieredMergePolicy();
+                IndexWriterConfig iwc_ = (new IndexWriterConfig(TEST_VERSION_CURRENT, null)).SetMergePolicy(mp_);
+                IndexWriter w_ = new IndexWriter(dir, iwc_);
+                w_.AddIndexes(ramDir);
+                w_.Dispose(false);
+
+                // determine count of segments in modified index
+                int origSegCount = GetNumberOfSegments(dir);
+
+                (new IndexUpgrader(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null), false)).Upgrade();
+
+                int segCount = CheckAllSegmentsUpgraded(dir);
+                Assert.AreEqual(origSegCount, segCount, "Index must still contain the same number of segments, as only one segment was upgraded and nothing else merged");
+
+                dir.Dispose();
+            }
+        }
+
+        public const string SurrogatesIndexName = "index.36.surrogates.zip";
+
+        [Test]
+        public virtual void TestSurrogates()
+        {
+            DirectoryInfo oldIndexDir = CreateTempDir("surrogates");
+            using (Stream dataFile = this.GetType().Assembly.GetManifestResourceStream(CURRENT_RESOURCE_DIRECTORY + SurrogatesIndexName))
+            {
+                TestUtil.Unzip(dataFile, oldIndexDir);
+            }
+            Directory dir = NewFSDirectory(oldIndexDir);
+            // TODO: more tests
+            TestUtil.CheckIndex(dir);
+            dir.Dispose();
+        }
+
+        /* 
+         * Index with negative positions (LUCENE-1542)
+         * Created with this code, using a 2.4.0 jar, then upgraded with 3.6 upgrader:
+         *
+         * public class CreateBogusIndexes {
+         *   public static void main(String args[]) throws Exception {
+         *     Directory d = FSDirectory.getDirectory("/tmp/bogus24");
+         *     IndexWriter iw = new IndexWriter(d, new StandardAnalyzer());
+         *     Document doc = new Document();
+         *     Token brokenToken = new Token("broken", 0, 3);
+         *     brokenToken.setPositionIncrement(0);
+         *     Token okToken = new Token("ok", 0, 2);
+         *     doc.Add(new Field("field1", new CannedTokenStream(brokenToken), Field.TermVector.NO));
+         *     doc.Add(new Field("field2", new CannedTokenStream(brokenToken), Field.TermVector.WITH_POSITIONS));
+         *     doc.Add(new Field("field3", new CannedTokenStream(brokenToken, okToken), Field.TermVector.NO));
+         *     doc.Add(new Field("field4", new CannedTokenStream(brokenToken, okToken), Field.TermVector.WITH_POSITIONS));
+         *     iw.AddDocument(doc);
+         *     doc = new Document();
+         *     doc.Add(new Field("field1", "just more text, not broken", Field.Store.NO, Field.Index.ANALYZED));
+         *     doc.Add(new Field("field2", "just more text, not broken", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
+         *     doc.Add(new Field("field3", "just more text, not broken", Field.Store.NO, Field.Index.ANALYZED));
+         *     doc.Add(new Field("field4", "just more text, not broken", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
+         *     iw.AddDocument(doc);
+         *     iw.Dispose();
+         *     d.Dispose();
+         *   }
+         * 
+         *   static class CannedTokenStream extends TokenStream {
+         *     private final Token[] tokens;
+         *     private int upto = 0;
+         *  
+         *     CannedTokenStream(Token... tokens) {
+         *       this.tokens = tokens;
+         *     }
+         *  
+         *     @Override
+         *     public Token next() {
+         *       if (upto < tokens.Length) {
+         *         return tokens[upto++];
+         *       } else {
+         *         return null;
+         *       }
+         *     }
+         *   }
+         * }
+         */
+        public const string Bogus24IndexName = "bogus24.upgraded.to.36.zip";
+
+        [Test]
+        public virtual void TestNegativePositions()
+        {
+            DirectoryInfo oldIndexDir = CreateTempDir("negatives");
+            using (Stream dataFile = this.GetType().Assembly.GetManifestResourceStream(CURRENT_RESOURCE_DIRECTORY + Bogus24IndexName))
+            {
+                TestUtil.Unzip(dataFile, oldIndexDir);
+            }
+            Directory dir = NewFSDirectory(oldIndexDir);
+            DirectoryReader ir = DirectoryReader.Open(dir);
+            IndexSearcher @is = new IndexSearcher(ir);
+            PhraseQuery pq = new PhraseQuery();
+            pq.Add(new Term("field3", "more"));
+            pq.Add(new Term("field3", "text"));
+            TopDocs td = @is.Search(pq, 10);
+            Assert.AreEqual(1, td.TotalHits);
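+            // SlowCompositeReaderWrapper exposes the composite reader as a single AtomicReader so we can pull a positions enum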
+            AtomicReader wrapper = SlowCompositeReaderWrapper.Wrap(ir);
+            DocsAndPositionsEnum de = wrapper.TermPositionsEnum(new Term("field3", "broken"));
+            Debug.Assert(de != null);
+            Assert.AreEqual(0, de.NextDoc());
+            Assert.AreEqual(0, de.NextPosition());
+            ir.Dispose();
+            TestUtil.CheckIndex(dir);
+            dir.Dispose();
+        }
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs b/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs
new file mode 100644
index 0000000..070fa08
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs
@@ -0,0 +1,210 @@
+using System;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
+using System.Text;
+using System.Threading;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Lucene.Net.Documents.Document;
+    using Field = Lucene.Net.Documents.Field;
+    using FieldType = Lucene.Net.Documents.FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// Simple test that adds numeric terms, where each term has the
+    /// totalTermFreq of its integer value, and checks that the totalTermFreq is correct.
+    /// </summary>
+    // TODO: somehow factor this with BagOfPostings? its almost the same
+    [SuppressCodecs("Direct", "Memory", "Lucene3x")] // at night this makes like 200k/300k docs and will make Direct's heart beat!
+                                                     // Lucene3x doesn't have totalTermFreq, so the test isn't interesting there.
+    [TestFixture]
+    public class TestBagOfPositions : LuceneTestCase
+    {
+        [Test]
+        public virtual void Test()
+        {
+            IList<string> postingsList = new List<string>();
+            int numTerms = AtLeast(300);
+            int maxTermsPerDoc = TestUtil.NextInt(Random(), 10, 20);
+            bool isSimpleText = "SimpleText".Equals(TestUtil.GetPostingsFormat("field"));
+
+            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+
+            if ((isSimpleText || iwc.MergePolicy is MockRandomMergePolicy) && (TEST_NIGHTLY || RANDOM_MULTIPLIER > 1))
+            {
+                // Otherwise test can take way too long (> 2 hours)
+                numTerms /= 2;
+            }
+            if (VERBOSE)
+            {
+                Console.WriteLine("maxTermsPerDoc=" + maxTermsPerDoc);
+                Console.WriteLine("numTerms=" + numTerms);
+            }
+            for (int i = 0; i < numTerms; i++)
+            {
+                string term = Convert.ToString(i);
+                for (int j = 0; j < i; j++)
+                {
+                    postingsList.Add(term);
+                }
+            }
+
+            Collections.Shuffle(postingsList);
+
+            ConcurrentQueue<string> postings = new ConcurrentQueue<string>(postingsList);
+
+            Directory dir = NewFSDirectory(CreateTempDir(GetFullMethodName()));
+
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);
+
+            int threadCount = TestUtil.NextInt(Random(), 1, 5);
+            if (VERBOSE)
+            {
+                Console.WriteLine("config: " + iw.w.Config);
+                Console.WriteLine("threadCount=" + threadCount);
+            }
+
+            Field prototype = NewTextField("field", "", Field.Store.NO);
+            FieldType fieldType = new FieldType((FieldType)prototype.FieldType);
+            if (Random().NextBoolean())
+            {
+                fieldType.OmitNorms = true;
+            }
+            int options = Random().Next(3);
+            if (options == 0)
+            {
+                fieldType.IndexOptions = IndexOptions.DOCS_AND_FREQS; // we dont actually need positions
+                fieldType.StoreTermVectors = true; // but enforce term vectors when we do this so we check SOMETHING
+            }
+            else if (options == 1 && !DoesntSupportOffsets.Contains(TestUtil.GetPostingsFormat("field")))
+            {
+                fieldType.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
+            }
+            // else just positions
+
+            ThreadClass[] threads = new ThreadClass[threadCount];
+            CountdownEvent startingGun = new CountdownEvent(1);
+
+            for (int threadID = 0; threadID < threadCount; threadID++)
+            {
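+                // give each thread its own seeded Random; the shared test Random is not safe for concurrent use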
+                Random threadRandom = new Random(Random().Next());
+                Document document = new Document();
+                Field field = new Field("field", "", fieldType);
+                document.Add(field);
+                threads[threadID] = new ThreadAnonymousInnerClassHelper(this, numTerms, maxTermsPerDoc, postings, iw, startingGun, threadRandom, document, field);
+                threads[threadID].Start();
+            }
+            startingGun.Signal();
+            foreach (ThreadClass t in threads)
+            {
+                t.Join();
+            }
+
+            iw.ForceMerge(1);
+            DirectoryReader ir = iw.Reader;
+            Assert.AreEqual(1, ir.Leaves.Count);
+            AtomicReader air = (AtomicReader)ir.Leaves[0].Reader;
+            Terms terms = air.Terms("field");
+            // numTerms-1 because there cannot be a term 0 with 0 postings:
+            Assert.AreEqual(numTerms - 1, terms.Count);
+            TermsEnum termsEnum = terms.GetIterator(null);
+            BytesRef termBR;
+            while ((termBR = termsEnum.Next()) != null)
+            {
+                int value = Convert.ToInt32(termBR.Utf8ToString());
+                Assert.AreEqual(value, termsEnum.TotalTermFreq);
+                // don't really need to check more than this, as CheckIndex
+                // will verify that totalTermFreq == total number of positions seen
+                // from a docsAndPositionsEnum.
+            }
+            ir.Dispose();
+            iw.Dispose();
+            dir.Dispose();
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestBagOfPositions OuterInstance;
+
+            private int NumTerms;
+            private int MaxTermsPerDoc;
+            private ConcurrentQueue<string> Postings;
+            private RandomIndexWriter Iw;
+            private CountdownEvent StartingGun;
+            private Random ThreadRandom;
+            private Document Document;
+            private Field Field;
+
+            public ThreadAnonymousInnerClassHelper(TestBagOfPositions outerInstance, int numTerms, int maxTermsPerDoc, ConcurrentQueue<string> postings, RandomIndexWriter iw, CountdownEvent startingGun, Random threadRandom, Document document, Field field)
+            {
+                this.OuterInstance = outerInstance;
+                this.NumTerms = numTerms;
+                this.MaxTermsPerDoc = maxTermsPerDoc;
+                this.Postings = postings;
+                this.Iw = iw;
+                this.StartingGun = startingGun;
+                this.ThreadRandom = threadRandom;
+                this.Document = document;
+                this.Field = field;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    StartingGun.Wait();
+                    while (!Postings.IsEmpty)
+                    {
+                        StringBuilder text = new StringBuilder();
+                        int numTerms = ThreadRandom.Next(MaxTermsPerDoc);
+                        for (int i = 0; i < numTerms; i++)
+                        {
+                            string token;
+                            if (!Postings.TryDequeue(out token))
+                            {
+                                break;
+                            }
+                            text.Append(' ');
+                            text.Append(token);
+                        }
+                        Field.SetStringValue(text.ToString());
+                        Iw.AddDocument(Document);
+                    }
+                }
+                catch (Exception e)
+                {
+                    throw new Exception(e.Message, e);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs b/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs
new file mode 100644
index 0000000..6e87c10
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs
@@ -0,0 +1,193 @@
+using System;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
+using System.Text;
+using System.Threading;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Lucene.Net.Documents.Document;
+    using Field = Lucene.Net.Documents.Field;
+    using Lucene3xCodec = Lucene.Net.Codecs.Lucene3x.Lucene3xCodec;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// Simple test that adds numeric terms, where each term has the
+    /// docFreq of its integer value, and checks that the docFreq is correct.
+    /// </summary>
+    [SuppressCodecs("Direct", "Memory")]
+    [TestFixture]
+    public class TestBagOfPostings : LuceneTestCase // at night this makes like 200k/300k docs and will make Direct's heart beat!
+    {
+        [Test]
+        public virtual void Test()
+        {
+            IList<string> postingsList = new List<string>();
+            int numTerms = AtLeast(300);
+            int maxTermsPerDoc = TestUtil.NextInt(Random(), 10, 20);
+
+            bool isSimpleText = "SimpleText".Equals(TestUtil.GetPostingsFormat("field"));
+
+            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+
+            if ((isSimpleText || iwc.MergePolicy is MockRandomMergePolicy) && (TEST_NIGHTLY || RANDOM_MULTIPLIER > 1))
+            {
+                // Otherwise test can take way too long (> 2 hours)
+                numTerms /= 2;
+            }
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("maxTermsPerDoc=" + maxTermsPerDoc);
+                Console.WriteLine("numTerms=" + numTerms);
+            }
+
+            for (int i = 0; i < numTerms; i++)
+            {
+                string term = Convert.ToString(i);
+                for (int j = 0; j < i; j++)
+                {
+                    postingsList.Add(term);
+                }
+            }
+            Collections.Shuffle(postingsList);
+
+            ConcurrentQueue<string> postings = new ConcurrentQueue<string>(postingsList);
+
+            Directory dir = NewFSDirectory(CreateTempDir("bagofpostings"));
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);
+
+            int threadCount = TestUtil.NextInt(Random(), 1, 5);
+            if (VERBOSE)
+            {
+                Console.WriteLine("config: " + iw.w.Config);
+                Console.WriteLine("threadCount=" + threadCount);
+            }
+
+            ThreadClass[] threads = new ThreadClass[threadCount];
+            CountdownEvent startingGun = new CountdownEvent(1);
+
+            for (int threadID = 0; threadID < threadCount; threadID++)
+            {
+                threads[threadID] = new ThreadAnonymousInnerClassHelper(this, maxTermsPerDoc, postings, iw, startingGun);
+                threads[threadID].Start();
+            }
+            startingGun.Signal();
+            foreach (ThreadClass t in threads)
+            {
+                t.Join();
+            }
+
+            iw.ForceMerge(1);
+            DirectoryReader ir = iw.Reader;
+            Assert.AreEqual(1, ir.Leaves.Count);
+            AtomicReader air = (AtomicReader)ir.Leaves[0].Reader;
+            Terms terms = air.Terms("field");
+            // numTerms-1 because there cannot be a term 0 with 0 postings:
+#pragma warning disable 612, 618
+            Assert.AreEqual(numTerms - 1, air.Fields.UniqueTermCount);
+            if (!(iwc.Codec is Lucene3xCodec))
+#pragma warning restore 612, 618
+            {
+                Assert.AreEqual(numTerms - 1, terms.Count);
+            }
+            TermsEnum termsEnum = terms.GetIterator(null);
+            BytesRef term_;
+            while ((term_ = termsEnum.Next()) != null)
+            {
+                int value = Convert.ToInt32(term_.Utf8ToString());
+                Assert.AreEqual(value, termsEnum.DocFreq);
+                // don't really need to check more than this, as CheckIndex
+                // will verify that docFreq == actual number of documents seen
+                // from a docsAndPositionsEnum.
+            }
+            ir.Dispose();
+            iw.Dispose();
+            dir.Dispose();
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestBagOfPostings OuterInstance;
+
+            private int MaxTermsPerDoc;
+            private ConcurrentQueue<string> Postings;
+            private RandomIndexWriter Iw;
+            private CountdownEvent StartingGun;
+
+            public ThreadAnonymousInnerClassHelper(TestBagOfPostings outerInstance, int maxTermsPerDoc, ConcurrentQueue<string> postings, RandomIndexWriter iw, CountdownEvent startingGun)
+            {
+                this.OuterInstance = outerInstance;
+                this.MaxTermsPerDoc = maxTermsPerDoc;
+                this.Postings = postings;
+                this.Iw = iw;
+                this.StartingGun = startingGun;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    Document document = new Document();
+                    Field field = OuterInstance.NewTextField("field", "", Field.Store.NO);
+                    document.Add(field);
+                    StartingGun.Wait();
+                    while (!Postings.IsEmpty)
+                    {
+                        StringBuilder text = new StringBuilder();
+                        HashSet<string> visited = new HashSet<string>();
+                        for (int i = 0; i < MaxTermsPerDoc; i++)
+                        {
+                            string token;
+                            if (!Postings.TryDequeue(out token))
+                            {
+                                break;
+                            }
+                            if (visited.Contains(token))
+                            {
+                                // Put it back:
+                                Postings.Enqueue(token);
+                                break;
+                            }
+                            text.Append(' ');
+                            text.Append(token);
+                            visited.Add(token);
+                        }
+                        field.SetStringValue(text.ToString());
+                        Iw.AddDocument(document);
+                    }
+                }
+                catch (Exception e)
+                {
+                    throw new Exception(e.Message, e);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file


[50/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs b/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs
new file mode 100644
index 0000000..73619d1
--- /dev/null
+++ b/src/Lucene.Net.Tests/Analysis/TestGraphTokenizers.cs
@@ -0,0 +1,728 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Text;
+
+namespace Lucene.Net.Analysis
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.IO;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Automaton = Lucene.Net.Util.Automaton.Automaton;
+    using BasicAutomata = Lucene.Net.Util.Automaton.BasicAutomata;
+    using BasicOperations = Lucene.Net.Util.Automaton.BasicOperations;
+
+    [TestFixture]
+    public class TestGraphTokenizers : BaseTokenStreamTestCase
+    {
+        // Makes a graph TokenStream from the string; separate
+        // positions with single space, multiple tokens at the same
+        // position with /, and add optional position length with
+        // :.  EG "a b c" is a simple chain, "a/x b c" adds 'x'
+        // over 'a' at position 0 with posLen=1, "a/x:3 b c" adds
+        // 'x' over a with posLen=3.  Tokens are in normal-form!
+        // So, offsets are computed based on the first token at a
+        // given position.  NOTE: each token must be a single
+        // character!  We assume this when computing offsets...
+
+        // NOTE: all input tokens must be length 1!!!  this means
+        // you cannot turn on MockCharFilter when random
+        // testing...
+
+        private class GraphTokenizer : Tokenizer
+        {
+            internal IList<Token> Tokens;
+            internal int Upto;
+            internal int InputLength;
+
+            internal readonly ICharTermAttribute TermAtt;
+            internal readonly IOffsetAttribute OffsetAtt;
+            internal readonly IPositionIncrementAttribute PosIncrAtt;
+            internal readonly IPositionLengthAttribute PosLengthAtt;
+
+            public GraphTokenizer(TextReader input)
+                : base(input)
+            {
+                TermAtt = AddAttribute<ICharTermAttribute>();
+                OffsetAtt = AddAttribute<IOffsetAttribute>();
+                PosIncrAtt = AddAttribute<IPositionIncrementAttribute>();
+                PosLengthAtt = AddAttribute<IPositionLengthAttribute>();
+            }
+
+            public override void Reset()
+            {
+                base.Reset();
+                Tokens = null;
+                Upto = 0;
+            }
+
+            public sealed override bool IncrementToken()
+            {
+                if (Tokens == null)
+                {
+                    FillTokens();
+                }
+                //System.out.println("graphTokenizer: incr upto=" + upto + " vs " + tokens.size());
+                if (Upto == Tokens.Count)
+                {
+                    //System.out.println("  END @ " + tokens.size());
+                    return false;
+                }
+                Token t = Tokens[Upto++];
+                //System.out.println("  return token=" + t);
+                ClearAttributes();
+                TermAtt.Append(t.ToString());
+                OffsetAtt.SetOffset(t.StartOffset, t.EndOffset);
+                PosIncrAtt.PositionIncrement = t.PositionIncrement;
+                PosLengthAtt.PositionLength = t.PositionLength;
+                return true;
+            }
+
+            public override void End()
+            {
+                base.End();
+                // NOTE: somewhat... hackish, but we need this to
+                // satisfy BTSTC:
+                int lastOffset;
+                if (Tokens != null && Tokens.Count > 0)
+                {
+                    lastOffset = Tokens[Tokens.Count - 1].EndOffset;
+                }
+                else
+                {
+                    lastOffset = 0;
+                }
+                OffsetAtt.SetOffset(CorrectOffset(lastOffset), CorrectOffset(InputLength));
+            }
+
+            internal virtual void FillTokens()
+            {
+                StringBuilder sb = new StringBuilder();
+                char[] buffer = new char[256];
+                while (true)
+                {
+                    int count = m_input.Read(buffer, 0, buffer.Length);
+
+                    //.NET TextReader.Read(buff, int, int) returns 0, not -1 on no chars
+                    // but in some cases, such as MockCharFilter, it overloads read and returns -1
+                    // so we should handle both 0 and -1 values
+                    if (count <= 0)
+                    {
+                        break;
+                    }
+                    sb.Append(buffer, 0, count);
+                    //System.out.println("got count=" + count);
+                }
+                //System.out.println("fillTokens: " + sb);
+
+                InputLength = sb.Length;
+
+                string[] parts = sb.ToString().Split(' ');
+
+                Tokens = new List<Token>();
+                int pos = 0;
+                int maxPos = -1;
+                int offset = 0;
+                //System.out.println("again");
+                foreach (string part in parts)
+                {
+                    string[] overlapped = part.Split('/');
+                    bool firstAtPos = true;
+                    int minPosLength = int.MaxValue;
+                    foreach (string part2 in overlapped)
+                    {
+                        int colonIndex = part2.IndexOf(':');
+                        string token;
+                        int posLength;
+                        if (colonIndex != -1)
+                        {
+                            token = part2.Substring(0, colonIndex);
+                            posLength = Convert.ToInt32(part2.Substring(1 + colonIndex));
+                        }
+                        else
+                        {
+                            token = part2;
+                            posLength = 1;
+                        }
+                        maxPos = Math.Max(maxPos, pos + posLength);
+                        minPosLength = Math.Min(minPosLength, posLength);
+                        Token t = new Token(token, offset, offset + 2 * posLength - 1);
+                        t.PositionLength = posLength;
+                        t.PositionIncrement = firstAtPos ? 1 : 0;
+                        firstAtPos = false;
+                        //System.out.println("  add token=" + t + " startOff=" + t.StartOffset + " endOff=" + t.EndOffset);
+                        Tokens.Add(t);
+                    }
+                    pos += minPosLength;
+                    offset = 2 * pos;
+                }
+                Debug.Assert(maxPos <= pos, "input string malformed: posLength>1 tokens hang over the end");
+            }
+        }
+
+        [Test]
+        public virtual void TestMockGraphTokenFilterBasic()
+        {
+            for (int iter = 0; iter < 10 * RANDOM_MULTIPLIER; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: iter=" + iter);
+                }
+
+                // Make new analyzer each time, because MGTF has fixed
+                // seed:
+                Analyzer a = new AnalyzerAnonymousInnerClassHelper(this);
+
+                CheckAnalysisConsistency(Random(), a, false, "a b c d e f g h i j k");
+            }
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private readonly TestGraphTokenizers OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper(TestGraphTokenizers outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                TokenStream t2 = new MockGraphTokenFilter(Random(), t);
+                return new TokenStreamComponents(t, t2);
+            }
+        }
+
+        [Test]
+        public virtual void TestMockGraphTokenFilterOnGraphInput()
+        {
+            for (int iter = 0; iter < 100 * RANDOM_MULTIPLIER; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: iter=" + iter);
+                }
+
+                // Make new analyzer each time, because MGTF has fixed
+                // seed:
+                Analyzer a = new AnalyzerAnonymousInnerClassHelper2(this);
+
+                CheckAnalysisConsistency(Random(), a, false, "a/x:3 c/y:2 d e f/z:4 g h i j k");
+            }
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper2 : Analyzer
+        {
+            private readonly TestGraphTokenizers OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper2(TestGraphTokenizers outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer t = new GraphTokenizer(reader);
+                TokenStream t2 = new MockGraphTokenFilter(Random(), t);
+                return new TokenStreamComponents(t, t2);
+            }
+        }
+
+        // Just deletes token 'a' (leaving a hole):
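+        // The position increments of the removed tokens accumulate in
+        // PendingPosInc and are added to the next surviving token (or to the
+        // final state in End()), so the hole stays visible downstream.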
+        private sealed class RemoveATokens : TokenFilter
+        {
+            internal int PendingPosInc;
+
+            internal readonly ICharTermAttribute TermAtt;
+            internal readonly IPositionIncrementAttribute PosIncAtt;
+
+            public RemoveATokens(TokenStream @in)
+                : base(@in)
+            {
+                TermAtt = AddAttribute<ICharTermAttribute>();
+                PosIncAtt = AddAttribute<IPositionIncrementAttribute>();
+            }
+
+            public override void Reset()
+            {
+                base.Reset();
+                PendingPosInc = 0;
+            }
+
+            public override void End()
+            {
+                base.End();
+                PosIncAtt.PositionIncrement = PendingPosInc + PosIncAtt.PositionIncrement;
+            }
+
+            public override bool IncrementToken()
+            {
+                while (true)
+                {
+                    bool gotOne = m_input.IncrementToken();
+                    if (!gotOne)
+                    {
+                        return false;
+                    }
+                    else if (TermAtt.ToString().Equals("a"))
+                    {
+                        PendingPosInc += PosIncAtt.PositionIncrement;
+                    }
+                    else
+                    {
+                        PosIncAtt.PositionIncrement = PendingPosInc + PosIncAtt.PositionIncrement;
+                        PendingPosInc = 0;
+                        return true;
+                    }
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestMockGraphTokenFilterBeforeHoles()
+        {
+            for (int iter = 0; iter < 100 * RANDOM_MULTIPLIER; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: iter=" + iter);
+                }
+
+                // Make new analyzer each time, because MGTF has fixed
+                // seed:
+                Analyzer a = new MGTFBHAnalyzerAnonymousInnerClassHelper(this);
+
+                Random random = Random();
+                CheckAnalysisConsistency(random, a, false, "a b c d e f g h i j k");
+                CheckAnalysisConsistency(random, a, false, "x y a b c d e f g h i j k");
+                CheckAnalysisConsistency(random, a, false, "a b c d e f g h i j k a");
+                CheckAnalysisConsistency(random, a, false, "a b c d e f g h i j k a x y");
+            }
+        }
+
+        private class MGTFBHAnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private readonly TestGraphTokenizers OuterInstance;
+
+            public MGTFBHAnalyzerAnonymousInnerClassHelper(TestGraphTokenizers outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                TokenStream t2 = new MockGraphTokenFilter(Random(), t);
+                TokenStream t3 = new RemoveATokens(t2);
+                return new TokenStreamComponents(t, t3);
+            }
+        }
+
+        [Test]
+        public virtual void TestMockGraphTokenFilterAfterHoles()
+        {
+            for (int iter = 0; iter < 100 * RANDOM_MULTIPLIER; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: iter=" + iter);
+                }
+
+                // Make new analyzer each time, because MGTF has fixed
+                // seed:
+                Analyzer a = new MGTFAHAnalyzerAnonymousInnerClassHelper2(this);
+
+                Random random = Random();
+                CheckAnalysisConsistency(random, a, false, "a b c d e f g h i j k");
+                CheckAnalysisConsistency(random, a, false, "x y a b c d e f g h i j k");
+                CheckAnalysisConsistency(random, a, false, "a b c d e f g h i j k a");
+                CheckAnalysisConsistency(random, a, false, "a b c d e f g h i j k a x y");
+            }
+        }
+
+        private class MGTFAHAnalyzerAnonymousInnerClassHelper2 : Analyzer
+        {
+            private readonly TestGraphTokenizers OuterInstance;
+
+            public MGTFAHAnalyzerAnonymousInnerClassHelper2(TestGraphTokenizers outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                TokenStream t2 = new RemoveATokens(t);
+                TokenStream t3 = new MockGraphTokenFilter(Random(), t2);
+                return new TokenStreamComponents(t, t3);
+            }
+        }
+
+        [Test]
+        public virtual void TestMockGraphTokenFilterRandom()
+        {
+            for (int iter = 0; iter < 10 * RANDOM_MULTIPLIER; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: iter=" + iter);
+                }
+
+                // Make new analyzer each time, because MGTF has fixed
+                // seed:
+                Analyzer a = new AnalyzerAnonymousInnerClassHelper3(this);
+
+                Random random = Random();
+                CheckRandomData(random, a, 5, AtLeast(100));
+            }
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper3 : Analyzer
+        {
+            private readonly TestGraphTokenizers OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper3(TestGraphTokenizers outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                TokenStream t2 = new MockGraphTokenFilter(Random(), t);
+                return new TokenStreamComponents(t, t2);
+            }
+        }
+
+        // Two MockGraphTokenFilters
+        [Test]
+        public virtual void TestDoubleMockGraphTokenFilterRandom()
+        {
+            for (int iter = 0; iter < 10 * RANDOM_MULTIPLIER; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: iter=" + iter);
+                }
+
+                // Make new analyzer each time, because MGTF has fixed
+                // seed:
+                Analyzer a = new AnalyzerAnonymousInnerClassHelper4(this);
+
+                Random random = Random();
+                CheckRandomData(random, a, 5, AtLeast(100));
+            }
+        }
+
+        [Test]
+        public void TestMockTokenizerCtor()
+        {
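+            // smoke test: the single-argument constructor should not throw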
+            var sr = new StringReader("Hello");
+            var mt = new MockTokenizer(sr);
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper4 : Analyzer
+        {
+            private readonly TestGraphTokenizers OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper4(TestGraphTokenizers outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                TokenStream t1 = new MockGraphTokenFilter(Random(), t);
+                TokenStream t2 = new MockGraphTokenFilter(Random(), t1);
+                return new TokenStreamComponents(t, t2);
+            }
+        }
+
+        [Test]
+        public virtual void TestMockGraphTokenFilterBeforeHolesRandom()
+        {
+            for (int iter = 0; iter < 10 * RANDOM_MULTIPLIER; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: iter=" + iter);
+                }
+
+                // Make new analyzer each time, because MGTF has fixed
+                // seed:
+                Analyzer a = new AnalyzerAnonymousInnerClassHelper5(this);
+
+                Random random = Random();
+                CheckRandomData(random, a, 5, AtLeast(100));
+            }
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper5 : Analyzer
+        {
+            private readonly TestGraphTokenizers OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper5(TestGraphTokenizers outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                TokenStream t1 = new MockGraphTokenFilter(Random(), t);
+                TokenStream t2 = new MockHoleInjectingTokenFilter(Random(), t1);
+                return new TokenStreamComponents(t, t2);
+            }
+        }
+
+        [Test]
+        public virtual void TestMockGraphTokenFilterAfterHolesRandom()
+        {
+            for (int iter = 0; iter < 10 * RANDOM_MULTIPLIER; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: iter=" + iter);
+                }
+
+                // Make new analyzer each time, because MGTF has fixed
+                // seed:
+                Analyzer a = new AnalyzerAnonymousInnerClassHelper6(this);
+
+                Random random = Random();
+                CheckRandomData(random, a, 5, AtLeast(100));
+            }
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper6 : Analyzer
+        {
+            private readonly TestGraphTokenizers OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper6(TestGraphTokenizers outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                TokenStream t1 = new MockHoleInjectingTokenFilter(Random(), t);
+                TokenStream t2 = new MockGraphTokenFilter(Random(), t1);
+                return new TokenStreamComponents(t, t2);
+            }
+        }
+
+        private static Token Token(string term, int posInc, int posLength)
+        {
+            Token t = new Token(term, 0, 0);
+            t.PositionIncrement = posInc;
+            t.PositionLength = posLength;
+            return t;
+        }
+
+        private static Token Token(string term, int posInc, int posLength, int startOffset, int endOffset)
+        {
+            Token t = new Token(term, startOffset, endOffset);
+            t.PositionIncrement = posInc;
+            t.PositionLength = posLength;
+            return t;
+        }
+
+        [Test]
+        public virtual void TestSingleToken()
+        {
+            TokenStream ts = new CannedTokenStream(new Token[] { Token("abc", 1, 1) });
+            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
+            Automaton expected = BasicAutomata.MakeString("abc");
+            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
+        }
+
+        [Test]
+        public virtual void TestMultipleHoles()
+        {
+            TokenStream ts = new CannedTokenStream(new Token[] { Token("a", 1, 1), Token("b", 3, 1) });
+            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
+            Automaton expected = Join(S2a("a"), SEP_A, HOLE_A, SEP_A, HOLE_A, SEP_A, S2a("b"));
+            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
+        }
+
+        [Test]
+        public virtual void TestSynOverMultipleHoles()
+        {
+            TokenStream ts = new CannedTokenStream(new Token[] { Token("a", 1, 1), Token("x", 0, 3), Token("b", 3, 1) });
+            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
+            Automaton a1 = Join(S2a("a"), SEP_A, HOLE_A, SEP_A, HOLE_A, SEP_A, S2a("b"));
+            Automaton a2 = Join(S2a("x"), SEP_A, S2a("b"));
+            Automaton expected = BasicOperations.Union(a1, a2);
+            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
+        }
+
+        // for debugging!
+        /*
+        private static void toDot(Automaton a) throws IOException {
+          final String s = a.toDot();
+          Writer w = new OutputStreamWriter(new FileOutputStream("/x/tmp/out.dot"));
+          w.write(s);
+          w.close();
+          System.out.println("TEST: saved to /x/tmp/out.dot");
+        }
+        */
+
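+        // SEP_A matches the position-separator symbol that TokenStreamToAutomaton
+        // inserts between positions; HOLE_A matches the marker it emits for a
+        // deleted position (a hole). The expected automata below are built from these.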
+        private static readonly Automaton SEP_A = BasicAutomata.MakeChar(TokenStreamToAutomaton.POS_SEP);
+        private static readonly Automaton HOLE_A = BasicAutomata.MakeChar(TokenStreamToAutomaton.HOLE);
+
+        private Automaton Join(params string[] strings)
+        {
+            IList<Automaton> @as = new List<Automaton>();
+            foreach (string s in strings)
+            {
+                @as.Add(BasicAutomata.MakeString(s));
+                @as.Add(SEP_A);
+            }
+            @as.RemoveAt(@as.Count - 1);
+            return BasicOperations.Concatenate(@as);
+        }
+
+        private Automaton Join(params Automaton[] @as)
+        {
+            return BasicOperations.Concatenate(Arrays.AsList(@as));
+        }
+
+        private Automaton S2a(string s)
+        {
+            return BasicAutomata.MakeString(s);
+        }
+
+        [Test]
+        public virtual void TestTwoTokens()
+        {
+            TokenStream ts = new CannedTokenStream(new Token[] { Token("abc", 1, 1), Token("def", 1, 1) });
+            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
+            Automaton expected = Join("abc", "def");
+
+            //toDot(actual);
+            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
+        }
+
+        [Test]
+        public virtual void TestHole()
+        {
+            TokenStream ts = new CannedTokenStream(new Token[] { Token("abc", 1, 1), Token("def", 2, 1) });
+            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
+
+            Automaton expected = Join(S2a("abc"), SEP_A, HOLE_A, SEP_A, S2a("def"));
+
+            //toDot(actual);
+            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
+        }
+
+        [Test]
+        public virtual void TestOverlappedTokensSausage()
+        {
+            // Two tokens on top of each other (sausage):
+            TokenStream ts = new CannedTokenStream(new Token[] { Token("abc", 1, 1), Token("xyz", 0, 1) });
+            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
+            Automaton a1 = BasicAutomata.MakeString("abc");
+            Automaton a2 = BasicAutomata.MakeString("xyz");
+            Automaton expected = BasicOperations.Union(a1, a2);
+            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
+        }
+
+        [Test]
+        public virtual void TestOverlappedTokensLattice()
+        {
+            TokenStream ts = new CannedTokenStream(new Token[] { Token("abc", 1, 1), Token("xyz", 0, 2), Token("def", 1, 1) });
+            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
+            Automaton a1 = BasicAutomata.MakeString("xyz");
+            Automaton a2 = Join("abc", "def");
+
+            Automaton expected = BasicOperations.Union(a1, a2);
+            //toDot(actual);
+            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
+        }
+
+        [Test]
+        public virtual void TestSynOverHole()
+        {
+            TokenStream ts = new CannedTokenStream(new Token[] { Token("a", 1, 1), Token("X", 0, 2), Token("b", 2, 1) });
+            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
+            Automaton a1 = BasicOperations.Union(Join(S2a("a"), SEP_A, HOLE_A), BasicAutomata.MakeString("X"));
+            Automaton expected = BasicOperations.Concatenate(a1, Join(SEP_A, S2a("b")));
+            //toDot(actual);
+            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
+        }
+
+        [Test]
+        public virtual void TestSynOverHole2()
+        {
+            TokenStream ts = new CannedTokenStream(new Token[] { Token("xyz", 1, 1), Token("abc", 0, 3), Token("def", 2, 1) });
+            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
+            Automaton expected = BasicOperations.Union(Join(S2a("xyz"), SEP_A, HOLE_A, SEP_A, S2a("def")), BasicAutomata.MakeString("abc"));
+            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
+        }
+
+        [Test]
+        public virtual void TestOverlappedTokensLattice2()
+        {
+            TokenStream ts = new CannedTokenStream(new Token[] { Token("abc", 1, 1), Token("xyz", 0, 3), Token("def", 1, 1), Token("ghi", 1, 1) });
+            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
+            Automaton a1 = BasicAutomata.MakeString("xyz");
+            Automaton a2 = Join("abc", "def", "ghi");
+            Automaton expected = BasicOperations.Union(a1, a2);
+            //toDot(actual);
+            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
+        }
+
+        [Test]
+        public virtual void TestToDot()
+        {
+            TokenStream ts = new CannedTokenStream(new Token[] { Token("abc", 1, 1, 0, 4) });
+            StringWriter w = new StringWriter();
+            (new TokenStreamToDot("abcd", ts, (TextWriter)(w))).ToDot();
+            Assert.IsTrue(w.ToString().IndexOf("abc / abcd") != -1);
+        }
+
+        [Test]
+        public virtual void TestStartsWithHole()
+        {
+            TokenStream ts = new CannedTokenStream(new Token[] { Token("abc", 2, 1) });
+            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
+            Automaton expected = Join(HOLE_A, SEP_A, S2a("abc"));
+            //toDot(actual);
+            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
+        }
+
+        // TODO: testEndsWithHole... but we need posInc to set in TS.end()
+
+        [Test]
+        public virtual void TestSynHangingOverEnd()
+        {
+            TokenStream ts = new CannedTokenStream(new Token[] { Token("a", 1, 1), Token("X", 0, 10) });
+            Automaton actual = (new TokenStreamToAutomaton()).ToAutomaton(ts);
+            Automaton expected = BasicOperations.Union(BasicAutomata.MakeString("a"), BasicAutomata.MakeString("X"));
+            Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Analysis/TestLookaheadTokenFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Analysis/TestLookaheadTokenFilter.cs b/src/Lucene.Net.Tests/Analysis/TestLookaheadTokenFilter.cs
new file mode 100644
index 0000000..ee733bd
--- /dev/null
+++ b/src/Lucene.Net.Tests/Analysis/TestLookaheadTokenFilter.cs
@@ -0,0 +1,129 @@
+using Lucene.Net.Attributes;
+using Lucene.Net.Randomized.Generators;
+using NUnit.Framework;
+using System;
+using System.IO;
+
+namespace Lucene.Net.Analysis
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class TestLookaheadTokenFilter : BaseTokenStreamTestCase
+    {
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout attribute in NUnit for .NET Core.
+        [Timeout(int.MaxValue)]
+#endif
+        [Test, LongRunningTest, HasTimeout]
+        public virtual void TestRandomStrings()
+        {
+            Analyzer a = new AnalyzerAnonymousInnerClassHelper(this);
+            CheckRandomData(Random(), a, 200 * RANDOM_MULTIPLIER, 8192);
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private readonly TestLookaheadTokenFilter OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper(TestLookaheadTokenFilter outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Random random = Random();
+                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, random.NextBoolean());
+                TokenStream output = new MockRandomLookaheadTokenFilter(random, tokenizer);
+                return new TokenStreamComponents(tokenizer, output);
+            }
+        }
+
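+        // Exercises the LookaheadTokenFilter base class in the degenerate case
+        // where PeekToken() is never called: IncrementToken() simply forwards
+        // to NextToken(), which should then act as a straight pass-through.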
+        private class NeverPeeksLookaheadTokenFilter : LookaheadTokenFilter<LookaheadTokenFilter.Position>
+        {
+            public NeverPeeksLookaheadTokenFilter(TokenStream input)
+                : base(input)
+            {
+            }
+
+            protected internal override LookaheadTokenFilter.Position NewPosition()
+            {
+                return new LookaheadTokenFilter.Position();
+            }
+
+            public sealed override bool IncrementToken()
+            {
+                return NextToken();
+            }
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout attribute in NUnit for .NET Core.
+        [Timeout(int.MaxValue)]
+#endif
+        [Test, LongRunningTest, HasTimeout]
+        public virtual void TestNeverCallingPeek()
+        {
+            Analyzer a = new NCPAnalyzerAnonymousInnerClassHelper(this);
+            CheckRandomData(Random(), a, 200 * RANDOM_MULTIPLIER, 8192);
+        }
+
+        private class NCPAnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private readonly TestLookaheadTokenFilter OuterInstance;
+
+            public NCPAnalyzerAnonymousInnerClassHelper(TestLookaheadTokenFilter outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, Random().NextBoolean());
+                TokenStream output = new NeverPeeksLookaheadTokenFilter(tokenizer);
+                return new TokenStreamComponents(tokenizer, output);
+            }
+        }
+
+        [Test]
+        public virtual void TestMissedFirstToken()
+        {
+            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper2(this);
+
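+            // TrivialLookaheadFilter inserts a synthetic "<term>-huh?" token
+            // after each input token, so every original term is expected to be
+            // followed by its "-huh?" companion: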
+            AssertAnalyzesTo(analyzer, "Only he who is running knows .", new string[] { "Only", "Only-huh?", "he", "he-huh?", "who", "who-huh?", "is", "is-huh?", "running", "running-huh?", "knows", "knows-huh?", ".", ".-huh?" });
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper2 : Analyzer
+        {
+            private readonly TestLookaheadTokenFilter OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper2(TestLookaheadTokenFilter outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                TrivialLookaheadFilter filter = new TrivialLookaheadFilter(source);
+                return new TokenStreamComponents(source, filter);
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Analysis/TestMockAnalyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Analysis/TestMockAnalyzer.cs b/src/Lucene.Net.Tests/Analysis/TestMockAnalyzer.cs
new file mode 100644
index 0000000..cd8f315
--- /dev/null
+++ b/src/Lucene.Net.Tests/Analysis/TestMockAnalyzer.cs
@@ -0,0 +1,420 @@
+using System;
+using Lucene.Net.Attributes;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+
+namespace Lucene.Net.Analysis
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.IO;
+    using AtomicReader = Lucene.Net.Index.AtomicReader;
+    using Automaton = Lucene.Net.Util.Automaton.Automaton;
+    using AutomatonTestUtil = Lucene.Net.Util.Automaton.AutomatonTestUtil;
+    using BasicAutomata = Lucene.Net.Util.Automaton.BasicAutomata;
+    using BasicOperations = Lucene.Net.Util.Automaton.BasicOperations;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CharacterRunAutomaton = Lucene.Net.Util.Automaton.CharacterRunAutomaton;
+    using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using Fields = Lucene.Net.Index.Fields;
+    using FieldType = FieldType;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using RegExp = Lucene.Net.Util.Automaton.RegExp;
+    using Terms = Lucene.Net.Index.Terms;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestMockAnalyzer : BaseTokenStreamTestCase
+    {
+        /// <summary>
+        /// Test a configuration that behaves a lot like WhitespaceAnalyzer </summary>
+        [Test]
+        public virtual void TestWhitespace()
+        {
+            Analyzer a = new MockAnalyzer(Random());
+            AssertAnalyzesTo(a, "A bc defg hiJklmn opqrstuv wxy z ", new string[] { "a", "bc", "defg", "hijklmn", "opqrstuv", "wxy", "z" });
+            AssertAnalyzesTo(a, "aba cadaba shazam", new string[] { "aba", "cadaba", "shazam" });
+            AssertAnalyzesTo(a, "break on whitespace", new string[] { "break", "on", "whitespace" });
+        }
+
+        /// <summary>
+        /// Test a configuration that behaves a lot like SimpleAnalyzer </summary>
+        [Test]
+        public virtual void TestSimple()
+        {
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+            AssertAnalyzesTo(a, "a-bc123 defg+hijklmn567opqrstuv78wxy_z ", new string[] { "a", "bc", "defg", "hijklmn", "opqrstuv", "wxy", "z" });
+            AssertAnalyzesTo(a, "aba4cadaba-Shazam", new string[] { "aba", "cadaba", "shazam" });
+            AssertAnalyzesTo(a, "break+on/Letters", new string[] { "break", "on", "letters" });
+        }
+
+        /// <summary>
+        /// Test a configuration that behaves a lot like KeywordAnalyzer </summary>
+        [Test]
+        public virtual void TestKeyword()
+        {
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false);
+            AssertAnalyzesTo(a, "a-bc123 defg+hijklmn567opqrstuv78wxy_z ", new string[] { "a-bc123 defg+hijklmn567opqrstuv78wxy_z " });
+            AssertAnalyzesTo(a, "aba4cadaba-Shazam", new string[] { "aba4cadaba-Shazam" });
+            AssertAnalyzesTo(a, "break+on/Nothing", new string[] { "break+on/Nothing" });
+            // currently, though, it emits no tokens for the empty string: maybe we
+            // could emit one, but we don't want to emit tokens infinitely...
+            AssertAnalyzesTo(a, "", new string[0]);
+        }
+
+        // Test some regular expressions as tokenization patterns
+        /// <summary>
+        /// Test a configuration where each character is a term </summary>
+        [Test]
+        public virtual void TestSingleChar()
+        {
+            var single = new CharacterRunAutomaton((new RegExp(".")).ToAutomaton());
+            Analyzer a = new MockAnalyzer(Random(), single, false);
+            AssertAnalyzesTo(a, "foobar", new[] { "f", "o", "o", "b", "a", "r" }, new[] { 0, 1, 2, 3, 4, 5 }, new[] { 1, 2, 3, 4, 5, 6 });
+            CheckRandomData(Random(), a, 100);
+        }
+
+        /// <summary>
+        /// Test a configuration where two characters makes a term </summary>
+        [Test]
+        public virtual void TestTwoChars()
+        {
+            CharacterRunAutomaton single = new CharacterRunAutomaton((new RegExp("..")).ToAutomaton());
+            Analyzer a = new MockAnalyzer(Random(), single, false);
+            AssertAnalyzesTo(a, "foobar", new string[] { "fo", "ob", "ar" }, new int[] { 0, 2, 4 }, new int[] { 2, 4, 6 });
+            // make sure when last term is a "partial" match that End() is correct
+            AssertTokenStreamContents(a.TokenStream("bogus", new StringReader("fooba")), new string[] { "fo", "ob" }, new int[] { 0, 2 }, new int[] { 2, 4 }, new int[] { 1, 1 }, new int?(5));
+            CheckRandomData(Random(), a, 100);
+        }
+
+        /// <summary>
+        /// Test a configuration where three characters makes a term </summary>
+        [Test]
+        public virtual void TestThreeChars()
+        {
+            CharacterRunAutomaton single = new CharacterRunAutomaton((new RegExp("...")).ToAutomaton());
+            Analyzer a = new MockAnalyzer(Random(), single, false);
+            AssertAnalyzesTo(a, "foobar", new string[] { "foo", "bar" }, new int[] { 0, 3 }, new int[] { 3, 6 });
+            // make sure when last term is a "partial" match that End() is correct
+            AssertTokenStreamContents(a.TokenStream("bogus", new StringReader("fooba")), new string[] { "foo" }, new int[] { 0 }, new int[] { 3 }, new int[] { 1 }, new int?(5));
+            CheckRandomData(Random(), a, 100);
+        }
+
+        /// <summary>
+        /// Test a configuration where word starts with one uppercase </summary>
+        [Test]
+        public virtual void TestUppercase()
+        {
+            CharacterRunAutomaton single = new CharacterRunAutomaton((new RegExp("[A-Z][a-z]*")).ToAutomaton());
+            Analyzer a = new MockAnalyzer(Random(), single, false);
+            AssertAnalyzesTo(a, "FooBarBAZ", new string[] { "Foo", "Bar", "B", "A", "Z" }, new int[] { 0, 3, 6, 7, 8 }, new int[] { 3, 6, 7, 8, 9 });
+            AssertAnalyzesTo(a, "aFooBar", new string[] { "Foo", "Bar" }, new int[] { 1, 4 }, new int[] { 4, 7 });
+            CheckRandomData(Random(), a, 100);
+        }
+
+        /// <summary>
+        /// Test a configuration that behaves a lot like StopAnalyzer </summary>
+        [Test]
+        public virtual void TestStop()
+        {
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
+            AssertAnalyzesTo(a, "the quick brown a fox", new string[] { "quick", "brown", "fox" }, new int[] { 2, 1, 2 });
+        }
+
+        /// <summary>
+        /// Test a configuration that behaves a lot like KeepWordFilter </summary>
+        [Test]
+        public virtual void TestKeep()
+        {
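+            // MockAnalyzer's token filter removes tokens that match the given
+            // automaton, so to keep only "foo" and "bar" we pass the complement
+            // of their union.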
+            CharacterRunAutomaton keepWords = new CharacterRunAutomaton(BasicOperations.Complement(Automaton.Union(Arrays.AsList(BasicAutomata.MakeString("foo"), BasicAutomata.MakeString("bar")))));
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, keepWords);
+            AssertAnalyzesTo(a, "quick foo brown bar bar fox foo", new string[] { "foo", "bar", "bar", "foo" }, new int[] { 2, 2, 1, 2 });
+        }
+
+        /// <summary>
+        /// Test a configuration that behaves a lot like LengthFilter </summary>
+        [Test]
+        public virtual void TestLength()
+        {
+            CharacterRunAutomaton length5 = new CharacterRunAutomaton((new RegExp(".{5,}")).ToAutomaton());
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, true, length5);
+            AssertAnalyzesTo(a, "ok toolong fine notfine", new string[] { "ok", "fine" }, new int[] { 1, 2 });
+        }
+
+        /// <summary>
+        /// Test MockTokenizer encountering a too-long token </summary>
+        [Test]
+        public virtual void TestTooLongToken()
+        {
+            Analyzer whitespace = new AnalyzerAnonymousInnerClassHelper(this);
+            AssertTokenStreamContents(whitespace.TokenStream("bogus", new StringReader("test 123 toolong ok ")), new string[] { "test", "123", "toolo", "ng", "ok" }, new int[] { 0, 5, 9, 14, 17 }, new int[] { 4, 8, 14, 16, 19 }, new int?(20));
+            AssertTokenStreamContents(whitespace.TokenStream("bogus", new StringReader("test 123 toolo")), new string[] { "test", "123", "toolo" }, new int[] { 0, 5, 9 }, new int[] { 4, 8, 14 }, new int?(14));
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private readonly TestMockAnalyzer OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper(TestMockAnalyzer outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
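+                // the final argument caps the token length at 5 chars, so
+                // "toolong" is split into "toolo" + "ng" in the test above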
+                Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false, 5);
+                return new TokenStreamComponents(t, t);
+            }
+        }
+
+        [Test]
+        public virtual void TestLUCENE_3042()
+        {
+            string testString = "t";
+
+            Analyzer analyzer = new MockAnalyzer(Random());
+            Exception priorException = null;
+            TokenStream stream = analyzer.TokenStream("dummy", new StringReader(testString));
+            try
+            {
+                stream.Reset();
+                while (stream.IncrementToken())
+                {
+                    // consume
+                }
+                stream.End();
+            }
+            catch (Exception e)
+            {
+                priorException = e;
+            }
+            finally
+            {
+                IOUtils.CloseWhileHandlingException(priorException, stream);
+            }
+
+            AssertAnalyzesTo(analyzer, testString, new string[] { "t" });
+        }
+
+        /// <summary>
+        /// blast some random strings through the analyzer </summary>
+        [Test]
+        public virtual void TestRandomStrings()
+        {
+            CheckRandomData(Random(), new MockAnalyzer(Random()), AtLeast(1000));
+        }
+
+        /// <summary>
+        /// blast some random strings through differently configured tokenizers </summary>
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout attribute in NUnit for .NET Core.
+        [Timeout(int.MaxValue)]
+#endif
+        [Test, LongRunningTest, HasTimeout]
+        public virtual void TestRandomRegexps()
+        {
+            int iters = AtLeast(30);
+            for (int i = 0; i < iters; i++)
+            {
+                CharacterRunAutomaton dfa = new CharacterRunAutomaton(AutomatonTestUtil.RandomAutomaton(Random()));
+                bool lowercase = Random().NextBoolean();
+                int limit = TestUtil.NextInt(Random(), 0, 500);
+                Analyzer a = new AnalyzerAnonymousInnerClassHelper2(this, dfa, lowercase, limit);
+                CheckRandomData(Random(), a, 100);
+                a.Dispose();
+            }
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper2 : Analyzer
+        {
+            private readonly TestMockAnalyzer OuterInstance;
+
+            private CharacterRunAutomaton Dfa;
+            private bool Lowercase;
+            private int Limit;
+
+            public AnalyzerAnonymousInnerClassHelper2(TestMockAnalyzer outerInstance, CharacterRunAutomaton dfa, bool lowercase, int limit)
+            {
+                this.OuterInstance = outerInstance;
+                this.Dfa = dfa;
+                this.Lowercase = lowercase;
+                this.Limit = limit;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer t = new MockTokenizer(reader, Dfa, Lowercase, Limit);
+                return new TokenStreamComponents(t, t);
+            }
+        }
+
+        [Test]
+        public virtual void TestForwardOffsets()
+        {
+            int num = AtLeast(10000);
+            for (int i = 0; i < num; i++)
+            {
+                string s = TestUtil.RandomHtmlishString(Random(), 20);
+                StringReader reader = new StringReader(s);
+                MockCharFilter charfilter = new MockCharFilter(reader, 2);
+                MockAnalyzer analyzer = new MockAnalyzer(Random());
+                Exception priorException = null;
+                TokenStream ts = analyzer.TokenStream("bogus", charfilter.m_input);
+                try
+                {
+                    ts.Reset();
+                    while (ts.IncrementToken())
+                    {
+                        // consume
+                    }
+                    ts.End();
+                }
+                catch (Exception e)
+                {
+                    priorException = e;
+                }
+                finally
+                {
+                    IOUtils.CloseWhileHandlingException(priorException, ts);
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestWrapReader()
+        {
+            // LUCENE-5153: test that wrapping an analyzer's reader is allowed
+            Random random = Random();
+
+            Analyzer @delegate = new MockAnalyzer(random);
+            Analyzer a = new AnalyzerWrapperAnonymousInnerClassHelper(this, @delegate.Strategy, @delegate);
+
+            CheckOneTerm(a, "abc", "aabc");
+        }
+
+        private class AnalyzerWrapperAnonymousInnerClassHelper : AnalyzerWrapper
+        {
+            private readonly TestMockAnalyzer OuterInstance;
+
+            private Analyzer @delegate;
+
+            public AnalyzerWrapperAnonymousInnerClassHelper(TestMockAnalyzer outerInstance, ReuseStrategy getReuseStrategy, Analyzer @delegate)
+                : base(getReuseStrategy)
+            {
+                this.OuterInstance = outerInstance;
+                this.@delegate = @delegate;
+            }
+
+            protected override TextReader WrapReader(string fieldName, TextReader reader)
+            {
+                return new MockCharFilter(reader, 7);
+            }
+
+            protected override TokenStreamComponents WrapComponents(string fieldName, TokenStreamComponents components)
+            {
+                return components;
+            }
+
+            protected override Analyzer GetWrappedAnalyzer(string fieldName)
+            {
+                return @delegate;
+            }
+        }
+
+        [Test]
+        public virtual void TestChangeGaps()
+        {
+            // LUCENE-5324: check that it is possible to change the wrapper's gaps
+            int positionGap = Random().Next(1000);
+            int offsetGap = Random().Next(1000);
+            Analyzer @delegate = new MockAnalyzer(Random());
+            Analyzer a = new AnalyzerWrapperAnonymousInnerClassHelper2(this, @delegate.Strategy, positionGap, offsetGap, @delegate);
+
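+            // With two "a" values in field "f", the wrapper's gaps should push
+            // the second token's position out by positionGap and its offsets
+            // out by offsetGap; the term-vector assertions below verify this.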
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), NewDirectory(), Similarity, TimeZone);
+            Document doc = new Document();
+            FieldType ft = new FieldType();
+            ft.IsIndexed = true;
+            ft.IndexOptions = IndexOptions.DOCS_ONLY;
+            ft.IsTokenized = true;
+            ft.StoreTermVectors = true;
+            ft.StoreTermVectorPositions = true;
+            ft.StoreTermVectorOffsets = true;
+            doc.Add(new Field("f", "a", ft));
+            doc.Add(new Field("f", "a", ft));
+            writer.AddDocument(doc, a);
+            AtomicReader reader = GetOnlySegmentReader(writer.Reader);
+            Fields fields = reader.GetTermVectors(0);
+            Terms terms = fields.GetTerms("f");
+            TermsEnum te = terms.GetIterator(null);
+            Assert.AreEqual(new BytesRef("a"), te.Next());
+            DocsAndPositionsEnum dpe = te.DocsAndPositions(null, null);
+            Assert.AreEqual(0, dpe.NextDoc());
+            Assert.AreEqual(2, dpe.Freq);
+            Assert.AreEqual(0, dpe.NextPosition());
+            Assert.AreEqual(0, dpe.StartOffset);
+            int endOffset = dpe.EndOffset;
+            Assert.AreEqual(1 + positionGap, dpe.NextPosition());
+            Assert.AreEqual(1 + endOffset + offsetGap, dpe.EndOffset);
+            Assert.AreEqual(null, te.Next());
+            reader.Dispose();
+            writer.Dispose();
+            writer.w.Directory.Dispose();
+        }
+
+        private class AnalyzerWrapperAnonymousInnerClassHelper2 : AnalyzerWrapper
+        {
+            private readonly TestMockAnalyzer OuterInstance;
+
+            private int PositionGap;
+            private int OffsetGap;
+            private Analyzer @delegate;
+
+            public AnalyzerWrapperAnonymousInnerClassHelper2(TestMockAnalyzer outerInstance, ReuseStrategy getReuseStrategy, int positionGap, int offsetGap, Analyzer @delegate)
+                : base(getReuseStrategy)
+            {
+                this.OuterInstance = outerInstance;
+                this.PositionGap = positionGap;
+                this.OffsetGap = offsetGap;
+                this.@delegate = @delegate;
+            }
+
+            protected override Analyzer GetWrappedAnalyzer(string fieldName)
+            {
+                return @delegate;
+            }
+
+            public override int GetPositionIncrementGap(string fieldName)
+            {
+                return PositionGap;
+            }
+
+            public override int GetOffsetGap(string fieldName)
+            {
+                return OffsetGap;
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Analysis/TestMockCharFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Analysis/TestMockCharFilter.cs b/src/Lucene.Net.Tests/Analysis/TestMockCharFilter.cs
new file mode 100644
index 0000000..75fc8cd
--- /dev/null
+++ b/src/Lucene.Net.Tests/Analysis/TestMockCharFilter.cs
@@ -0,0 +1,59 @@
+using NUnit.Framework;
+using System.IO;
+
+namespace Lucene.Net.Analysis
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class TestMockCharFilter : BaseTokenStreamTestCase
+    {
+        [Test]
+        public virtual void Test()
+        {
+            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper(this);
+
+            AssertAnalyzesTo(analyzer, "aab", new string[] { "aab" }, new int[] { 0 }, new int[] { 3 });
+
+            AssertAnalyzesTo(analyzer, "aabaa", new string[] { "aabaa" }, new int[] { 0 }, new int[] { 5 });
+
+            AssertAnalyzesTo(analyzer, "aabcdefgaa", new string[] { "aabcdefgaa" }, new int[] { 0 }, new int[] { 10 });
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private readonly TestMockCharFilter OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper(TestMockCharFilter outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                return new TokenStreamComponents(tokenizer, tokenizer);
+            }
+
+            protected internal override TextReader InitReader(string fieldName, TextReader reader)
+            {
+                return new MockCharFilter(reader, 7);
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Analysis/TestNumericTokenStream.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Analysis/TestNumericTokenStream.cs b/src/Lucene.Net.Tests/Analysis/TestNumericTokenStream.cs
new file mode 100644
index 0000000..8c020e4
--- /dev/null
+++ b/src/Lucene.Net.Tests/Analysis/TestNumericTokenStream.cs
@@ -0,0 +1,144 @@
+using Lucene.Net.Analysis.TokenAttributes;
+
+namespace Lucene.Net.Analysis
+{
+    using NUnit.Framework;
+    using System;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CharTermAttribute = Lucene.Net.Analysis.TokenAttributes.CharTermAttribute;
+    using ICharTermAttribute = Lucene.Net.Analysis.TokenAttributes.ICharTermAttribute;
+    using NumericUtils = Lucene.Net.Util.NumericUtils;
+
+    [TestFixture]
+    public class TestNumericTokenStream : BaseTokenStreamTestCase
+    {
+        internal const long Lvalue = 4573245871874382L;
+        internal const int Ivalue = 123456;
+
+        [NUnit.Framework.Test]
+        public virtual void TestLongStream()
+        {
+            using (NumericTokenStream stream = (new NumericTokenStream()).SetInt64Value(Lvalue)) {
+                // use GetAttribute to test that the attributes really exist; if not, an ArgumentException will be thrown
+                ITermToBytesRefAttribute bytesAtt = stream.GetAttribute<ITermToBytesRefAttribute>();
+                ITypeAttribute typeAtt = stream.GetAttribute<ITypeAttribute>();
+                NumericTokenStream.INumericTermAttribute numericAtt = stream.GetAttribute<NumericTokenStream.INumericTermAttribute>();
+                BytesRef bytes = bytesAtt.BytesRef;
+                stream.Reset();
+                Assert.AreEqual(64, numericAtt.ValueSize);
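+                // NumericTokenStream emits one token per precision step: at each
+                // shift, the low bits of the value are zeroed in the prefix-coded
+                // term, which the loop below checks.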
+                for (int shift = 0; shift < 64; shift += NumericUtils.PRECISION_STEP_DEFAULT)
+                {
+                    Assert.IsTrue(stream.IncrementToken(), "New token is available");
+                    Assert.AreEqual(shift, numericAtt.Shift, "Shift value wrong");
+                    bytesAtt.FillBytesRef();
+                    Assert.AreEqual(Lvalue & ~((1L << shift) - 1L), NumericUtils.PrefixCodedToInt64(bytes), "Term is incorrectly encoded");
+                    Assert.AreEqual(Lvalue & ~((1L << shift) - 1L), numericAtt.RawValue, "Term raw value is incorrectly encoded");
+                    Assert.AreEqual((shift == 0) ? NumericTokenStream.TOKEN_TYPE_FULL_PREC : NumericTokenStream.TOKEN_TYPE_LOWER_PREC, typeAtt.Type, "Type incorrect");
+                }
+                Assert.IsFalse(stream.IncrementToken(), "More tokens available");
+                stream.End();
+            }
+        }
+
+        [NUnit.Framework.Test]
+        public virtual void TestIntStream()
+        {
+            NumericTokenStream stream = (new NumericTokenStream()).SetInt32Value(Ivalue);
+            // use GetAttribute to test that the attributes really exist; if not, an ArgumentException will be thrown
+            ITermToBytesRefAttribute bytesAtt = stream.GetAttribute<ITermToBytesRefAttribute>();
+            ITypeAttribute typeAtt = stream.GetAttribute<ITypeAttribute>();
+            NumericTokenStream.INumericTermAttribute numericAtt = stream.GetAttribute<NumericTokenStream.INumericTermAttribute>();
+            BytesRef bytes = bytesAtt.BytesRef;
+            stream.Reset();
+            Assert.AreEqual(32, numericAtt.ValueSize);
+            for (int shift = 0; shift < 32; shift += NumericUtils.PRECISION_STEP_DEFAULT)
+            {
+                Assert.IsTrue(stream.IncrementToken(), "New token is available");
+                Assert.AreEqual(shift, numericAtt.Shift, "Shift value wrong");
+                bytesAtt.FillBytesRef();
+                Assert.AreEqual(Ivalue & ~((1 << shift) - 1), NumericUtils.PrefixCodedToInt32(bytes), "Term is incorrectly encoded");
+                Assert.AreEqual(((long)Ivalue) & ~((1L << shift) - 1L), numericAtt.RawValue, "Term raw value is incorrectly encoded");
+                Assert.AreEqual((shift == 0) ? NumericTokenStream.TOKEN_TYPE_FULL_PREC : NumericTokenStream.TOKEN_TYPE_LOWER_PREC, typeAtt.Type, "Type incorrect");
+            }
+            Assert.IsFalse(stream.IncrementToken(), "More tokens available");
+            stream.End();
+            stream.Dispose();
+        }
+
+        [NUnit.Framework.Test]
+        public virtual void TestNotInitialized()
+        {
+            NumericTokenStream stream = new NumericTokenStream();
+
+            try
+            {
+                stream.Reset();
+                Assert.Fail("reset() should not succeed.");
+            }
+            catch (Exception)
+            {
+                // pass
+            }
+
+            try
+            {
+                stream.IncrementToken();
+                Assert.Fail("IncrementToken() should not succeed.");
+            }
+            catch (Exception)
+            {
+                // pass
+            }
+        }
+
+        public interface ITestAttribute : ICharTermAttribute
+        {
+        }
+
+        public class TestAttribute : CharTermAttribute, ITestAttribute
+        {
+        }
+
+        [NUnit.Framework.Test]
+        public virtual void TestCTA()
+        {
+            NumericTokenStream stream = new NumericTokenStream();
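+            // NumericTokenStream supplies its own numeric term attribute, so adding a
+            // CharTermAttribute-based attribute (directly or via a derived interface
+            // such as ITestAttribute) must be rejected with an ArgumentException.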
+            try
+            {
+                stream.AddAttribute<ICharTermAttribute>();
+                Assert.Fail("Succeeded to add CharTermAttribute.");
+            }
+            catch (System.ArgumentException iae)
+            {
+                Assert.IsTrue(iae.Message.StartsWith("NumericTokenStream does not support"));
+            }
+            try
+            {
+                stream.AddAttribute<ITestAttribute>();
+                Assert.Fail("Succeeded to add TestAttribute.");
+            }
+            catch (System.ArgumentException iae)
+            {
+                Assert.IsTrue(iae.Message.StartsWith("NumericTokenStream does not support"));
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Analysis/TestPosition.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Analysis/TestPosition.cs b/src/Lucene.Net.Tests/Analysis/TestPosition.cs
new file mode 100644
index 0000000..54eb089
--- /dev/null
+++ b/src/Lucene.Net.Tests/Analysis/TestPosition.cs
@@ -0,0 +1,27 @@
+namespace Lucene.Net.Analysis
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Trivial position class.
+    /// </summary>
+    public class TestPosition : LookaheadTokenFilter.Position
+    {
+        public virtual string Fact { get; set; }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Analysis/TestReusableStringReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Analysis/TestReusableStringReader.cs b/src/Lucene.Net.Tests/Analysis/TestReusableStringReader.cs
new file mode 100644
index 0000000..150a3f8
--- /dev/null
+++ b/src/Lucene.Net.Tests/Analysis/TestReusableStringReader.cs
@@ -0,0 +1,71 @@
+using System.Text;
+
+namespace Lucene.Net.Analysis
+{
+    using NUnit.Framework;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class TestReusableStringReader : LuceneTestCase
+    {
+        [Test]
+        public virtual void Test()
+        {
+            char[] buf = new char[4];
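+            // Semantics under test: Read returns -1 before SetValue and once the value
+            // is exhausted, a zero-length Read returns 0 while characters remain, and
+            // the reader can be re-used via SetValue, even after Dispose.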
+
+            using (ReusableStringReader reader = new ReusableStringReader())
+            {
+                Assert.AreEqual(-1, reader.Read());
+                Assert.AreEqual(-1, reader.Read(new char[1], 0, 1));
+                Assert.AreEqual(-1, reader.Read(new char[2], 1, 1));
+                //Assert.AreEqual(-1, reader.Read(CharBuffer.wrap(new char[2])));
+
+                reader.SetValue("foobar");
+                Assert.AreEqual(4, reader.Read(buf, 0, 4));
+                Assert.AreEqual("foob", new string(buf));
+                Assert.AreEqual(2, reader.Read(buf, 0, 2));
+                Assert.AreEqual("ar", new string(buf, 0, 2));
+                Assert.AreEqual(-1, reader.Read(buf, 2, 0));
+            }
+
+            using (ReusableStringReader reader = new ReusableStringReader())
+            {
+                reader.SetValue("foobar");
+                Assert.AreEqual(0, reader.Read(buf, 1, 0));
+                Assert.AreEqual(3, reader.Read(buf, 1, 3));
+                Assert.AreEqual("foo", new string(buf, 1, 3));
+                Assert.AreEqual(2, reader.Read(buf, 2, 2));
+                Assert.AreEqual("ba", new string(buf, 2, 2));
+                Assert.AreEqual('r', (char)reader.Read());
+                Assert.AreEqual(-1, reader.Read(buf, 2, 0));
+                reader.Dispose();
+
+                reader.SetValue("foobar");
+                StringBuilder sb = new StringBuilder();
+                int ch;
+                while ((ch = reader.Read()) != -1)
+                {
+                    sb.Append((char)ch);
+                }
+                Assert.AreEqual("foobar", sb.ToString());
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Analysis/TestToken.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Analysis/TestToken.cs b/src/Lucene.Net.Tests/Analysis/TestToken.cs
new file mode 100644
index 0000000..5e3fa66
--- /dev/null
+++ b/src/Lucene.Net.Tests/Analysis/TestToken.cs
@@ -0,0 +1,305 @@
+using System.Collections.Generic;
+using System.Text;
+
+namespace Lucene.Net.Analysis
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Analysis.TokenAttributes;
+    using NUnit.Framework;
+    using Attribute = Lucene.Net.Util.Attribute;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using IAttribute = Lucene.Net.Util.IAttribute;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestToken : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestCtor()
+        {
+            Token t = new Token();
+            char[] content = "hello".ToCharArray();
+            t.CopyBuffer(content, 0, content.Length);
+            Assert.AreNotSame(t.Buffer, content);
+            Assert.AreEqual(0, t.StartOffset);
+            Assert.AreEqual(0, t.EndOffset);
+            Assert.AreEqual("hello", t.ToString());
+            Assert.AreEqual("word", t.Type);
+            Assert.AreEqual(0, t.Flags);
+
+            t = new Token(6, 22);
+            t.CopyBuffer(content, 0, content.Length);
+            Assert.AreEqual("hello", t.ToString());
+            Assert.AreEqual("hello", t.ToString());
+            Assert.AreEqual(6, t.StartOffset);
+            Assert.AreEqual(22, t.EndOffset);
+            Assert.AreEqual("word", t.Type);
+            Assert.AreEqual(0, t.Flags);
+
+            t = new Token(6, 22, 7);
+            t.CopyBuffer(content, 0, content.Length);
+            Assert.AreEqual("hello", t.ToString());
+            Assert.AreEqual("hello", t.ToString());
+            Assert.AreEqual(6, t.StartOffset);
+            Assert.AreEqual(22, t.EndOffset);
+            Assert.AreEqual("word", t.Type);
+            Assert.AreEqual(7, t.Flags);
+
+            t = new Token(6, 22, "junk");
+            t.CopyBuffer(content, 0, content.Length);
+            Assert.AreEqual("hello", t.ToString());
+            Assert.AreEqual("hello", t.ToString());
+            Assert.AreEqual(6, t.StartOffset);
+            Assert.AreEqual(22, t.EndOffset);
+            Assert.AreEqual("junk", t.Type);
+            Assert.AreEqual(0, t.Flags);
+        }
+
+        [Test]
+        public virtual void TestResize()
+        {
+            Token t = new Token();
+            char[] content = "hello".ToCharArray();
+            t.CopyBuffer(content, 0, content.Length);
+            for (int i = 0; i < 2000; i++)
+            {
+                t.ResizeBuffer(i);
+                Assert.IsTrue(i <= t.Buffer.Length);
+                Assert.AreEqual("hello", t.ToString());
+            }
+        }
+
+        [Test]
+        public virtual void TestGrow()
+        {
+            Token t = new Token();
+            StringBuilder buf = new StringBuilder("ab");
+            for (int i = 0; i < 20; i++)
+            {
+                char[] content = buf.ToString().ToCharArray();
+                t.CopyBuffer(content, 0, content.Length);
+                Assert.AreEqual(buf.Length, t.Length);
+                Assert.AreEqual(buf.ToString(), t.ToString());
+                buf.Append(buf.ToString());
+            }
+            Assert.AreEqual(1048576, t.Length);
+
+            // now as a string, second variant
+            t = new Token();
+            buf = new StringBuilder("ab");
+            for (int i = 0; i < 20; i++)
+            {
+                t.SetEmpty().Append(buf);
+                string content = buf.ToString();
+                Assert.AreEqual(content.Length, t.Length);
+                Assert.AreEqual(content, t.ToString());
+                buf.Append(content);
+            }
+            Assert.AreEqual(1048576, t.Length);
+
+            // Test for slow growth to a long term
+            t = new Token();
+            buf = new StringBuilder("a");
+            for (int i = 0; i < 20000; i++)
+            {
+                t.SetEmpty().Append(buf);
+                string content = buf.ToString();
+                Assert.AreEqual(content.Length, t.Length);
+                Assert.AreEqual(content, t.ToString());
+                buf.Append("a");
+            }
+            Assert.AreEqual(20000, t.Length);
+
+            // Test for slow growth to a long term
+            t = new Token();
+            buf = new StringBuilder("a");
+            for (int i = 0; i < 20000; i++)
+            {
+                t.SetEmpty().Append(buf);
+                string content = buf.ToString();
+                Assert.AreEqual(content.Length, t.Length);
+                Assert.AreEqual(content, t.ToString());
+                buf.Append("a");
+            }
+            Assert.AreEqual(20000, t.Length);
+        }
+
+        [Test]
+        public virtual void TestToString()
+        {
+            char[] b = new char[] { 'a', 'l', 'o', 'h', 'a' };
+            Token t = new Token("", 0, 5);
+            t.CopyBuffer(b, 0, 5);
+            Assert.AreEqual("aloha", t.ToString());
+
+            t.SetEmpty().Append("hi there");
+            Assert.AreEqual("hi there", t.ToString());
+        }
+
+        [Test]
+        public virtual void TestTermBufferEquals()
+        {
+            Token t1a = new Token();
+            char[] content1a = "hello".ToCharArray();
+            t1a.CopyBuffer(content1a, 0, 5);
+            Token t1b = new Token();
+            char[] content1b = "hello".ToCharArray();
+            t1b.CopyBuffer(content1b, 0, 5);
+            Token t2 = new Token();
+            char[] content2 = "hello2".ToCharArray();
+            t2.CopyBuffer(content2, 0, 6);
+            Assert.IsTrue(t1a.Equals(t1b));
+            Assert.IsFalse(t1a.Equals(t2));
+            Assert.IsFalse(t2.Equals(t1b));
+        }
+
+        [Test]
+        public virtual void TestMixedStringArray()
+        {
+            Token t = new Token("hello", 0, 5);
+            Assert.AreEqual(t.Length, 5);
+            Assert.AreEqual(t.ToString(), "hello");
+            t.SetEmpty().Append("hello2");
+            Assert.AreEqual(t.Length, 6);
+            Assert.AreEqual(t.ToString(), "hello2");
+            t.CopyBuffer("hello3".ToCharArray(), 0, 6);
+            Assert.AreEqual(t.ToString(), "hello3");
+
+            char[] buffer = t.Buffer;
+            buffer[1] = 'o';
+            Assert.AreEqual(t.ToString(), "hollo3");
+        }
+
+        [Test]
+        public virtual void TestClone()
+        {
+            Token t = new Token(0, 5);
+            char[] content = "hello".ToCharArray();
+            t.CopyBuffer(content, 0, 5);
+            char[] buf = t.Buffer;
+            Token copy = AssertCloneIsEqual(t);
+            Assert.AreEqual(t.ToString(), copy.ToString());
+            Assert.AreNotSame(buf, copy.Buffer);
+
+            BytesRef pl = new BytesRef(new byte[] { 1, 2, 3, 4 });
+            t.Payload = pl;
+            copy = AssertCloneIsEqual(t);
+            Assert.AreEqual(pl, copy.Payload);
+            Assert.AreNotSame(pl, copy.Payload);
+        }
+
+        [Test]
+        public virtual void TestCopyTo()
+        {
+            Token t = new Token();
+            Token copy = AssertCopyIsEqual(t);
+            Assert.AreEqual("", t.ToString());
+            Assert.AreEqual("", copy.ToString());
+
+            t = new Token(0, 5);
+            char[] content = "hello".ToCharArray();
+            t.CopyBuffer(content, 0, 5);
+            char[] buf = t.Buffer;
+            copy = AssertCopyIsEqual(t);
+            Assert.AreEqual(t.ToString(), copy.ToString());
+            Assert.AreNotSame(buf, copy.Buffer);
+
+            BytesRef pl = new BytesRef(new byte[] { 1, 2, 3, 4 });
+            t.Payload = pl;
+            copy = AssertCopyIsEqual(t);
+            Assert.AreEqual(pl, copy.Payload);
+            Assert.AreNotSame(pl, copy.Payload);
+        }
+
+        public interface ISenselessAttribute : Lucene.Net.Util.IAttribute
+        {
+        }
+
+        public sealed class SenselessAttribute : Attribute, ISenselessAttribute
+        {
+            public override void CopyTo(IAttribute target)
+            {
+            }
+
+            public override void Clear()
+            {
+            }
+
+            public override bool Equals(object o)
+            {
+                return (o is SenselessAttribute);
+            }
+
+            public override int GetHashCode()
+            {
+                return 0;
+            }
+        }
+
+        [Test]
+        public virtual void TestTokenAttributeFactory()
+        {
+            TokenStream ts = new MockTokenizer(Token.TOKEN_ATTRIBUTE_FACTORY, new System.IO.StringReader("foo bar"), MockTokenizer.WHITESPACE, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
+
+            Assert.IsTrue(ts.AddAttribute<ISenselessAttribute>() is SenselessAttribute, "SenselessAttribute is not implemented by SenselessAttributeImpl");
+
+            Assert.IsTrue(ts.AddAttribute<ICharTermAttribute>() is Token, "CharTermAttribute is not implemented by Token");
+            Assert.IsTrue(ts.AddAttribute<IOffsetAttribute>() is Token, "OffsetAttribute is not implemented by Token");
+            Assert.IsTrue(ts.AddAttribute<IFlagsAttribute>() is Token, "FlagsAttribute is not implemented by Token");
+            Assert.IsTrue(ts.AddAttribute<IPayloadAttribute>() is Token, "PayloadAttribute is not implemented by Token");
+            Assert.IsTrue(ts.AddAttribute<IPositionIncrementAttribute>() is Token, "PositionIncrementAttribute is not implemented by Token");
+            Assert.IsTrue(ts.AddAttribute<ITypeAttribute>() is Token, "TypeAttribute is not implemented by Token");
+        }
+
+        [Test]
+        public virtual void TestAttributeReflection()
+        {
+            Token t = new Token("foobar", 6, 22, 8);
+            TestUtil.AssertAttributeReflection(t, new Dictionary<string, object>()
+            {
+                { typeof(ICharTermAttribute).Name + "#term", "foobar" },
+                { typeof(ITermToBytesRefAttribute).Name + "#bytes", new BytesRef("foobar") },
+                { typeof(IOffsetAttribute).Name + "#startOffset", 6 },
+                { typeof(IOffsetAttribute).Name + "#endOffset", 22 },
+                { typeof(IPositionIncrementAttribute).Name + "#positionIncrement", 1 },
+                { typeof(IPayloadAttribute).Name + "#payload", null },
+                { typeof(ITypeAttribute).Name + "#type", TypeAttribute_Fields.DEFAULT_TYPE },
+                { typeof(IFlagsAttribute).Name + "#flags", 8 }
+            });
+        }
+
+        public static T AssertCloneIsEqual<T>(T att) where T : Attribute
+        {
+            T clone = (T)att.Clone();
+            Assert.AreEqual(att, clone, "Clone must be equal");
+            Assert.AreEqual(att.GetHashCode(), clone.GetHashCode(), "Clone's hashcode must be equal");
+            return clone;
+        }
+
+        public static T AssertCopyIsEqual<T>(T att) where T : Attribute
+        {
+            T copy = (T)System.Activator.CreateInstance(att.GetType());
+            att.CopyTo(copy);
+            Assert.AreEqual(att, copy, "Copied instance must be equal");
+            Assert.AreEqual(att.GetHashCode(), copy.GetHashCode(), "Copied instance's hashcode must be equal");
+            return copy;
+        }
+    }
+}
\ No newline at end of file


[54/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Renamed Codecs\asserting\ to Codecs\Asserting\

Posted by ni...@apache.org.
Lucene.Net.TestFramework: Renamed Codecs\asserting\ to Codecs\Asserting\


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/77e95ccc
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/77e95ccc
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/77e95ccc

Branch: refs/heads/api-work
Commit: 77e95ccc235a83696494c674f4945c42d80e9d61
Parents: 9682239
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 26 02:44:43 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:17:55 2017 +0700

----------------------------------------------------------------------
 .../Codecs/Asserting/AssertingCodec.cs          |  64 ++++
 .../Asserting/AssertingDocValuesFormat.cs       | 346 +++++++++++++++++++
 .../Codecs/Asserting/AssertingNormsFormat.cs    |  51 +++
 .../Codecs/Asserting/AssertingPostingsFormat.cs | 318 +++++++++++++++++
 .../Asserting/AssertingStoredFieldsFormat.cs    | 154 +++++++++
 .../Asserting/AssertingTermVectorsFormat.cs     | 208 +++++++++++
 .../Codecs/asserting/AssertingCodec.cs          |  64 ----
 .../asserting/AssertingDocValuesFormat.cs       | 346 -------------------
 .../Codecs/asserting/AssertingNormsFormat.cs    |  51 ---
 .../Codecs/asserting/AssertingPostingsFormat.cs | 318 -----------------
 .../asserting/AssertingStoredFieldsFormat.cs    | 154 ---------
 .../asserting/AssertingTermVectorsFormat.cs     | 208 -----------
 .../Index/RandomCodec.cs                        |   4 +-
 .../Lucene.Net.TestFramework.csproj             |  12 +-
 .../Index/TestBinaryDocValuesUpdates.cs         |   2 +-
 .../Index/TestNumericDocValuesUpdates.cs        |   2 +-
 16 files changed, 1151 insertions(+), 1151 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/77e95ccc/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingCodec.cs
new file mode 100644
index 0000000..5bf1179
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingCodec.cs
@@ -0,0 +1,64 @@
+namespace Lucene.Net.Codecs.Asserting
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene46Codec = Lucene.Net.Codecs.Lucene46.Lucene46Codec;
+
+    /// <summary>
+    /// Acts like <seealso cref="Lucene46Codec"/> but with additional asserts.
+    /// </summary>
+    [CodecName("Asserting")]
+    public sealed class AssertingCodec : FilterCodec
+    {
+        private readonly PostingsFormat Postings = new AssertingPostingsFormat();
+        private readonly TermVectorsFormat Vectors = new AssertingTermVectorsFormat();
+        private readonly StoredFieldsFormat StoredFields = new AssertingStoredFieldsFormat();
+        private readonly DocValuesFormat DocValues = new AssertingDocValuesFormat();
+        private readonly NormsFormat Norms = new AssertingNormsFormat();
+
+        public AssertingCodec()
+            : base(new Lucene46Codec())
+        {
+        }
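+        // Usage note (hedged): tests normally obtain this codec through its registered
+        // name, e.g. Codec.ForName("Asserting"), rather than constructing it directly.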
+
+        public override PostingsFormat PostingsFormat
+        {
+            get { return Postings; }
+        }
+
+        public override TermVectorsFormat TermVectorsFormat
+        {
+            get { return Vectors; }
+        }
+
+        public override StoredFieldsFormat StoredFieldsFormat
+        {
+            get { return StoredFields; }
+        }
+
+        public override DocValuesFormat DocValuesFormat
+        {
+            get { return DocValues; }
+        }
+
+        public override NormsFormat NormsFormat
+        {
+            get { return Norms; }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/77e95ccc/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs
new file mode 100644
index 0000000..0d91fb0
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingDocValuesFormat.cs
@@ -0,0 +1,346 @@
+using System.Collections.Generic;
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Asserting
+{
+    using System;
+    using AssertingAtomicReader = Lucene.Net.Index.AssertingAtomicReader;
+    using BinaryDocValues = Lucene.Net.Index.BinaryDocValues;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using DocValuesType = Lucene.Net.Index.DocValuesType;
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using FixedBitSet = Lucene.Net.Util.FixedBitSet;
+    using Int64BitSet = Lucene.Net.Util.Int64BitSet;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene45DocValuesFormat = Lucene.Net.Codecs.Lucene45.Lucene45DocValuesFormat;
+    using NumericDocValues = Lucene.Net.Index.NumericDocValues;
+    using SegmentReadState = Lucene.Net.Index.SegmentReadState;
+    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
+    using SortedDocValues = Lucene.Net.Index.SortedDocValues;
+    using SortedSetDocValues = Lucene.Net.Index.SortedSetDocValues;
+
+    /// <summary>
+    /// Just like <seealso cref="Lucene45DocValuesFormat"/> but with additional asserts.
+    /// </summary>
+    [DocValuesFormatName("Asserting")]
+    public class AssertingDocValuesFormat : DocValuesFormat
+    {
+        private readonly DocValuesFormat @in = new Lucene45DocValuesFormat();
+
+        public AssertingDocValuesFormat()
+            : base()
+        {
+        }
+
+        public override DocValuesConsumer FieldsConsumer(SegmentWriteState state)
+        {
+            DocValuesConsumer consumer = @in.FieldsConsumer(state);
+            Debug.Assert(consumer != null);
+            return new AssertingDocValuesConsumer(consumer, state.SegmentInfo.DocCount);
+        }
+
+        public override DocValuesProducer FieldsProducer(SegmentReadState state)
+        {
+            Debug.Assert(state.FieldInfos.HasDocValues);
+            DocValuesProducer producer = @in.FieldsProducer(state);
+            Debug.Assert(producer != null);
+            return new AssertingDocValuesProducer(producer, state.SegmentInfo.DocCount);
+        }
+
+        internal class AssertingDocValuesConsumer : DocValuesConsumer
+        {
+            internal readonly DocValuesConsumer @in;
+            internal readonly int MaxDoc;
+
+            internal AssertingDocValuesConsumer(DocValuesConsumer @in, int maxDoc)
+            {
+                this.@in = @in;
+                this.MaxDoc = maxDoc;
+            }
+
+            public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
+            {
+                int count = 0;
+                foreach (var v in values)
+                {
+                    count++;
+                }
+                Debug.Assert(count == MaxDoc);
+                CheckIterator(values.GetEnumerator(), MaxDoc, true);
+                @in.AddNumericField(field, values);
+            }
+
+            public override void AddBinaryField(FieldInfo field, IEnumerable<BytesRef> values)
+            {
+                int count = 0;
+                foreach (BytesRef b in values)
+                {
+                    Debug.Assert(b == null || b.IsValid());
+                    count++;
+                }
+                Debug.Assert(count == MaxDoc);
+                CheckIterator(values.GetEnumerator(), MaxDoc, true);
+                @in.AddBinaryField(field, values);
+            }
+
+            public override void AddSortedField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd)
+            {
+                int valueCount = 0;
+                BytesRef lastValue = null;
+                foreach (BytesRef b in values)
+                {
+                    Debug.Assert(b != null);
+                    Debug.Assert(b.IsValid());
+                    if (valueCount > 0)
+                    {
+                        Debug.Assert(b.CompareTo(lastValue) > 0);
+                    }
+                    lastValue = BytesRef.DeepCopyOf(b);
+                    valueCount++;
+                }
+                Debug.Assert(valueCount <= MaxDoc);
+
+                FixedBitSet seenOrds = new FixedBitSet(valueCount);
+
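+                // Each per-document ord must lie in [-1, valueCount), where -1 marks a
+                // missing value, and after the scan every distinct value must have been
+                // referenced at least once (seenOrds.Cardinality() == valueCount).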
+                int count = 0;
+                foreach (long? v in docToOrd)
+                {
+                    Debug.Assert(v != null);
+                    int ord = (int)v.Value;
+                    Debug.Assert(ord >= -1 && ord < valueCount);
+                    if (ord >= 0)
+                    {
+                        seenOrds.Set(ord);
+                    }
+                    count++;
+                }
+
+                Debug.Assert(count == MaxDoc);
+                Debug.Assert(seenOrds.Cardinality() == valueCount);
+                CheckIterator(values.GetEnumerator(), valueCount, false);
+                CheckIterator(docToOrd.GetEnumerator(), MaxDoc, false);
+                @in.AddSortedField(field, values, docToOrd);
+            }
+
+            public override void AddSortedSetField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
+            {
+                long valueCount = 0;
+                BytesRef lastValue = null;
+                foreach (BytesRef b in values)
+                {
+                    Debug.Assert(b != null);
+                    Debug.Assert(b.IsValid());
+                    if (valueCount > 0)
+                    {
+                        Debug.Assert(b.CompareTo(lastValue) > 0);
+                    }
+                    lastValue = BytesRef.DeepCopyOf(b);
+                    valueCount++;
+                }
+
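+                // SORTED_SET adds one more invariant: within a single document the ords
+                // must be strictly increasing (no duplicates), and the ords enumerator
+                // must be fully consumed once all per-document counts are processed.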
+                int docCount = 0;
+                long ordCount = 0;
+                Int64BitSet seenOrds = new Int64BitSet(valueCount);
+                IEnumerator<long?> ordIterator = ords.GetEnumerator();
+                foreach (long? v in docToOrdCount)
+                {
+                    Debug.Assert(v != null);
+                    int count = (int)v.Value;
+                    Debug.Assert(count >= 0);
+                    docCount++;
+                    ordCount += count;
+
+                    long lastOrd = -1;
+                    for (int i = 0; i < count; i++)
+                    {
+                        ordIterator.MoveNext();
+                        long? o = ordIterator.Current;
+                        Debug.Assert(o != null);
+                        long ord = o.Value;
+                        Debug.Assert(ord >= 0 && ord < valueCount);
+                        Debug.Assert(ord > lastOrd, "ord=" + ord + ",lastOrd=" + lastOrd);
+                        seenOrds.Set(ord);
+                        lastOrd = ord;
+                    }
+                }
+                Debug.Assert(ordIterator.MoveNext() == false);
+
+                Debug.Assert(docCount == MaxDoc);
+                Debug.Assert(seenOrds.Cardinality() == valueCount);
+                CheckIterator(values.GetEnumerator(), valueCount, false);
+                CheckIterator(docToOrdCount.GetEnumerator(), MaxDoc, false);
+                CheckIterator(ords.GetEnumerator(), ordCount, false);
+                @in.AddSortedSetField(field, values, docToOrdCount, ords);
+            }
+
+            protected override void Dispose(bool disposing)
+            {
+                if (disposing)
+                    @in.Dispose();
+            }
+        }
+
+        internal class AssertingNormsConsumer : DocValuesConsumer
+        {
+            internal readonly DocValuesConsumer @in;
+            internal readonly int MaxDoc;
+
+            internal AssertingNormsConsumer(DocValuesConsumer @in, int maxDoc)
+            {
+                this.@in = @in;
+                this.MaxDoc = maxDoc;
+            }
+
+            public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
+            {
+                int count = 0;
+                foreach (long? v in values)
+                {
+                    Debug.Assert(v != null);
+                    count++;
+                }
+                Debug.Assert(count == MaxDoc);
+                CheckIterator(values.GetEnumerator(), MaxDoc, false);
+                @in.AddNumericField(field, values);
+            }
+
+            protected override void Dispose(bool disposing)
+            {
+                if (disposing)
+                    @in.Dispose();
+            }
+
+            public override void AddBinaryField(FieldInfo field, IEnumerable<BytesRef> values)
+            {
+                throw new InvalidOperationException();
+            }
+
+            public override void AddSortedField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd)
+            {
+                throw new InvalidOperationException();
+            }
+
+            public override void AddSortedSetField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
+            {
+                throw new InvalidOperationException();
+            }
+        }
+
+        private static void CheckIterator<T>(IEnumerator<T> iterator, long expectedSize, bool allowNull)
+        {
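+            // Drains the enumerator and asserts that it yields exactly expectedSize
+            // items (nulls permitted only when allowNull is true), probing Reset() at
+            // each step: a well-behaved forward-only enumerator must throw
+            // NotSupportedException here.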
+            for (long i = 0; i < expectedSize; i++)
+            {
+                bool hasNext = iterator.MoveNext();
+                Debug.Assert(hasNext);
+                T v = iterator.Current;
+                Debug.Assert(allowNull || v != null);
+                try
+                {
+                    iterator.Reset();
+                    throw new InvalidOperationException("broken iterator (supports Reset): " + iterator);
+                }
+                catch (System.NotSupportedException)
+                {
+                    // ok
+                }
+            }
+            Debug.Assert(!iterator.MoveNext());
+            /*try
+            {
+              //iterator.next();
+              throw new InvalidOperationException("broken iterator (allows next() when hasNext==false) " + iterator);
+            }
+            catch (Exception)
+            {
+              // ok
+            }*/
+        }
+
+        internal class AssertingDocValuesProducer : DocValuesProducer
+        {
+            internal readonly DocValuesProducer @in;
+            internal readonly int MaxDoc;
+
+            internal AssertingDocValuesProducer(DocValuesProducer @in, int maxDoc)
+            {
+                this.@in = @in;
+                this.MaxDoc = maxDoc;
+            }
+
+            public override NumericDocValues GetNumeric(FieldInfo field)
+            {
+                Debug.Assert(field.DocValuesType == DocValuesType.NUMERIC || field.NormType == DocValuesType.NUMERIC);
+                NumericDocValues values = @in.GetNumeric(field);
+                Debug.Assert(values != null);
+                return new AssertingAtomicReader.AssertingNumericDocValues(values, MaxDoc);
+            }
+
+            public override BinaryDocValues GetBinary(FieldInfo field)
+            {
+                Debug.Assert(field.DocValuesType == DocValuesType.BINARY);
+                BinaryDocValues values = @in.GetBinary(field);
+                Debug.Assert(values != null);
+                return new AssertingAtomicReader.AssertingBinaryDocValues(values, MaxDoc);
+            }
+
+            public override SortedDocValues GetSorted(FieldInfo field)
+            {
+                Debug.Assert(field.DocValuesType == DocValuesType.SORTED);
+                SortedDocValues values = @in.GetSorted(field);
+                Debug.Assert(values != null);
+                return new AssertingAtomicReader.AssertingSortedDocValues(values, MaxDoc);
+            }
+
+            public override SortedSetDocValues GetSortedSet(FieldInfo field)
+            {
+                Debug.Assert(field.DocValuesType == DocValuesType.SORTED_SET);
+                SortedSetDocValues values = @in.GetSortedSet(field);
+                Debug.Assert(values != null);
+                return new AssertingAtomicReader.AssertingSortedSetDocValues(values, MaxDoc);
+            }
+
+            public override IBits GetDocsWithField(FieldInfo field)
+            {
+                Debug.Assert(field.DocValuesType != null);
+                IBits bits = @in.GetDocsWithField(field);
+                Debug.Assert(bits != null);
+                Debug.Assert(bits.Length == MaxDoc);
+                return new AssertingAtomicReader.AssertingBits(bits);
+            }
+
+            protected override void Dispose(bool disposing)
+            {
+                if (disposing)
+                    @in.Dispose();
+            }
+
+            public override long RamBytesUsed()
+            {
+                return @in.RamBytesUsed();
+            }
+
+            public override void CheckIntegrity()
+            {
+                @in.CheckIntegrity();
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/77e95ccc/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingNormsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingNormsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingNormsFormat.cs
new file mode 100644
index 0000000..4bdc7a3
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingNormsFormat.cs
@@ -0,0 +1,51 @@
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Asserting
+{
+    using AssertingDocValuesProducer = Lucene.Net.Codecs.Asserting.AssertingDocValuesFormat.AssertingDocValuesProducer;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using AssertingNormsConsumer = Lucene.Net.Codecs.Asserting.AssertingDocValuesFormat.AssertingNormsConsumer;
+    using Lucene42NormsFormat = Lucene.Net.Codecs.Lucene42.Lucene42NormsFormat;
+    using SegmentReadState = Lucene.Net.Index.SegmentReadState;
+    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
+
+    /// <summary>
+    /// Just like <seealso cref="Lucene42NormsFormat"/> but with additional asserts.
+    /// </summary>
+    public class AssertingNormsFormat : NormsFormat
+    {
+        private readonly NormsFormat @in = new Lucene42NormsFormat();
+
+        public override DocValuesConsumer NormsConsumer(SegmentWriteState state)
+        {
+            DocValuesConsumer consumer = @in.NormsConsumer(state);
+            Debug.Assert(consumer != null);
+            return new AssertingNormsConsumer(consumer, state.SegmentInfo.DocCount);
+        }
+
+        public override DocValuesProducer NormsProducer(SegmentReadState state)
+        {
+            Debug.Assert(state.FieldInfos.HasNorms);
+            DocValuesProducer producer = @in.NormsProducer(state);
+            Debug.Assert(producer != null);
+            return new AssertingDocValuesProducer(producer, state.SegmentInfo.DocCount);
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/77e95ccc/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs
new file mode 100644
index 0000000..c039ceb
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingPostingsFormat.cs
@@ -0,0 +1,318 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Asserting
+{
+    using AssertingAtomicReader = Lucene.Net.Index.AssertingAtomicReader;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using IndexOptions = Lucene.Net.Index.IndexOptions;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene41PostingsFormat = Lucene.Net.Codecs.Lucene41.Lucene41PostingsFormat;
+    using OpenBitSet = Lucene.Net.Util.OpenBitSet;
+    using SegmentReadState = Lucene.Net.Index.SegmentReadState;
+    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
+    using Terms = Lucene.Net.Index.Terms;
+
+    /// <summary>
+    /// Just like <seealso cref="Lucene41PostingsFormat"/> but with additional asserts.
+    /// </summary>
+    [PostingsFormatName("Asserting")] // LUCENENET specific - using PostingsFormatName attribute to ensure the default name passed from subclasses is the same as this class name
+    public sealed class AssertingPostingsFormat : PostingsFormat
+    {
+        private readonly PostingsFormat @in = new Lucene41PostingsFormat();
+
+        public AssertingPostingsFormat()
+            : base()
+        {
+        }
+
+        public override FieldsConsumer FieldsConsumer(SegmentWriteState state)
+        {
+            return new AssertingFieldsConsumer(@in.FieldsConsumer(state));
+        }
+
+        public override FieldsProducer FieldsProducer(SegmentReadState state)
+        {
+            return new AssertingFieldsProducer(@in.FieldsProducer(state));
+        }
+
+        internal class AssertingFieldsProducer : FieldsProducer
+        {
+            internal readonly FieldsProducer @in;
+
+            internal AssertingFieldsProducer(FieldsProducer @in)
+            {
+                this.@in = @in;
+            }
+
+            public override void Dispose()
+            {
+                Dispose(true);
+            }
+
+            protected void Dispose(bool disposing)
+            {
+                if (disposing)
+                    @in.Dispose();
+            }
+
+            public override IEnumerator<string> GetEnumerator()
+            {
+                IEnumerator<string> iterator = @in.GetEnumerator();
+                Debug.Assert(iterator != null);
+                return iterator;
+            }
+
+            public override Terms GetTerms(string field)
+            {
+                Terms terms = @in.GetTerms(field);
+                return terms == null ? null : new AssertingAtomicReader.AssertingTerms(terms);
+            }
+
+            public override int Count
+            {
+                get { return @in.Count; }
+            }
+
+            [Obsolete("iterate fields and add their Count instead.")]
+            public override long UniqueTermCount
+            {
+                get
+                {
+                    return @in.UniqueTermCount;
+                }
+            }
+
+            public override long RamBytesUsed()
+            {
+                return @in.RamBytesUsed();
+            }
+
+            public override void CheckIntegrity()
+            {
+                @in.CheckIntegrity();
+            }
+        }
+
+        internal class AssertingFieldsConsumer : FieldsConsumer
+        {
+            internal readonly FieldsConsumer @in;
+
+            internal AssertingFieldsConsumer(FieldsConsumer @in)
+            {
+                this.@in = @in;
+            }
+
+            public override TermsConsumer AddField(FieldInfo field)
+            {
+                TermsConsumer consumer = @in.AddField(field);
+                Debug.Assert(consumer != null);
+                return new AssertingTermsConsumer(consumer, field);
+            }
+
+            public override void Dispose()
+            {
+                Dispose(true);
+            }
+
+            protected void Dispose(bool disposing)
+            {
+                if (disposing)
+                    @in.Dispose();
+            }
+        }
+
+        internal enum TermsConsumerState
+        {
+            INITIAL,
+            START,
+            FINISHED
+        }
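+        // State machine enforced below: StartTerm moves INITIAL -> START, FinishTerm
+        // moves START -> INITIAL, and Finish moves the consumer to FINISHED exactly once.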
+
+        internal class AssertingTermsConsumer : TermsConsumer
+        {
+            internal readonly TermsConsumer @in;
+            private readonly FieldInfo fieldInfo;
+            internal BytesRef LastTerm = null;
+            internal TermsConsumerState State = TermsConsumerState.INITIAL;
+            internal AssertingPostingsConsumer LastPostingsConsumer = null;
+            internal long SumTotalTermFreq = 0;
+            internal long SumDocFreq = 0;
+            internal OpenBitSet VisitedDocs = new OpenBitSet();
+
+            internal AssertingTermsConsumer(TermsConsumer @in, FieldInfo fieldInfo)
+            {
+                this.@in = @in;
+                this.fieldInfo = fieldInfo;
+            }
+
+            public override PostingsConsumer StartTerm(BytesRef text)
+            {
+                Debug.Assert(State == TermsConsumerState.INITIAL || (State == TermsConsumerState.START && LastPostingsConsumer.DocFreq == 0));
+                State = TermsConsumerState.START;
+                Debug.Assert(LastTerm == null || @in.Comparer.Compare(text, LastTerm) > 0);
+                LastTerm = BytesRef.DeepCopyOf(text);
+                return LastPostingsConsumer = new AssertingPostingsConsumer(@in.StartTerm(text), fieldInfo, VisitedDocs);
+            }
+
+            public override void FinishTerm(BytesRef text, TermStats stats)
+            {
+                Debug.Assert(State == TermsConsumerState.START);
+                State = TermsConsumerState.INITIAL;
+                Debug.Assert(text.Equals(LastTerm));
+                Debug.Assert(stats.DocFreq > 0); // otherwise, this method should not be called.
+                Debug.Assert(stats.DocFreq == LastPostingsConsumer.DocFreq);
+                SumDocFreq += stats.DocFreq;
+                if (fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY)
+                {
+                    Debug.Assert(stats.TotalTermFreq == -1);
+                }
+                else
+                {
+                    Debug.Assert(stats.TotalTermFreq == LastPostingsConsumer.TotalTermFreq);
+                    SumTotalTermFreq += stats.TotalTermFreq;
+                }
+                @in.FinishTerm(text, stats);
+            }
+
+            public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount)
+            {
+                Debug.Assert(State == TermsConsumerState.INITIAL || (State == TermsConsumerState.START && LastPostingsConsumer.DocFreq == 0));
+                State = TermsConsumerState.FINISHED;
+                Debug.Assert(docCount >= 0);
+                Debug.Assert(docCount == VisitedDocs.Cardinality());
+                Debug.Assert(sumDocFreq >= docCount);
+                Debug.Assert(sumDocFreq == this.SumDocFreq);
+                if (fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY)
+                {
+                    Debug.Assert(sumTotalTermFreq == -1);
+                }
+                else
+                {
+                    Debug.Assert(sumTotalTermFreq >= sumDocFreq);
+                    Debug.Assert(sumTotalTermFreq == this.SumTotalTermFreq);
+                }
+                @in.Finish(sumTotalTermFreq, sumDocFreq, docCount);
+            }
+
+            public override IComparer<BytesRef> Comparer
+            {
+                get
+                {
+                    return @in.Comparer;
+                }
+            }
+        }
+
+        internal enum PostingsConsumerState
+        {
+            INITIAL,
+            START
+        }
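+        // Per-posting invariants checked below: positions never decrease within a doc
+        // (-1 is tolerated for old 3.x indexes), start offsets are non-decreasing,
+        // payloads appear only when the field declares them, and the number of
+        // positions added must equal the document's freq.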
+
+        internal class AssertingPostingsConsumer : PostingsConsumer
+        {
+            internal readonly PostingsConsumer @in;
+            private readonly FieldInfo fieldInfo;
+            internal readonly OpenBitSet VisitedDocs;
+            internal PostingsConsumerState State = PostingsConsumerState.INITIAL;
+            internal int Freq;
+            internal int PositionCount;
+            internal int LastPosition = 0;
+            internal int LastStartOffset = 0;
+            internal int DocFreq = 0;
+            internal long TotalTermFreq = 0;
+
+            internal AssertingPostingsConsumer(PostingsConsumer @in, FieldInfo fieldInfo, OpenBitSet visitedDocs)
+            {
+                this.@in = @in;
+                this.fieldInfo = fieldInfo;
+                this.VisitedDocs = visitedDocs;
+            }
+
+            public override void StartDoc(int docID, int freq)
+            {
+                Debug.Assert(State == PostingsConsumerState.INITIAL);
+                State = PostingsConsumerState.START;
+                Debug.Assert(docID >= 0);
+                if (fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY)
+                {
+                    Debug.Assert(freq == -1);
+                    this.Freq = 0; // we don't expect any positions here
+                }
+                else
+                {
+                    Debug.Assert(freq > 0);
+                    this.Freq = freq;
+                    TotalTermFreq += freq;
+                }
+                this.PositionCount = 0;
+                this.LastPosition = 0;
+                this.LastStartOffset = 0;
+                DocFreq++;
+                VisitedDocs.Set(docID);
+                @in.StartDoc(docID, freq);
+            }
+
+            public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset)
+            {
+                Debug.Assert(State == PostingsConsumerState.START);
+                Debug.Assert(PositionCount < Freq);
+                PositionCount++;
+                Debug.Assert(position >= LastPosition || position == -1); // we still allow -1 from old 3.x indexes
+                LastPosition = position;
+                if (fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
+                {
+                    Debug.Assert(startOffset >= 0);
+                    Debug.Assert(startOffset >= LastStartOffset);
+                    LastStartOffset = startOffset;
+                    Debug.Assert(endOffset >= startOffset);
+                }
+                else
+                {
+                    Debug.Assert(startOffset == -1);
+                    Debug.Assert(endOffset == -1);
+                }
+                if (payload != null)
+                {
+                    Debug.Assert(fieldInfo.HasPayloads);
+                }
+                @in.AddPosition(position, payload, startOffset, endOffset);
+            }
+
+            public override void FinishDoc()
+            {
+                Debug.Assert(State == PostingsConsumerState.START);
+                State = PostingsConsumerState.INITIAL;
+                if (fieldInfo.IndexOptions < IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)
+                {
+                    Debug.Assert(PositionCount == 0); // we should not have fed any positions!
+                }
+                else
+                {
+                    Debug.Assert(PositionCount == Freq);
+                }
+                @in.FinishDoc();
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/77e95ccc/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingStoredFieldsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingStoredFieldsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingStoredFieldsFormat.cs
new file mode 100644
index 0000000..1ad05ef
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingStoredFieldsFormat.cs
@@ -0,0 +1,154 @@
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Asserting
+{
+    using Directory = Lucene.Net.Store.Directory;
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using FieldInfos = Lucene.Net.Index.FieldInfos;
+    using IIndexableField = Lucene.Net.Index.IIndexableField;
+    using IOContext = Lucene.Net.Store.IOContext;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene41StoredFieldsFormat = Lucene.Net.Codecs.Lucene41.Lucene41StoredFieldsFormat;
+    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
+    using StoredFieldVisitor = Lucene.Net.Index.StoredFieldVisitor;
+
+    /// <summary>
+    /// Just like <seealso cref="Lucene41StoredFieldsFormat"/> but with additional asserts.
+    /// </summary>
+    public class AssertingStoredFieldsFormat : StoredFieldsFormat
+    {
+        private readonly StoredFieldsFormat @in = new Lucene41StoredFieldsFormat();
+
+        public override StoredFieldsReader FieldsReader(Directory directory, SegmentInfo si, FieldInfos fn, IOContext context)
+        {
+            return new AssertingStoredFieldsReader(@in.FieldsReader(directory, si, fn, context), si.DocCount);
+        }
+
+        public override StoredFieldsWriter FieldsWriter(Directory directory, SegmentInfo si, IOContext context)
+        {
+            return new AssertingStoredFieldsWriter(@in.FieldsWriter(directory, si, context));
+        }
+
+        internal class AssertingStoredFieldsReader : StoredFieldsReader
+        {
+            internal readonly StoredFieldsReader @in;
+            internal readonly int MaxDoc;
+
+            internal AssertingStoredFieldsReader(StoredFieldsReader @in, int maxDoc)
+            {
+                this.@in = @in;
+                this.MaxDoc = maxDoc;
+            }
+
+            protected override void Dispose(bool disposing)
+            {
+                if (disposing)
+                    @in.Dispose();
+            }
+
+            public override void VisitDocument(int n, StoredFieldVisitor visitor)
+            {
+                Debug.Assert(n >= 0 && n < MaxDoc);
+                @in.VisitDocument(n, visitor);
+            }
+
+            public override object Clone()
+            {
+                return new AssertingStoredFieldsReader((StoredFieldsReader)@in.Clone(), MaxDoc);
+            }
+
+            public override long RamBytesUsed()
+            {
+                return @in.RamBytesUsed();
+            }
+
+            public override void CheckIntegrity()
+            {
+                @in.CheckIntegrity();
+            }
+        }
+
+        internal enum Status
+        {
+            UNDEFINED,
+            STARTED,
+            FINISHED
+        }
+
+        internal class AssertingStoredFieldsWriter : StoredFieldsWriter
+        {
+            internal readonly StoredFieldsWriter @in;
+            internal int NumWritten;
+            internal int FieldCount;
+            internal Status DocStatus;
+
+            internal AssertingStoredFieldsWriter(StoredFieldsWriter @in)
+            {
+                this.@in = @in;
+                this.DocStatus = Status.UNDEFINED;
+            }
+
+            public override void StartDocument(int numStoredFields)
+            {
+                Debug.Assert(DocStatus != Status.STARTED);
+                @in.StartDocument(numStoredFields);
+                Debug.Assert(FieldCount == 0);
+                FieldCount = numStoredFields;
+                NumWritten++;
+                DocStatus = Status.STARTED;
+            }
+
+            public override void FinishDocument()
+            {
+                Debug.Assert(DocStatus == Status.STARTED);
+                Debug.Assert(FieldCount == 0);
+                @in.FinishDocument();
+                DocStatus = Status.FINISHED;
+            }
+
+            public override void WriteField(FieldInfo info, IIndexableField field)
+            {
+                Debug.Assert(DocStatus == Status.STARTED);
+                @in.WriteField(info, field);
+                Debug.Assert(FieldCount > 0);
+                FieldCount--;
+            }
+
+            public override void Abort()
+            {
+                @in.Abort();
+            }
+
+            public override void Finish(FieldInfos fis, int numDocs)
+            {
+                Debug.Assert(DocStatus == (numDocs > 0 ? Status.FINISHED : Status.UNDEFINED));
+                @in.Finish(fis, numDocs);
+                Debug.Assert(FieldCount == 0);
+                Debug.Assert(numDocs == NumWritten);
+            }
+
+            protected override void Dispose(bool disposing)
+            {
+                if (disposing)
+                    @in.Dispose();
+            }
+        }
+    }
+}
\ No newline at end of file
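
AssertingStoredFieldsWriter enforces simple bookkeeping: StartDocument declares how many stored fields will follow, each WriteField consumes one, and FinishDocument is only legal once the count is back to zero. A hedged sketch of a caller honoring that contract; the helper and its arguments are hypothetical, since in practice the indexing chain drives the writer:

    using Lucene.Net.Codecs;
    using Lucene.Net.Index;

    internal static class StoredFieldsContractSketch
    {
        // Hypothetical helper, not part of the format API: writes one document
        // with exactly two stored fields in the order the asserts require.
        public static void WriteOneDocument(StoredFieldsWriter writer,
                                            FieldInfo infoA, IIndexableField fieldA,
                                            FieldInfo infoB, IIndexableField fieldB)
        {
            writer.StartDocument(2);          // declare two fields up front
            writer.WriteField(infoA, fieldA); // FieldCount 2 -> 1
            writer.WriteField(infoB, fieldB); // FieldCount 1 -> 0
            writer.FinishDocument();          // asserts FieldCount == 0
        }
    }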

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/77e95ccc/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingTermVectorsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingTermVectorsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingTermVectorsFormat.cs
new file mode 100644
index 0000000..cb90f52
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Asserting/AssertingTermVectorsFormat.cs
@@ -0,0 +1,208 @@
+using System.Collections.Generic;
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Asserting
+{
+    using AssertingAtomicReader = Lucene.Net.Index.AssertingAtomicReader;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using FieldInfos = Lucene.Net.Index.FieldInfos;
+    using Fields = Lucene.Net.Index.Fields;
+    using IOContext = Lucene.Net.Store.IOContext;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene40TermVectorsFormat = Lucene.Net.Codecs.Lucene40.Lucene40TermVectorsFormat;
+    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
+
+    /// <summary>
+    /// Just like <seealso cref="Lucene40TermVectorsFormat"/> but with additional asserts.
+    /// </summary>
+    public class AssertingTermVectorsFormat : TermVectorsFormat
+    {
+        private readonly TermVectorsFormat @in = new Lucene40TermVectorsFormat();
+
+        public override TermVectorsReader VectorsReader(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context)
+        {
+            return new AssertingTermVectorsReader(@in.VectorsReader(directory, segmentInfo, fieldInfos, context));
+        }
+
+        public override TermVectorsWriter VectorsWriter(Directory directory, SegmentInfo segmentInfo, IOContext context)
+        {
+            return new AssertingTermVectorsWriter(@in.VectorsWriter(directory, segmentInfo, context));
+        }
+
+        internal class AssertingTermVectorsReader : TermVectorsReader
+        {
+            internal readonly TermVectorsReader @in;
+
+            internal AssertingTermVectorsReader(TermVectorsReader @in)
+            {
+                this.@in = @in;
+            }
+
+            protected override void Dispose(bool disposing)
+            {
+                if (disposing)
+                    @in.Dispose();
+            }
+
+            public override Fields Get(int doc)
+            {
+                Fields fields = @in.Get(doc);
+                return fields == null ? null : new AssertingAtomicReader.AssertingFields(fields);
+            }
+
+            public override object Clone()
+            {
+                return new AssertingTermVectorsReader((TermVectorsReader)@in.Clone());
+            }
+
+            public override long RamBytesUsed()
+            {
+                return @in.RamBytesUsed();
+            }
+
+            public override void CheckIntegrity()
+            {
+                @in.CheckIntegrity();
+            }
+        }
+
+        internal enum Status
+        {
+            UNDEFINED,
+            STARTED,
+            FINISHED
+        }
+
+        internal class AssertingTermVectorsWriter : TermVectorsWriter
+        {
+            internal readonly TermVectorsWriter @in;
+            internal Status DocStatus, FieldStatus, TermStatus;
+            internal int DocCount, FieldCount, TermCount, PositionCount;
+            internal bool HasPositions;
+
+            internal AssertingTermVectorsWriter(TermVectorsWriter @in)
+            {
+                this.@in = @in;
+                DocStatus = Status.UNDEFINED;
+                FieldStatus = Status.UNDEFINED;
+                TermStatus = Status.UNDEFINED;
+                FieldCount = TermCount = PositionCount = 0;
+            }
+
+            public override void StartDocument(int numVectorFields)
+            {
+                Debug.Assert(FieldCount == 0);
+                Debug.Assert(DocStatus != Status.STARTED);
+                @in.StartDocument(numVectorFields);
+                DocStatus = Status.STARTED;
+                FieldCount = numVectorFields;
+                DocCount++;
+            }
+
+            public override void FinishDocument()
+            {
+                Debug.Assert(FieldCount == 0);
+                Debug.Assert(DocStatus == Status.STARTED);
+                @in.FinishDocument();
+                DocStatus = Status.FINISHED;
+            }
+
+            public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads)
+            {
+                Debug.Assert(TermCount == 0);
+                Debug.Assert(DocStatus == Status.STARTED);
+                Debug.Assert(FieldStatus != Status.STARTED);
+                @in.StartField(info, numTerms, positions, offsets, payloads);
+                FieldStatus = Status.STARTED;
+                TermCount = numTerms;
+                HasPositions = positions || offsets || payloads;
+            }
+
+            public override void FinishField()
+            {
+                Debug.Assert(TermCount == 0);
+                Debug.Assert(FieldStatus == Status.STARTED);
+                @in.FinishField();
+                FieldStatus = Status.FINISHED;
+                --FieldCount;
+            }
+
+            public override void StartTerm(BytesRef term, int freq)
+            {
+                Debug.Assert(DocStatus == Status.STARTED);
+                Debug.Assert(FieldStatus == Status.STARTED);
+                Debug.Assert(TermStatus != Status.STARTED);
+                @in.StartTerm(term, freq);
+                TermStatus = Status.STARTED;
+                PositionCount = HasPositions ? freq : 0;
+            }
+
+            public override void FinishTerm()
+            {
+                Debug.Assert(PositionCount == 0);
+                Debug.Assert(DocStatus == Status.STARTED);
+                Debug.Assert(FieldStatus == Status.STARTED);
+                Debug.Assert(TermStatus == Status.STARTED);
+                @in.FinishTerm();
+                TermStatus = Status.FINISHED;
+                --TermCount;
+            }
+
+            public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload)
+            {
+                Debug.Assert(DocStatus == Status.STARTED);
+                Debug.Assert(FieldStatus == Status.STARTED);
+                Debug.Assert(TermStatus == Status.STARTED);
+                @in.AddPosition(position, startOffset, endOffset, payload);
+                --PositionCount;
+            }
+
+            public override void Abort()
+            {
+                @in.Abort();
+            }
+
+            public override void Finish(FieldInfos fis, int numDocs)
+            {
+                Debug.Assert(DocCount == numDocs);
+                Debug.Assert(DocStatus == (numDocs > 0 ? Status.FINISHED : Status.UNDEFINED));
+                Debug.Assert(FieldStatus != Status.STARTED);
+                Debug.Assert(TermStatus != Status.STARTED);
+                @in.Finish(fis, numDocs);
+            }
+
+            public override IComparer<BytesRef> Comparer
+            {
+                get
+                {
+                    return @in.Comparer;
+                }
+            }
+
+            protected override void Dispose(bool disposing)
+            {
+                if (disposing)
+                    @in.Dispose();
+            }
+        }
+    }
+}
\ No newline at end of file
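
AssertingTermVectorsWriter nests three such state machines: a document opens a declared number of fields, a field a declared number of terms, and a term (when positions are enabled) exactly freq positions. A hedged sketch of one legal call sequence; the helper is hypothetical and assumes writer, fieldInfo, and term come from a real indexing context:

    using Lucene.Net.Codecs;
    using Lucene.Net.Index;
    using Lucene.Net.Util;

    internal static class TermVectorsContractSketch
    {
        // Hypothetical helper: one document, one field, one term with freq 2,
        // positions enabled but offsets and payloads disabled (hence the -1s).
        public static void WriteOneDocument(TermVectorsWriter writer,
                                            FieldInfo fieldInfo, BytesRef term)
        {
            writer.StartDocument(1);                             // FieldCount = 1
            writer.StartField(fieldInfo, 1, true, false, false); // TermCount = 1
            writer.StartTerm(term, 2);                           // PositionCount = 2
            writer.AddPosition(0, -1, -1, null);                 // PositionCount -> 1
            writer.AddPosition(3, -1, -1, null);                 // PositionCount -> 0
            writer.FinishTerm();                                 // asserts PositionCount == 0
            writer.FinishField();                                // asserts TermCount == 0
            writer.FinishDocument();                             // asserts FieldCount == 0
        }
    }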

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/77e95ccc/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingCodec.cs
deleted file mode 100644
index 3917c46..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingCodec.cs
+++ /dev/null
@@ -1,64 +0,0 @@
-namespace Lucene.Net.Codecs.asserting
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using Lucene46Codec = Lucene.Net.Codecs.Lucene46.Lucene46Codec;
-
-    /// <summary>
-    /// Acts like <seealso cref="Lucene46Codec"/> but with additional asserts.
-    /// </summary>
-    [CodecName("Asserting")]
-    public sealed class AssertingCodec : FilterCodec
-    {
-        private readonly PostingsFormat Postings = new AssertingPostingsFormat();
-        private readonly TermVectorsFormat Vectors = new AssertingTermVectorsFormat();
-        private readonly StoredFieldsFormat StoredFields = new AssertingStoredFieldsFormat();
-        private readonly DocValuesFormat DocValues = new AssertingDocValuesFormat();
-        private readonly NormsFormat Norms = new AssertingNormsFormat();
-
-        public AssertingCodec()
-            : base(new Lucene46Codec())
-        {
-        }
-
-        public override PostingsFormat PostingsFormat
-        {
-            get { return Postings; }
-        }
-
-        public override TermVectorsFormat TermVectorsFormat
-        {
-            get { return Vectors; }
-        }
-
-        public override StoredFieldsFormat StoredFieldsFormat
-        {
-            get { return StoredFields; }
-        }
-
-        public override DocValuesFormat DocValuesFormat
-        {
-            get { return DocValues; }
-        }
-
-        public override NormsFormat NormsFormat
-        {
-            get { return Norms; }
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/77e95ccc/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingDocValuesFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingDocValuesFormat.cs b/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingDocValuesFormat.cs
deleted file mode 100644
index 7a46b5c..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingDocValuesFormat.cs
+++ /dev/null
@@ -1,346 +0,0 @@
-using System.Collections.Generic;
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.asserting
-{
-    using System;
-    using AssertingAtomicReader = Lucene.Net.Index.AssertingAtomicReader;
-    using BinaryDocValues = Lucene.Net.Index.BinaryDocValues;
-    using IBits = Lucene.Net.Util.IBits;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using DocValuesType = Lucene.Net.Index.DocValuesType;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FixedBitSet = Lucene.Net.Util.FixedBitSet;
-    using Int64BitSet = Lucene.Net.Util.Int64BitSet;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using Lucene45DocValuesFormat = Lucene.Net.Codecs.Lucene45.Lucene45DocValuesFormat;
-    using NumericDocValues = Lucene.Net.Index.NumericDocValues;
-    using SegmentReadState = Lucene.Net.Index.SegmentReadState;
-    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
-    using SortedDocValues = Lucene.Net.Index.SortedDocValues;
-    using SortedSetDocValues = Lucene.Net.Index.SortedSetDocValues;
-
-    /// <summary>
-    /// Just like <seealso cref="Lucene45DocValuesFormat"/> but with additional asserts.
-    /// </summary>
-    [DocValuesFormatName("Asserting")]
-    public class AssertingDocValuesFormat : DocValuesFormat
-    {
-        private readonly DocValuesFormat @in = new Lucene45DocValuesFormat();
-
-        public AssertingDocValuesFormat()
-            : base()
-        {
-        }
-
-        public override DocValuesConsumer FieldsConsumer(SegmentWriteState state)
-        {
-            DocValuesConsumer consumer = @in.FieldsConsumer(state);
-            Debug.Assert(consumer != null);
-            return new AssertingDocValuesConsumer(consumer, state.SegmentInfo.DocCount);
-        }
-
-        public override DocValuesProducer FieldsProducer(SegmentReadState state)
-        {
-            Debug.Assert(state.FieldInfos.HasDocValues);
-            DocValuesProducer producer = @in.FieldsProducer(state);
-            Debug.Assert(producer != null);
-            return new AssertingDocValuesProducer(producer, state.SegmentInfo.DocCount);
-        }
-
-        internal class AssertingDocValuesConsumer : DocValuesConsumer
-        {
-            internal readonly DocValuesConsumer @in;
-            internal readonly int MaxDoc;
-
-            internal AssertingDocValuesConsumer(DocValuesConsumer @in, int maxDoc)
-            {
-                this.@in = @in;
-                this.MaxDoc = maxDoc;
-            }
-
-            public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
-            {
-                int count = 0;
-                foreach (var v in values)
-                {
-                    count++;
-                }
-                Debug.Assert(count == MaxDoc);
-                CheckIterator(values.GetEnumerator(), MaxDoc, true);
-                @in.AddNumericField(field, values);
-            }
-
-            public override void AddBinaryField(FieldInfo field, IEnumerable<BytesRef> values)
-            {
-                int count = 0;
-                foreach (BytesRef b in values)
-                {
-                    Debug.Assert(b == null || b.IsValid());
-                    count++;
-                }
-                Debug.Assert(count == MaxDoc);
-                CheckIterator(values.GetEnumerator(), MaxDoc, true);
-                @in.AddBinaryField(field, values);
-            }
-
-            public override void AddSortedField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd)
-            {
-                int valueCount = 0;
-                BytesRef lastValue = null;
-                foreach (BytesRef b in values)
-                {
-                    Debug.Assert(b != null);
-                    Debug.Assert(b.IsValid());
-                    if (valueCount > 0)
-                    {
-                        Debug.Assert(b.CompareTo(lastValue) > 0);
-                    }
-                    lastValue = BytesRef.DeepCopyOf(b);
-                    valueCount++;
-                }
-                Debug.Assert(valueCount <= MaxDoc);
-
-                FixedBitSet seenOrds = new FixedBitSet(valueCount);
-
-                int count = 0;
-                foreach (long? v in docToOrd)
-                {
-                    Debug.Assert(v != null);
-                    int ord = (int)v.Value;
-                    Debug.Assert(ord >= -1 && ord < valueCount);
-                    if (ord >= 0)
-                    {
-                        seenOrds.Set(ord);
-                    }
-                    count++;
-                }
-
-                Debug.Assert(count == MaxDoc);
-                Debug.Assert(seenOrds.Cardinality() == valueCount);
-                CheckIterator(values.GetEnumerator(), valueCount, false);
-                CheckIterator(docToOrd.GetEnumerator(), MaxDoc, false);
-                @in.AddSortedField(field, values, docToOrd);
-            }
-
-            public override void AddSortedSetField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
-            {
-                long valueCount = 0;
-                BytesRef lastValue = null;
-                foreach (BytesRef b in values)
-                {
-                    Debug.Assert(b != null);
-                    Debug.Assert(b.IsValid());
-                    if (valueCount > 0)
-                    {
-                        Debug.Assert(b.CompareTo(lastValue) > 0);
-                    }
-                    lastValue = BytesRef.DeepCopyOf(b);
-                    valueCount++;
-                }
-
-                int docCount = 0;
-                long ordCount = 0;
-                Int64BitSet seenOrds = new Int64BitSet(valueCount);
-                IEnumerator<long?> ordIterator = ords.GetEnumerator();
-                foreach (long? v in docToOrdCount)
-                {
-                    Debug.Assert(v != null);
-                    int count = (int)v.Value;
-                    Debug.Assert(count >= 0);
-                    docCount++;
-                    ordCount += count;
-
-                    long lastOrd = -1;
-                    for (int i = 0; i < count; i++)
-                    {
-                        ordIterator.MoveNext();
-                        long? o = ordIterator.Current;
-                        Debug.Assert(o != null);
-                        long ord = o.Value;
-                        Debug.Assert(ord >= 0 && ord < valueCount);
-                        Debug.Assert(ord > lastOrd, "ord=" + ord + ",lastOrd=" + lastOrd);
-                        seenOrds.Set(ord);
-                        lastOrd = ord;
-                    }
-                }
-                Debug.Assert(ordIterator.MoveNext() == false);
-
-                Debug.Assert(docCount == MaxDoc);
-                Debug.Assert(seenOrds.Cardinality() == valueCount);
-                CheckIterator(values.GetEnumerator(), valueCount, false);
-                CheckIterator(docToOrdCount.GetEnumerator(), MaxDoc, false);
-                CheckIterator(ords.GetEnumerator(), ordCount, false);
-                @in.AddSortedSetField(field, values, docToOrdCount, ords);
-            }
-
-            protected override void Dispose(bool disposing)
-            {
-                if (disposing)
-                    @in.Dispose();
-            }
-        }
-
-        internal class AssertingNormsConsumer : DocValuesConsumer
-        {
-            internal readonly DocValuesConsumer @in;
-            internal readonly int MaxDoc;
-
-            internal AssertingNormsConsumer(DocValuesConsumer @in, int maxDoc)
-            {
-                this.@in = @in;
-                this.MaxDoc = maxDoc;
-            }
-
-            public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
-            {
-                int count = 0;
-                foreach (long? v in values)
-                {
-                    Debug.Assert(v != null);
-                    count++;
-                }
-                Debug.Assert(count == MaxDoc);
-                CheckIterator(values.GetEnumerator(), MaxDoc, false);
-                @in.AddNumericField(field, values);
-            }
-
-            protected override void Dispose(bool disposing)
-            {
-                if (disposing)
-                    @in.Dispose();
-            }
-
-            public override void AddBinaryField(FieldInfo field, IEnumerable<BytesRef> values)
-            {
-                throw new InvalidOperationException();
-            }
-
-            public override void AddSortedField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd)
-            {
-                throw new InvalidOperationException();
-            }
-
-            public override void AddSortedSetField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
-            {
-                throw new InvalidOperationException();
-            }
-        }
-
-        private static void CheckIterator<T>(IEnumerator<T> iterator, long expectedSize, bool allowNull)
-        {
-            for (long i = 0; i < expectedSize; i++)
-            {
-                bool hasNext = iterator.MoveNext();
-                Debug.Assert(hasNext);
-                T v = iterator.Current;
-                Debug.Assert(allowNull || v != null);
-                try
-                {
-                    iterator.Reset();
-                    throw new InvalidOperationException("broken iterator (supports remove): " + iterator);
-                }
-                catch (System.NotSupportedException)
-                {
-                    // ok
-                }
-            }
-            Debug.Assert(!iterator.MoveNext());
-            /*try
-            {
-              //iterator.next();
-              throw new InvalidOperationException("broken iterator (allows next() when hasNext==false) " + iterator);
-            }
-            catch (Exception)
-            {
-              // ok
-            }*/
-        }
-
-        internal class AssertingDocValuesProducer : DocValuesProducer
-        {
-            internal readonly DocValuesProducer @in;
-            internal readonly int MaxDoc;
-
-            internal AssertingDocValuesProducer(DocValuesProducer @in, int maxDoc)
-            {
-                this.@in = @in;
-                this.MaxDoc = maxDoc;
-            }
-
-            public override NumericDocValues GetNumeric(FieldInfo field)
-            {
-                Debug.Assert(field.DocValuesType == DocValuesType.NUMERIC || field.NormType == DocValuesType.NUMERIC);
-                NumericDocValues values = @in.GetNumeric(field);
-                Debug.Assert(values != null);
-                return new AssertingAtomicReader.AssertingNumericDocValues(values, MaxDoc);
-            }
-
-            public override BinaryDocValues GetBinary(FieldInfo field)
-            {
-                Debug.Assert(field.DocValuesType == DocValuesType.BINARY);
-                BinaryDocValues values = @in.GetBinary(field);
-                Debug.Assert(values != null);
-                return new AssertingAtomicReader.AssertingBinaryDocValues(values, MaxDoc);
-            }
-
-            public override SortedDocValues GetSorted(FieldInfo field)
-            {
-                Debug.Assert(field.DocValuesType == DocValuesType.SORTED);
-                SortedDocValues values = @in.GetSorted(field);
-                Debug.Assert(values != null);
-                return new AssertingAtomicReader.AssertingSortedDocValues(values, MaxDoc);
-            }
-
-            public override SortedSetDocValues GetSortedSet(FieldInfo field)
-            {
-                Debug.Assert(field.DocValuesType == DocValuesType.SORTED_SET);
-                SortedSetDocValues values = @in.GetSortedSet(field);
-                Debug.Assert(values != null);
-                return new AssertingAtomicReader.AssertingSortedSetDocValues(values, MaxDoc);
-            }
-
-            public override IBits GetDocsWithField(FieldInfo field)
-            {
-                Debug.Assert(field.DocValuesType != null);
-                IBits bits = @in.GetDocsWithField(field);
-                Debug.Assert(bits != null);
-                Debug.Assert(bits.Length == MaxDoc);
-                return new AssertingAtomicReader.AssertingBits(bits);
-            }
-
-            protected override void Dispose(bool disposing)
-            {
-                if (disposing)
-                    @in.Dispose();
-            }
-
-            public override long RamBytesUsed()
-            {
-                return @in.RamBytesUsed();
-            }
-
-            public override void CheckIntegrity()
-            {
-                @in.CheckIntegrity();
-            }
-        }
-    }
-}
\ No newline at end of file
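
AddSortedField above boils down to three invariants: the value enumeration is strictly increasing (hence unique), every per-document ord lies in [-1, valueCount) with -1 meaning the document has no value, and every ord is referenced at least once. The same validation phrased over plain collections; the names are illustrative only, not the Lucene.Net API:

    using System;
    using System.Collections.Generic;
    using System.Diagnostics;

    internal static class SortedFieldCheckSketch
    {
        // Mirrors the AddSortedField asserts over plain strings and ints.
        public static void Check(IList<string> values, IList<int> docToOrd)
        {
            for (int i = 1; i < values.Count; i++)
            {
                Debug.Assert(string.CompareOrdinal(values[i - 1], values[i]) < 0,
                             "values must be unique and strictly increasing");
            }
            bool[] seen = new bool[values.Count];
            foreach (int ord in docToOrd)
            {
                Debug.Assert(ord >= -1 && ord < values.Count, "ord out of range");
                if (ord >= 0)
                {
                    seen[ord] = true;
                }
            }
            Debug.Assert(Array.TrueForAll(seen, s => s), "every value must be referenced");
        }

        public static void Main()
        {
            // doc0 -> "banana", doc1 -> missing, doc2 -> "apple"
            Check(new[] { "apple", "banana" }, new[] { 1, -1, 0 });
        }
    }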

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/77e95ccc/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingNormsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingNormsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingNormsFormat.cs
deleted file mode 100644
index 0137dce..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingNormsFormat.cs
+++ /dev/null
@@ -1,51 +0,0 @@
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.asserting
-{
-    using AssertingDocValuesProducer = Lucene.Net.Codecs.asserting.AssertingDocValuesFormat.AssertingDocValuesProducer;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using AssertingNormsConsumer = Lucene.Net.Codecs.asserting.AssertingDocValuesFormat.AssertingNormsConsumer;
-    using Lucene42NormsFormat = Lucene.Net.Codecs.Lucene42.Lucene42NormsFormat;
-    using SegmentReadState = Lucene.Net.Index.SegmentReadState;
-    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
-
-    /// <summary>
-    /// Just like <seealso cref="Lucene42NormsFormat"/> but with additional asserts.
-    /// </summary>
-    public class AssertingNormsFormat : NormsFormat
-    {
-        private readonly NormsFormat @in = new Lucene42NormsFormat();
-
-        public override DocValuesConsumer NormsConsumer(SegmentWriteState state)
-        {
-            DocValuesConsumer consumer = @in.NormsConsumer(state);
-            Debug.Assert(consumer != null);
-            return new AssertingNormsConsumer(consumer, state.SegmentInfo.DocCount);
-        }
-
-        public override DocValuesProducer NormsProducer(SegmentReadState state)
-        {
-            Debug.Assert(state.FieldInfos.HasNorms);
-            DocValuesProducer producer = @in.NormsProducer(state);
-            Debug.Assert(producer != null);
-            return new AssertingDocValuesProducer(producer, state.SegmentInfo.DocCount);
-        }
-    }
-}
\ No newline at end of file
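
The norms consumer above differs from the general doc-values consumer in one respect: norms are dense, so every one of maxDoc documents must supply a non-null value. A minimal mirror of that check, with illustrative names:

    using System.Collections.Generic;
    using System.Diagnostics;

    internal static class NormsCheckSketch
    {
        // Norms are dense: exactly maxDoc values, none of them missing.
        public static void Check(IEnumerable<long?> values, int maxDoc)
        {
            int count = 0;
            foreach (long? v in values)
            {
                Debug.Assert(v != null, "norms may not have missing values");
                count++;
            }
            Debug.Assert(count == maxDoc, "exactly one norm per document");
        }
    }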

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/77e95ccc/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingPostingsFormat.cs
deleted file mode 100644
index ef582b7..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingPostingsFormat.cs
+++ /dev/null
@@ -1,318 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.asserting
-{
-    using AssertingAtomicReader = Lucene.Net.Index.AssertingAtomicReader;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using IndexOptions = Lucene.Net.Index.IndexOptions;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using Lucene41PostingsFormat = Lucene.Net.Codecs.Lucene41.Lucene41PostingsFormat;
-    using OpenBitSet = Lucene.Net.Util.OpenBitSet;
-    using SegmentReadState = Lucene.Net.Index.SegmentReadState;
-    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
-    using Terms = Lucene.Net.Index.Terms;
-
-    /// <summary>
-    /// Just like <seealso cref="Lucene41PostingsFormat"/> but with additional asserts.
-    /// </summary>
-    [PostingsFormatName("Asserting")] // LUCENENET specific - using PostingsFormatName attribute to ensure the default name passed from subclasses is the same as this class name
-    public sealed class AssertingPostingsFormat : PostingsFormat
-    {
-        private readonly PostingsFormat @in = new Lucene41PostingsFormat();
-
-        public AssertingPostingsFormat()
-            : base()
-        {
-        }
-
-        public override FieldsConsumer FieldsConsumer(SegmentWriteState state)
-        {
-            return new AssertingFieldsConsumer(@in.FieldsConsumer(state));
-        }
-
-        public override FieldsProducer FieldsProducer(SegmentReadState state)
-        {
-            return new AssertingFieldsProducer(@in.FieldsProducer(state));
-        }
-
-        internal class AssertingFieldsProducer : FieldsProducer
-        {
-            internal readonly FieldsProducer @in;
-
-            internal AssertingFieldsProducer(FieldsProducer @in)
-            {
-                this.@in = @in;
-            }
-
-            public override void Dispose()
-            {
-                Dispose(true);
-            }
-
-            protected void Dispose(bool disposing)
-            {
-                if (disposing)
-                    @in.Dispose();
-            }
-
-            public override IEnumerator<string> GetEnumerator()
-            {
-                IEnumerator<string> iterator = @in.GetEnumerator();
-                Debug.Assert(iterator != null);
-                return iterator;
-            }
-
-            public override Terms GetTerms(string field)
-            {
-                Terms terms = @in.GetTerms(field);
-                return terms == null ? null : new AssertingAtomicReader.AssertingTerms(terms);
-            }
-
-            public override int Count
-            {
-                get { return @in.Count; }
-            }
-
-            [Obsolete("iterate fields and add their Count instead.")]
-            public override long UniqueTermCount
-            {
-                get
-                {
-                    return @in.UniqueTermCount;
-                }
-            }
-
-            public override long RamBytesUsed()
-            {
-                return @in.RamBytesUsed();
-            }
-
-            public override void CheckIntegrity()
-            {
-                @in.CheckIntegrity();
-            }
-        }
-
-        internal class AssertingFieldsConsumer : FieldsConsumer
-        {
-            internal readonly FieldsConsumer @in;
-
-            internal AssertingFieldsConsumer(FieldsConsumer @in)
-            {
-                this.@in = @in;
-            }
-
-            public override TermsConsumer AddField(FieldInfo field)
-            {
-                TermsConsumer consumer = @in.AddField(field);
-                Debug.Assert(consumer != null);
-                return new AssertingTermsConsumer(consumer, field);
-            }
-
-            public override void Dispose()
-            {
-                Dispose(true);
-            }
-
-            protected void Dispose(bool disposing)
-            {
-                if (disposing)
-                    @in.Dispose();
-            }
-        }
-
-        internal enum TermsConsumerState
-        {
-            INITIAL,
-            START,
-            FINISHED
-        }
-
-        internal class AssertingTermsConsumer : TermsConsumer
-        {
-            internal readonly TermsConsumer @in;
-            private readonly FieldInfo fieldInfo;
-            internal BytesRef LastTerm = null;
-            internal TermsConsumerState State = TermsConsumerState.INITIAL;
-            internal AssertingPostingsConsumer LastPostingsConsumer = null;
-            internal long SumTotalTermFreq = 0;
-            internal long SumDocFreq = 0;
-            internal OpenBitSet VisitedDocs = new OpenBitSet();
-
-            internal AssertingTermsConsumer(TermsConsumer @in, FieldInfo fieldInfo)
-            {
-                this.@in = @in;
-                this.fieldInfo = fieldInfo;
-            }
-
-            public override PostingsConsumer StartTerm(BytesRef text)
-            {
-                Debug.Assert(State == TermsConsumerState.INITIAL || State == TermsConsumerState.START && LastPostingsConsumer.DocFreq == 0);
-                State = TermsConsumerState.START;
-                Debug.Assert(LastTerm == null || @in.Comparer.Compare(text, LastTerm) > 0);
-                LastTerm = BytesRef.DeepCopyOf(text);
-                return LastPostingsConsumer = new AssertingPostingsConsumer(@in.StartTerm(text), fieldInfo, VisitedDocs);
-            }
-
-            public override void FinishTerm(BytesRef text, TermStats stats)
-            {
-                Debug.Assert(State == TermsConsumerState.START);
-                State = TermsConsumerState.INITIAL;
-                Debug.Assert(text.Equals(LastTerm));
-                Debug.Assert(stats.DocFreq > 0); // otherwise, this method should not be called.
-                Debug.Assert(stats.DocFreq == LastPostingsConsumer.DocFreq);
-                SumDocFreq += stats.DocFreq;
-                if (fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY)
-                {
-                    Debug.Assert(stats.TotalTermFreq == -1);
-                }
-                else
-                {
-                    Debug.Assert(stats.TotalTermFreq == LastPostingsConsumer.TotalTermFreq);
-                    SumTotalTermFreq += stats.TotalTermFreq;
-                }
-                @in.FinishTerm(text, stats);
-            }
-
-            public override void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount)
-            {
-                Debug.Assert(State == TermsConsumerState.INITIAL || State == TermsConsumerState.START && LastPostingsConsumer.DocFreq == 0);
-                State = TermsConsumerState.FINISHED;
-                Debug.Assert(docCount >= 0);
-                Debug.Assert(docCount == VisitedDocs.Cardinality());
-                Debug.Assert(sumDocFreq >= docCount);
-                Debug.Assert(sumDocFreq == this.SumDocFreq);
-                if (fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY)
-                {
-                    Debug.Assert(sumTotalTermFreq == -1);
-                }
-                else
-                {
-                    Debug.Assert(sumTotalTermFreq >= sumDocFreq);
-                    Debug.Assert(sumTotalTermFreq == this.SumTotalTermFreq);
-                }
-                @in.Finish(sumTotalTermFreq, sumDocFreq, docCount);
-            }
-
-            public override IComparer<BytesRef> Comparer
-            {
-                get
-                {
-                    return @in.Comparer;
-                }
-            }
-        }
-
-        internal enum PostingsConsumerState
-        {
-            INITIAL,
-            START
-        }
-
-        internal class AssertingPostingsConsumer : PostingsConsumer
-        {
-            internal readonly PostingsConsumer @in;
-            private readonly FieldInfo fieldInfo;
-            internal readonly OpenBitSet VisitedDocs;
-            internal PostingsConsumerState State = PostingsConsumerState.INITIAL;
-            internal int Freq;
-            internal int PositionCount;
-            internal int LastPosition = 0;
-            internal int LastStartOffset = 0;
-            internal int DocFreq = 0;
-            internal long TotalTermFreq = 0;
-
-            internal AssertingPostingsConsumer(PostingsConsumer @in, FieldInfo fieldInfo, OpenBitSet visitedDocs)
-            {
-                this.@in = @in;
-                this.fieldInfo = fieldInfo;
-                this.VisitedDocs = visitedDocs;
-            }
-
-            public override void StartDoc(int docID, int freq)
-            {
-                Debug.Assert(State == PostingsConsumerState.INITIAL);
-                State = PostingsConsumerState.START;
-                Debug.Assert(docID >= 0);
-                if (fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY)
-                {
-                    Debug.Assert(freq == -1);
-                    this.Freq = 0; // we don't expect any positions here
-                }
-                else
-                {
-                    Debug.Assert(freq > 0);
-                    this.Freq = freq;
-                    TotalTermFreq += freq;
-                }
-                this.PositionCount = 0;
-                this.LastPosition = 0;
-                this.LastStartOffset = 0;
-                DocFreq++;
-                VisitedDocs.Set(docID);
-                @in.StartDoc(docID, freq);
-            }
-
-            public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset)
-            {
-                Debug.Assert(State == PostingsConsumerState.START);
-                Debug.Assert(PositionCount < Freq);
-                PositionCount++;
-                Debug.Assert(position >= LastPosition || position == -1); // we still allow -1 from old 3.x indexes
-                LastPosition = position;
-                if (fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
-                {
-                    Debug.Assert(startOffset >= 0);
-                    Debug.Assert(startOffset >= LastStartOffset);
-                    LastStartOffset = startOffset;
-                    Debug.Assert(endOffset >= startOffset);
-                }
-                else
-                {
-                    Debug.Assert(startOffset == -1);
-                    Debug.Assert(endOffset == -1);
-                }
-                if (payload != null)
-                {
-                    Debug.Assert(fieldInfo.HasPayloads);
-                }
-                @in.AddPosition(position, payload, startOffset, endOffset);
-            }
-
-            public override void FinishDoc()
-            {
-                Debug.Assert(State == PostingsConsumerState.START);
-                State = PostingsConsumerState.INITIAL;
-                if (fieldInfo.IndexOptions < IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)
-                {
-                    Debug.Assert(PositionCount == 0); // we should not have fed any positions!
-                }
-                else
-                {
-                    Debug.Assert(PositionCount == Freq);
-                }
-                @in.FinishDoc();
-            }
-        }
-    }
-}
\ No newline at end of file
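
The Finish checks in AssertingTermsConsumer tie the per-term statistics to the field-level totals passed in. A small worked example of the arithmetic behind those asserts:

    // Suppose a field holds two terms across three documents:
    //   term "a": docs {0, 2}, 3 occurrences in total -> docFreq = 2, totalTermFreq = 3
    //   term "b": docs {2},    1 occurrence           -> docFreq = 1, totalTermFreq = 1
    //
    // Finish(sumTotalTermFreq, sumDocFreq, docCount) must then receive:
    //   sumDocFreq       = 2 + 1 = 3    (sum of per-term docFreq)
    //   sumTotalTermFreq = 3 + 1 = 4    (sum of per-term totalTermFreq; -1 for DOCS_ONLY)
    //   docCount         = |{0, 2}| = 2 (distinct documents, the VisitedDocs cardinality)
    //
    // The ordering asserts hold as well: sumDocFreq (3) >= docCount (2) and
    // sumTotalTermFreq (4) >= sumDocFreq (3).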

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/77e95ccc/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingStoredFieldsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingStoredFieldsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingStoredFieldsFormat.cs
deleted file mode 100644
index 808ed9a..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingStoredFieldsFormat.cs
+++ /dev/null
@@ -1,154 +0,0 @@
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.asserting
-{
-    using Directory = Lucene.Net.Store.Directory;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IIndexableField = Lucene.Net.Index.IIndexableField;
-    using IOContext = Lucene.Net.Store.IOContext;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using Lucene41StoredFieldsFormat = Lucene.Net.Codecs.Lucene41.Lucene41StoredFieldsFormat;
-    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
-    using StoredFieldVisitor = Lucene.Net.Index.StoredFieldVisitor;
-
-    /// <summary>
-    /// Just like <seealso cref="Lucene41StoredFieldsFormat"/> but with additional asserts.
-    /// </summary>
-    public class AssertingStoredFieldsFormat : StoredFieldsFormat
-    {
-        private readonly StoredFieldsFormat @in = new Lucene41StoredFieldsFormat();
-
-        public override StoredFieldsReader FieldsReader(Directory directory, SegmentInfo si, FieldInfos fn, IOContext context)
-        {
-            return new AssertingStoredFieldsReader(@in.FieldsReader(directory, si, fn, context), si.DocCount);
-        }
-
-        public override StoredFieldsWriter FieldsWriter(Directory directory, SegmentInfo si, IOContext context)
-        {
-            return new AssertingStoredFieldsWriter(@in.FieldsWriter(directory, si, context));
-        }
-
-        internal class AssertingStoredFieldsReader : StoredFieldsReader
-        {
-            internal readonly StoredFieldsReader @in;
-            internal readonly int MaxDoc;
-
-            internal AssertingStoredFieldsReader(StoredFieldsReader @in, int maxDoc)
-            {
-                this.@in = @in;
-                this.MaxDoc = maxDoc;
-            }
-
-            protected override void Dispose(bool disposing)
-            {
-                if (disposing)
-                    @in.Dispose();
-            }
-
-            public override void VisitDocument(int n, StoredFieldVisitor visitor)
-            {
-                Debug.Assert(n >= 0 && n < MaxDoc);
-                @in.VisitDocument(n, visitor);
-            }
-
-            public override object Clone()
-            {
-                return new AssertingStoredFieldsReader((StoredFieldsReader)@in.Clone(), MaxDoc);
-            }
-
-            public override long RamBytesUsed()
-            {
-                return @in.RamBytesUsed();
-            }
-
-            public override void CheckIntegrity()
-            {
-                @in.CheckIntegrity();
-            }
-        }
-
-        internal enum Status
-        {
-            UNDEFINED,
-            STARTED,
-            FINISHED
-        }
-
-        internal class AssertingStoredFieldsWriter : StoredFieldsWriter
-        {
-            internal readonly StoredFieldsWriter @in;
-            internal int NumWritten;
-            internal int FieldCount;
-            internal Status DocStatus;
-
-            internal AssertingStoredFieldsWriter(StoredFieldsWriter @in)
-            {
-                this.@in = @in;
-                this.DocStatus = Status.UNDEFINED;
-            }
-
-            public override void StartDocument(int numStoredFields)
-            {
-                Debug.Assert(DocStatus != Status.STARTED);
-                @in.StartDocument(numStoredFields);
-                Debug.Assert(FieldCount == 0);
-                FieldCount = numStoredFields;
-                NumWritten++;
-                DocStatus = Status.STARTED;
-            }
-
-            public override void FinishDocument()
-            {
-                Debug.Assert(DocStatus == Status.STARTED);
-                Debug.Assert(FieldCount == 0);
-                @in.FinishDocument();
-                DocStatus = Status.FINISHED;
-            }
-
-            public override void WriteField(FieldInfo info, IIndexableField field)
-            {
-                Debug.Assert(DocStatus == Status.STARTED);
-                @in.WriteField(info, field);
-                Debug.Assert(FieldCount > 0);
-                FieldCount--;
-            }
-
-            public override void Abort()
-            {
-                @in.Abort();
-            }
-
-            public override void Finish(FieldInfos fis, int numDocs)
-            {
-                Debug.Assert(DocStatus == (numDocs > 0 ? Status.FINISHED : Status.UNDEFINED));
-                @in.Finish(fis, numDocs);
-                Debug.Assert(FieldCount == 0);
-                Debug.Assert(numDocs == NumWritten);
-            }
-
-            protected override void Dispose(bool disposing)
-            {
-                if (disposing)
-                    @in.Dispose();
-            }
-        }
-    }
-}
\ No newline at end of file


[33/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs b/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs
new file mode 100644
index 0000000..001f9e9
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestFlushByRamOrCountsPolicy.cs
@@ -0,0 +1,477 @@
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.Index
+{
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using Document = Lucene.Net.Documents.Document;
+    using ThreadState = Lucene.Net.Index.DocumentsWriterPerThreadPool.ThreadState;
+    using Directory = Lucene.Net.Store.Directory;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using LineFileDocs = Lucene.Net.Util.LineFileDocs;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+
+    [TestFixture]
+    public class TestFlushByRamOrCountsPolicy : LuceneTestCase
+    {
+
+        private static LineFileDocs LineDocFile;
+
+        [OneTimeSetUp]
+        public static void BeforeClass()
+        {
+            LineDocFile = new LineFileDocs(Random(), DefaultCodecSupportsDocValues());
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            LineDocFile.Dispose();
+            LineDocFile = null;
+        }
+
+        [Test]
+        public virtual void TestFlushByRam()
+        {
+            double ramBuffer = (TEST_NIGHTLY ? 1 : 10) + AtLeast(2) + Random().NextDouble();
+            RunFlushByRam(1 + Random().Next(TEST_NIGHTLY ? 5 : 1), ramBuffer, false);
+        }
+
+        [Test]
+        public virtual void TestFlushByRamLargeBuffer()
+        {
+            // with a 256 MB RAM buffer we should never stall
+            RunFlushByRam(1 + Random().Next(TEST_NIGHTLY ? 5 : 1), 256d, true);
+        }
+
+        protected internal virtual void RunFlushByRam(int numThreads, double maxRamMB, bool ensureNotStalled)
+        {
+            int numDocumentsToIndex = 10 + AtLeast(30);
+            AtomicInt32 numDocs = new AtomicInt32(numDocumentsToIndex);
+            Directory dir = NewDirectory();
+            MockDefaultFlushPolicy flushPolicy = new MockDefaultFlushPolicy();
+            MockAnalyzer analyzer = new MockAnalyzer(Random());
+            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
+
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetFlushPolicy(flushPolicy);
+            int numDWPT = 1 + AtLeast(2);
+            DocumentsWriterPerThreadPool threadPool = new ThreadAffinityDocumentsWriterThreadPool(numDWPT);
+            iwc.SetIndexerThreadPool(threadPool);
+            iwc.SetRAMBufferSizeMB(maxRamMB);
+            iwc.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+            iwc.SetMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+            IndexWriter writer = new IndexWriter(dir, iwc);
+            flushPolicy = (MockDefaultFlushPolicy)writer.Config.FlushPolicy;
+            Assert.IsFalse(flushPolicy.FlushOnDocCount);
+            Assert.IsFalse(flushPolicy.FlushOnDeleteTerms);
+            Assert.IsTrue(flushPolicy.FlushOnRAM);
+            DocumentsWriter docsWriter = writer.DocsWriter;
+            Assert.IsNotNull(docsWriter);
+            DocumentsWriterFlushControl flushControl = docsWriter.flushControl;
+            Assert.AreEqual(0, flushControl.FlushBytes, " bytes must be 0 after init");
+
+            IndexThread[] threads = new IndexThread[numThreads];
+            for (int x = 0; x < threads.Length; x++)
+            {
+                threads[x] = new IndexThread(this, numDocs, numThreads, writer, LineDocFile, false);
+                threads[x].Start();
+            }
+
+            for (int x = 0; x < threads.Length; x++)
+            {
+                threads[x].Join();
+            }
+            long maxRAMBytes = (long)(iwc.RAMBufferSizeMB * 1024.0 * 1024.0);
+            Assert.AreEqual(0, flushControl.FlushBytes, " all flushes must be due numThreads=" + numThreads);
+            Assert.AreEqual(numDocumentsToIndex, writer.NumDocs);
+            Assert.AreEqual(numDocumentsToIndex, writer.MaxDoc);
+            Assert.IsTrue(flushPolicy.PeakBytesWithoutFlush <= maxRAMBytes, "peak bytes without flush exceeded watermark");
+            AssertActiveBytesAfter(flushControl);
+            if (flushPolicy.HasMarkedPending)
+            {
+                Assert.IsTrue(maxRAMBytes < flushControl.peakActiveBytes);
+            }
+            if (ensureNotStalled)
+            {
+                Assert.IsFalse(docsWriter.flushControl.stallControl.WasStalled);
+            }
+            writer.Dispose();
+            Assert.AreEqual(0, flushControl.ActiveBytes);
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestFlushDocCount()
+        {
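+            // Here flushing is triggered by MaxBufferedDocs alone; the RAM and
+            // delete-term triggers are disabled below, and the peak doc count seen
+            // without a flush must stay within the configured limit.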
+            int[] numThreads = new int[] { 2 + AtLeast(1), 1 };
+            for (int i = 0; i < numThreads.Length; i++)
+            {
+
+                int numDocumentsToIndex = 50 + AtLeast(30);
+                AtomicInt32 numDocs = new AtomicInt32(numDocumentsToIndex);
+                Directory dir = NewDirectory();
+                MockDefaultFlushPolicy flushPolicy = new MockDefaultFlushPolicy();
+                IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetFlushPolicy(flushPolicy);
+
+                int numDWPT = 1 + AtLeast(2);
+                DocumentsWriterPerThreadPool threadPool = new ThreadAffinityDocumentsWriterThreadPool(numDWPT);
+                iwc.SetIndexerThreadPool(threadPool);
+                iwc.SetMaxBufferedDocs(2 + AtLeast(10));
+                iwc.SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+                iwc.SetMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+                IndexWriter writer = new IndexWriter(dir, iwc);
+                flushPolicy = (MockDefaultFlushPolicy)writer.Config.FlushPolicy;
+                Assert.IsTrue(flushPolicy.FlushOnDocCount);
+                Assert.IsFalse(flushPolicy.FlushOnDeleteTerms);
+                Assert.IsFalse(flushPolicy.FlushOnRAM);
+                DocumentsWriter docsWriter = writer.DocsWriter;
+                Assert.IsNotNull(docsWriter);
+                DocumentsWriterFlushControl flushControl = docsWriter.flushControl;
+                Assert.AreEqual(0, flushControl.FlushBytes, "flush bytes must be 0 after init");
+
+                IndexThread[] threads = new IndexThread[numThreads[i]];
+                for (int x = 0; x < threads.Length; x++)
+                {
+                    threads[x] = new IndexThread(this, numDocs, numThreads[i], writer, LineDocFile, false);
+                    threads[x].Start();
+                }
+
+                for (int x = 0; x < threads.Length; x++)
+                {
+                    threads[x].Join();
+                }
+
+                Assert.AreEqual(0, flushControl.FlushBytes, "all flushes must be due; numThreads=" + numThreads[i]);
+                Assert.AreEqual(numDocumentsToIndex, writer.NumDocs);
+                Assert.AreEqual(numDocumentsToIndex, writer.MaxDoc);
+                Assert.IsTrue(flushPolicy.PeakDocCountWithoutFlush <= iwc.MaxBufferedDocs, "peak doc count without flush exceeded watermark");
+                AssertActiveBytesAfter(flushControl);
+                writer.Dispose();
+                Assert.AreEqual(0, flushControl.ActiveBytes);
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestRandom()
+        {
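+            // Leave the flush triggers at whatever the randomized config chose;
+            // the assertions below only apply when the matching trigger is active.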
+            int numThreads = 1 + Random().Next(8);
+            int numDocumentsToIndex = 50 + AtLeast(70);
+            AtomicInt32 numDocs = new AtomicInt32(numDocumentsToIndex);
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            MockDefaultFlushPolicy flushPolicy = new MockDefaultFlushPolicy();
+            iwc.SetFlushPolicy(flushPolicy);
+
+            int numDWPT = 1 + Random().Next(8);
+            DocumentsWriterPerThreadPool threadPool = new ThreadAffinityDocumentsWriterThreadPool(numDWPT);
+            iwc.SetIndexerThreadPool(threadPool);
+
+            IndexWriter writer = new IndexWriter(dir, iwc);
+            flushPolicy = (MockDefaultFlushPolicy)writer.Config.FlushPolicy;
+            DocumentsWriter docsWriter = writer.DocsWriter;
+            Assert.IsNotNull(docsWriter);
+            DocumentsWriterFlushControl flushControl = docsWriter.flushControl;
+
+            Assert.AreEqual(0, flushControl.FlushBytes, "flush bytes must be 0 after init");
+
+            IndexThread[] threads = new IndexThread[numThreads];
+            for (int x = 0; x < threads.Length; x++)
+            {
+                threads[x] = new IndexThread(this, numDocs, numThreads, writer, LineDocFile, true);
+                threads[x].Start();
+            }
+
+            for (int x = 0; x < threads.Length; x++)
+            {
+                threads[x].Join();
+            }
+            Assert.AreEqual(0, flushControl.FlushBytes, "all flushes must be due");
+            Assert.AreEqual(numDocumentsToIndex, writer.NumDocs);
+            Assert.AreEqual(numDocumentsToIndex, writer.MaxDoc);
+            if (flushPolicy.FlushOnRAM && !flushPolicy.FlushOnDocCount && !flushPolicy.FlushOnDeleteTerms)
+            {
+                long maxRAMBytes = (long)(iwc.RAMBufferSizeMB * 1024.0 * 1024.0);
+                Assert.IsTrue(flushPolicy.PeakBytesWithoutFlush <= maxRAMBytes, "peak bytes without flush exceeded watermark");
+                if (flushPolicy.HasMarkedPending)
+                {
+                    assertTrue("max: " + maxRAMBytes + " " + flushControl.peakActiveBytes, maxRAMBytes <= flushControl.peakActiveBytes);
+                }
+            }
+            AssertActiveBytesAfter(flushControl);
+            writer.Commit();
+            Assert.AreEqual(0, flushControl.ActiveBytes);
+            IndexReader r = DirectoryReader.Open(dir);
+            Assert.AreEqual(numDocumentsToIndex, r.NumDocs);
+            Assert.AreEqual(numDocumentsToIndex, r.MaxDoc);
+            if (!flushPolicy.FlushOnRAM)
+            {
+                assertFalse("never stall if we don't flush on RAM", docsWriter.flushControl.stallControl.WasStalled);
+                assertFalse("never block if we don't flush on RAM", docsWriter.flushControl.stallControl.HasBlocked);
+            }
+            r.Dispose();
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestStallControl()
+        {
+
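+            // Use a tiny RAM buffer on a sometimes-throttled directory so flushing
+            // cannot keep up with indexing; concurrent threads should hit the stall
+            // control, while a single indexing thread must never block.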
+            int[] numThreads = new int[] { 4 + Random().Next(8), 1 };
+            int numDocumentsToIndex = 50 + Random().Next(50);
+            for (int i = 0; i < numThreads.Length; i++)
+            {
+                AtomicInt32 numDocs = new AtomicInt32(numDocumentsToIndex);
+                MockDirectoryWrapper dir = NewMockDirectory();
+                // mock a very slow hard disk here sometimes so that flushing is very slow
+                dir.Throttling = MockDirectoryWrapper.Throttling_e.SOMETIMES;
+                IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+                iwc.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+                iwc.SetMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+                FlushPolicy flushPolicy = new FlushByRamOrCountsPolicy();
+                iwc.SetFlushPolicy(flushPolicy);
+
+                DocumentsWriterPerThreadPool threadPool = new ThreadAffinityDocumentsWriterThreadPool(numThreads[i] == 1 ? 1 : 2);
+                iwc.SetIndexerThreadPool(threadPool);
+                // with such a small RAM buffer we should be stalled quite quickly
+                iwc.SetRAMBufferSizeMB(0.25);
+                IndexWriter writer = new IndexWriter(dir, iwc);
+                IndexThread[] threads = new IndexThread[numThreads[i]];
+                for (int x = 0; x < threads.Length; x++)
+                {
+                    threads[x] = new IndexThread(this, numDocs, numThreads[i], writer, LineDocFile, false);
+                    threads[x].Start();
+                }
+
+                for (int x = 0; x < threads.Length; x++)
+                {
+                    threads[x].Join();
+                }
+                DocumentsWriter docsWriter = writer.DocsWriter;
+                Assert.IsNotNull(docsWriter);
+                DocumentsWriterFlushControl flushControl = docsWriter.flushControl;
+                Assert.AreEqual(0, flushControl.FlushBytes, "all flushes must be due");
+                Assert.AreEqual(numDocumentsToIndex, writer.NumDocs);
+                Assert.AreEqual(numDocumentsToIndex, writer.MaxDoc);
+                if (numThreads[i] == 1)
+                {
+                    assertFalse("single thread must not block numThreads: " + numThreads[i], docsWriter.flushControl.stallControl.HasBlocked);
+                }
+                if (docsWriter.flushControl.peakNetBytes > (2d * iwc.RAMBufferSizeMB * 1024d * 1024d))
+                {
+                    Assert.IsTrue(docsWriter.flushControl.stallControl.WasStalled);
+                }
+                AssertActiveBytesAfter(flushControl);
+                writer.Dispose(true);
+                dir.Dispose();
+            }
+        }
+
+        internal virtual void AssertActiveBytesAfter(DocumentsWriterFlushControl flushControl)
+        {
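+            // ActiveBytes must equal the sum of bytes used by all still-active
+            // DocumentsWriterPerThread instances.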
+            IEnumerator<ThreadState> allActiveThreads = flushControl.AllActiveThreadStates();
+            long bytesUsed = 0;
+            while (allActiveThreads.MoveNext())
+            {
+                ThreadState next = allActiveThreads.Current;
+                if (next.DocumentsWriterPerThread != null)
+                {
+                    bytesUsed += next.DocumentsWriterPerThread.BytesUsed;
+                }
+            }
+            Assert.AreEqual(bytesUsed, flushControl.ActiveBytes);
+        }
+
+        public class IndexThread : ThreadClass
+        {
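+            // Indexing worker: pulls docs from a shared LineFileDocs until the
+            // shared pending-doc counter is exhausted, optionally committing at
+            // random points along the way.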
+            private readonly TestFlushByRamOrCountsPolicy OuterInstance;
+
+            internal IndexWriter Writer;
+            internal LiveIndexWriterConfig Iwc;
+            internal LineFileDocs Docs;
+            internal AtomicInt32 PendingDocs;
+            internal readonly bool DoRandomCommit;
+
+            public IndexThread(TestFlushByRamOrCountsPolicy outerInstance, AtomicInt32 pendingDocs, int numThreads, IndexWriter writer, LineFileDocs docs, bool doRandomCommit)
+            {
+                this.OuterInstance = outerInstance;
+                this.PendingDocs = pendingDocs;
+                this.Writer = writer;
+                Iwc = writer.Config;
+                this.Docs = docs;
+                this.DoRandomCommit = doRandomCommit;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    long ramSize = 0;
+                    while (PendingDocs.DecrementAndGet() > -1)
+                    {
+                        Document doc = Docs.NextDoc();
+                        Writer.AddDocument(doc);
+                        long newRamSize = Writer.RamSizeInBytes();
+                        if (newRamSize != ramSize)
+                        {
+                            ramSize = newRamSize;
+                        }
+                        if (DoRandomCommit)
+                        {
+                            if (Rarely())
+                            {
+                                Writer.Commit();
+                            }
+                        }
+                    }
+                    Writer.Commit();
+                }
+                catch (Exception ex)
+                {
+                    Console.WriteLine("FAILED exc:");
+                    Console.WriteLine(ex.StackTrace);
+                    throw new Exception(ex.Message, ex);
+                }
+            }
+        }
+
+        private class MockDefaultFlushPolicy : FlushByRamOrCountsPolicy
+        {
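+            // Records the peak bytes / doc counts observed without a flush and
+            // verifies that OnInsert/OnDelete mark exactly the expected ThreadState
+            // as flush-pending.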
+            internal long PeakBytesWithoutFlush = int.MinValue;
+            internal long PeakDocCountWithoutFlush = int.MinValue;
+            internal bool HasMarkedPending = false;
+
+            public override void OnDelete(DocumentsWriterFlushControl control, ThreadState state)
+            {
+                List<ThreadState> pending = new List<ThreadState>();
+                List<ThreadState> notPending = new List<ThreadState>();
+                FindPending(control, pending, notPending);
+                bool flushCurrent = state.IsFlushPending;
+                ThreadState toFlush;
+                if (state.IsFlushPending)
+                {
+                    toFlush = state;
+                }
+                else if (FlushOnDeleteTerms && state.DocumentsWriterPerThread.NumDeleteTerms >= m_indexWriterConfig.MaxBufferedDeleteTerms)
+                {
+                    toFlush = state;
+                }
+                else
+                {
+                    toFlush = null;
+                }
+                base.OnDelete(control, state);
+                if (toFlush != null)
+                {
+                    if (flushCurrent)
+                    {
+                        Assert.IsTrue(pending.Remove(toFlush));
+                    }
+                    else
+                    {
+                        Assert.IsTrue(notPending.Remove(toFlush));
+                    }
+                    Assert.IsTrue(toFlush.IsFlushPending);
+                    HasMarkedPending = true;
+                }
+
+                foreach (ThreadState threadState in notPending)
+                {
+                    Assert.IsFalse(threadState.IsFlushPending);
+                }
+            }
+
+            public override void OnInsert(DocumentsWriterFlushControl control, ThreadState state)
+            {
+                List<ThreadState> pending = new List<ThreadState>();
+                List<ThreadState> notPending = new List<ThreadState>();
+                FindPending(control, pending, notPending);
+                bool flushCurrent = state.IsFlushPending;
+                long activeBytes = control.ActiveBytes;
+                ThreadState toFlush;
+                if (state.IsFlushPending)
+                {
+                    toFlush = state;
+                }
+                else if (FlushOnDocCount && state.DocumentsWriterPerThread.NumDocsInRAM >= m_indexWriterConfig.MaxBufferedDocs)
+                {
+                    toFlush = state;
+                }
+                else if (FlushOnRAM && activeBytes >= (long)(m_indexWriterConfig.RAMBufferSizeMB * 1024.0 * 1024.0))
+                {
+                    toFlush = FindLargestNonPendingWriter(control, state);
+                    Assert.IsFalse(toFlush.IsFlushPending);
+                }
+                else
+                {
+                    toFlush = null;
+                }
+                base.OnInsert(control, state);
+                if (toFlush != null)
+                {
+                    if (flushCurrent)
+                    {
+                        Assert.IsTrue(pending.Remove(toFlush));
+                    }
+                    else
+                    {
+                        Assert.IsTrue(notPending.Remove(toFlush));
+                    }
+                    Assert.IsTrue(toFlush.IsFlushPending);
+                    HasMarkedPending = true;
+                }
+                else
+                {
+                    PeakBytesWithoutFlush = Math.Max(activeBytes, PeakBytesWithoutFlush);
+                    PeakDocCountWithoutFlush = Math.Max(state.DocumentsWriterPerThread.NumDocsInRAM, PeakDocCountWithoutFlush);
+                }
+
+                foreach (ThreadState threadState in notPending)
+                {
+                    Assert.IsFalse(threadState.IsFlushPending);
+                }
+            }
+        }
+
+        internal static void FindPending(DocumentsWriterFlushControl flushControl, List<ThreadState> pending, List<ThreadState> notPending)
+        {
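+            // Partition all active thread states into those already marked
+            // flush-pending and those that are not.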
+            IEnumerator<ThreadState> allActiveThreads = flushControl.AllActiveThreadStates();
+            while (allActiveThreads.MoveNext())
+            {
+                ThreadState next = allActiveThreads.Current;
+                if (next.IsFlushPending)
+                {
+                    pending.Add(next);
+                }
+                else
+                {
+                    notPending.Add(next);
+                }
+            }
+        }
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestForTooMuchCloning.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestForTooMuchCloning.cs b/src/Lucene.Net.Tests/Index/TestForTooMuchCloning.cs
new file mode 100644
index 0000000..4218d04
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestForTooMuchCloning.cs
@@ -0,0 +1,86 @@
+using System.Text;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using TermRangeQuery = Lucene.Net.Search.TermRangeQuery;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+    using TopDocs = Lucene.Net.Search.TopDocs;
+
+    [TestFixture]
+    public class TestForTooMuchCloning : LuceneTestCase
+    {
+        // Make sure we don't clone IndexInputs too frequently
+        // during merging:
+        [Test]
+        public virtual void Test()
+        {
+            // NOTE: if we see a fail on this test with "NestedPulsing" it's because its
+            // reuse isn't perfect (but reasonable). See TestPulsingReuse.testNestedPulsing
+            // for more details.
+            MockDirectoryWrapper dir = NewMockDirectory();
+            TieredMergePolicy tmp = new TieredMergePolicy();
+            tmp.MaxMergeAtOnce = 2;
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetMergePolicy(tmp));
+            const int numDocs = 20;
+            for (int docs = 0; docs < numDocs; docs++)
+            {
+                StringBuilder sb = new StringBuilder();
+                for (int terms = 0; terms < 100; terms++)
+                {
+                    sb.Append(TestUtil.RandomRealisticUnicodeString(Random()));
+                    sb.Append(' ');
+                }
+                Document doc = new Document();
+                doc.Add(new TextField("field", sb.ToString(), Field.Store.NO));
+                w.AddDocument(doc);
+            }
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            int cloneCount = dir.InputCloneCount;
+            //System.out.println("merge clone count=" + cloneCount);
+            Assert.IsTrue(cloneCount < 500, "too many calls to IndexInput.clone during merging: " + dir.InputCloneCount);
+
+            IndexSearcher s = NewSearcher(r);
+
+            // MTQ that matches all terms, so AUTO_REWRITE should
+            // cut over to filter rewrite and reuse a single DocsEnum
+            // across all terms:
+            TopDocs hits = s.Search(new TermRangeQuery("field", new BytesRef(), new BytesRef("\uFFFF"), true, true), 10);
+            Assert.IsTrue(hits.TotalHits > 0);
+            int queryCloneCount = dir.InputCloneCount - cloneCount;
+            //System.out.println("query clone count=" + queryCloneCount);
+            Assert.IsTrue(queryCloneCount < 50, "too many calls to IndexInput.clone during TermRangeQuery: " + queryCloneCount);
+            r.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestForceMergeForever.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestForceMergeForever.cs b/src/Lucene.Net.Tests/Index/TestForceMergeForever.cs
new file mode 100644
index 0000000..12145b1
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestForceMergeForever.cs
@@ -0,0 +1,144 @@
+using System;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using LineFileDocs = Lucene.Net.Util.LineFileDocs;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestForceMergeForever : LuceneTestCase
+    {
+        // Just counts how many merges are done
+        private class MyIndexWriter : IndexWriter
+        {
+            internal AtomicInt32 MergeCount = new AtomicInt32();
+            internal bool First;
+
+            public MyIndexWriter(Directory dir, IndexWriterConfig conf)
+                : base(dir, conf)
+            {
+            }
+
+            public override void Merge(MergePolicy.OneMerge merge)
+            {
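+                // Count only forced merges (MaxNumSegments != -1); the test asserts
+                // that at most one such merge runs while documents are updated.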
+                if (merge.MaxNumSegments != -1 && (First || merge.Segments.Count == 1))
+                {
+                    First = false;
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: maxNumSegments merge");
+                    }
+                    MergeCount.IncrementAndGet();
+                }
+                base.Merge(merge);
+            }
+        }
+
+        [Test]
+        public virtual void Test()
+        {
+            Directory d = NewDirectory();
+            MockAnalyzer analyzer = new MockAnalyzer(Random());
+            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
+
+            MyIndexWriter w = new MyIndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
+
+            // Try to make an index that requires merging:
+            w.Config.SetMaxBufferedDocs(TestUtil.NextInt(Random(), 2, 11));
+            int numStartDocs = AtLeast(20);
+            LineFileDocs docs = new LineFileDocs(Random(), DefaultCodecSupportsDocValues());
+            for (int docIDX = 0; docIDX < numStartDocs; docIDX++)
+            {
+                w.AddDocument(docs.NextDoc());
+            }
+            MergePolicy mp = w.Config.MergePolicy;
+            int mergeAtOnce = 1 + w.segmentInfos.Count;
+            if (mp is TieredMergePolicy)
+            {
+                ((TieredMergePolicy)mp).MaxMergeAtOnce = mergeAtOnce;
+            }
+            else if (mp is LogMergePolicy)
+            {
+                ((LogMergePolicy)mp).MergeFactor = mergeAtOnce;
+            }
+            else
+            {
+                // skip test
+                w.Dispose();
+                d.Dispose();
+                return;
+            }
+
+            AtomicBoolean doStop = new AtomicBoolean();
+            w.Config.SetMaxBufferedDocs(2);
+            ThreadClass t = new ThreadAnonymousInnerClassHelper(this, w, numStartDocs, docs, doStop);
+            t.Start();
+            w.ForceMerge(1);
+            doStop.Set(true);
+            t.Join();
+            Assert.IsTrue(w.MergeCount.Get() <= 1, "merge count is " + w.MergeCount.Get());
+            w.Dispose();
+            d.Dispose();
+            docs.Dispose();
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestForceMergeForever OuterInstance;
+
+            private MyIndexWriter w;
+            private int NumStartDocs;
+            private LineFileDocs Docs;
+            private AtomicBoolean DoStop;
+
+            public ThreadAnonymousInnerClassHelper(TestForceMergeForever outerInstance, MyIndexWriter w, int numStartDocs, LineFileDocs docs, AtomicBoolean doStop)
+            {
+                this.OuterInstance = outerInstance;
+                this.w = w;
+                this.NumStartDocs = numStartDocs;
+                this.Docs = docs;
+                this.DoStop = doStop;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    while (!DoStop.Get())
+                    {
+                        w.UpdateDocument(new Term("docid", "" + Random().Next(NumStartDocs)), Docs.NextDoc());
+                        // Force deletes to apply
+                        w.Reader.Dispose();
+                    }
+                }
+                catch (Exception t)
+                {
+                    throw new Exception(t.Message, t);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexCommit.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexCommit.cs b/src/Lucene.Net.Tests/Index/TestIndexCommit.cs
new file mode 100644
index 0000000..2cf47cb
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexCommit.cs
@@ -0,0 +1,191 @@
+using System.Collections.Generic;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Directory = Lucene.Net.Store.Directory;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    [TestFixture]
+    public class TestIndexCommit : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestEqualsHashCode()
+        {
+            // LUCENE-2417: equals and hashCode() impl was inconsistent
+            Directory dir = NewDirectory();
+
+            IndexCommit ic1 = new IndexCommitAnonymousInnerClassHelper(this, dir);
+
+            IndexCommit ic2 = new IndexCommitAnonymousInnerClassHelper2(this, dir);
+
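+            // Both commits share the same Directory and generation, so they must
+            // compare equal and hash equal even though their segments files differ.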
+            Assert.AreEqual(ic1, ic2);
+            Assert.AreEqual(ic1.GetHashCode(), ic2.GetHashCode(), "hash codes are not equal");
+            dir.Dispose();
+        }
+
+        private class IndexCommitAnonymousInnerClassHelper : IndexCommit
+        {
+            private readonly TestIndexCommit OuterInstance;
+
+            private Directory Dir;
+
+            public IndexCommitAnonymousInnerClassHelper(TestIndexCommit outerInstance, Directory dir)
+            {
+                this.OuterInstance = outerInstance;
+                this.Dir = dir;
+            }
+
+            public override string SegmentsFileName
+            {
+                get
+                {
+                    return "a";
+                }
+            }
+
+            public override Directory Directory
+            {
+                get
+                {
+                    return Dir;
+                }
+            }
+
+            public override ICollection<string> FileNames
+            {
+                get
+                {
+                    return null;
+                }
+            }
+
+            public override void Delete()
+            {
+            }
+
+            public override long Generation
+            {
+                get
+                {
+                    return 0;
+                }
+            }
+
+            public override IDictionary<string, string> UserData
+            {
+                get
+                {
+                    return null;
+                }
+            }
+
+            public override bool IsDeleted
+            {
+                get
+                {
+                    return false;
+                }
+            }
+
+            public override int SegmentCount
+            {
+                get
+                {
+                    return 2;
+                }
+            }
+        }
+
+        private class IndexCommitAnonymousInnerClassHelper2 : IndexCommit
+        {
+            private readonly TestIndexCommit OuterInstance;
+
+            private Directory Dir;
+
+            public IndexCommitAnonymousInnerClassHelper2(TestIndexCommit outerInstance, Directory dir)
+            {
+                this.OuterInstance = outerInstance;
+                this.Dir = dir;
+            }
+
+            public override string SegmentsFileName
+            {
+                get
+                {
+                    return "b";
+                }
+            }
+
+            public override Directory Directory
+            {
+                get
+                {
+                    return Dir;
+                }
+            }
+
+            public override ICollection<string> FileNames
+            {
+                get
+                {
+                    return null;
+                }
+            }
+
+            public override void Delete()
+            {
+            }
+
+            public override long Generation
+            {
+                get
+                {
+                    return 0;
+                }
+            }
+
+            public override IDictionary<string, string> UserData
+            {
+                get
+                {
+                    return null;
+                }
+            }
+
+            public override bool IsDeleted
+            {
+                get
+                {
+                    return false;
+                }
+            }
+
+            public override int SegmentCount
+            {
+                get
+                {
+                    return 2;
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexFileDeleter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexFileDeleter.cs b/src/Lucene.Net.Tests/Index/TestIndexFileDeleter.cs
new file mode 100644
index 0000000..7b83e68
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexFileDeleter.cs
@@ -0,0 +1,218 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Support;
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexInput = Lucene.Net.Store.IndexInput;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+
+    /*
+      Verify that IndexFileDeleter removes leftover and
+      unreferenced files from the index directory.
+    */
+
+    [TestFixture]
+    public class TestIndexFileDeleter : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestDeleteLeftoverFiles()
+        {
+            Directory dir = NewDirectory();
+            if (dir is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)dir).PreventDoubleWrite = false;
+            }
+
+            MergePolicy mergePolicy = NewLogMergePolicy(true, 10);
+
+            // this test expects all of its segments to be in CFS
+            mergePolicy.NoCFSRatio = 1.0;
+            mergePolicy.MaxCFSSegmentSizeMB = double.PositiveInfinity;
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(10).SetMergePolicy(mergePolicy).SetUseCompoundFile(true));
+
+            int i;
+            for (i = 0; i < 35; i++)
+            {
+                AddDoc(writer, i);
+            }
+            writer.Config.MergePolicy.NoCFSRatio = 0.0;
+            writer.Config.SetUseCompoundFile(false);
+            for (; i < 45; i++)
+            {
+                AddDoc(writer, i);
+            }
+            writer.Dispose();
+
+            // Delete one doc so we get a .del file:
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).SetUseCompoundFile(true));
+            Term searchTerm = new Term("id", "7");
+            writer.DeleteDocuments(searchTerm);
+            writer.Dispose();
+
+            // Now, artificially create an extra .del file and
+            // other bogus leftover files:
+            string[] files = dir.ListAll();
+
+            /*
+            for (int j = 0; j < files.Length; j++) {
+              Console.WriteLine(j + ": " + files[j]);
+            }
+            */
+
+            // TODO: fix this test better
+            string ext = Codec.Default.Name.Equals("SimpleText") ? ".liv" : ".del";
+
+            // Create a bogus separate del file for a
+            // segment that already has a separate del file:
+            CopyFile(dir, "_0_1" + ext, "_0_2" + ext);
+
+            // Create a bogus separate del file for a
+            // segment that does not yet have a separate del file:
+            CopyFile(dir, "_0_1" + ext, "_1_1" + ext);
+
+            // Create a bogus separate del file for a
+            // non-existent segment:
+            CopyFile(dir, "_0_1" + ext, "_188_1" + ext);
+
+            // Create a bogus segment file:
+            CopyFile(dir, "_0.cfs", "_188.cfs");
+
+            // Create a bogus fnm file when the CFS already exists:
+            CopyFile(dir, "_0.cfs", "_0.fnm");
+
+            // Create some old segments file:
+            CopyFile(dir, "segments_2", "segments");
+            CopyFile(dir, "segments_2", "segments_1");
+
+            // Create a bogus cfs file shadowing a non-cfs segment:
+
+            // TODO: assert is bogus (relies upon codec-specific filenames)
+            Assert.IsTrue(SlowFileExists(dir, "_3.fdt") || SlowFileExists(dir, "_3.fld"));
+            Assert.IsFalse(SlowFileExists(dir, "_3.cfs"));
+            CopyFile(dir, "_1.cfs", "_3.cfs");
+
+            string[] filesPre = dir.ListAll();
+
+            // Open & close a writer: it should delete the above 4
+            // files and nothing more:
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
+            writer.Dispose();
+
+            string[] files2 = dir.ListAll();
+            dir.Dispose();
+
+            Array.Sort(files);
+            Array.Sort(files2);
+
+            HashSet<string> dif = DifFiles(files, files2);
+
+            if (!Arrays.Equals(files, files2))
+            {
+                Assert.Fail("IndexFileDeleter failed to delete unreferenced extra files: should have deleted " + (filesPre.Length - files.Length) + " files but only deleted " + (filesPre.Length - files2.Length) + "; expected files:\n    " + AsString(files) + "\n  actual files:\n    " + AsString(files2) + "\ndiff: " + dif);
+            }
+        }
+
+        private static HashSet<string> DifFiles(string[] files1, string[] files2)
+        {
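+            // Compute the symmetric difference of the two file lists.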
+            HashSet<string> set1 = new HashSet<string>();
+            HashSet<string> set2 = new HashSet<string>();
+            HashSet<string> extra = new HashSet<string>();
+
+            for (int x = 0; x < files1.Length; x++)
+            {
+                set1.Add(files1[x]);
+            }
+            for (int x = 0; x < files2.Length; x++)
+            {
+                set2.Add(files2[x]);
+            }
+            IEnumerator<string> i1 = set1.GetEnumerator();
+            while (i1.MoveNext())
+            {
+                string o = i1.Current;
+                if (!set2.Contains(o))
+                {
+                    extra.Add(o);
+                }
+            }
+            IEnumerator<string> i2 = set2.GetEnumerator();
+            while (i2.MoveNext())
+            {
+                string o = i2.Current;
+                if (!set1.Contains(o))
+                {
+                    extra.Add(o);
+                }
+            }
+            return extra;
+        }
+
+        private string AsString(string[] l)
+        {
+            string s = "";
+            for (int i = 0; i < l.Length; i++)
+            {
+                if (i > 0)
+                {
+                    s += "\n    ";
+                }
+                s += l[i];
+            }
+            return s;
+        }
+
+        public virtual void CopyFile(Directory dir, string src, string dest)
+        {
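+            // Byte-for-byte copy of src to dest through the Directory API,
+            // 1 KB at a time.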
+            IndexInput @in = dir.OpenInput(src, NewIOContext(Random()));
+            IndexOutput @out = dir.CreateOutput(dest, NewIOContext(Random()));
+            var b = new byte[1024];
+            long remainder = @in.Length;
+            while (remainder > 0)
+            {
+                int len = (int)Math.Min(b.Length, remainder);
+                @in.ReadBytes(b, 0, len);
+                @out.WriteBytes(b, len);
+                remainder -= len;
+            }
+            @in.Dispose();
+            @out.Dispose();
+        }
+
+        private void AddDoc(IndexWriter writer, int id)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+            doc.Add(NewStringField("id", Convert.ToString(id), Field.Store.NO));
+            writer.AddDocument(doc);
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexInput.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexInput.cs b/src/Lucene.Net.Tests/Index/TestIndexInput.cs
new file mode 100644
index 0000000..32cd6a5
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexInput.cs
@@ -0,0 +1,186 @@
+using System;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using System.IO;
+    using System.Reflection;
+    using ByteArrayDataInput = Lucene.Net.Store.ByteArrayDataInput;
+    using ByteArrayDataOutput = Lucene.Net.Store.ByteArrayDataOutput;
+    using DataInput = Lucene.Net.Store.DataInput;
+    using IndexInput = Lucene.Net.Store.IndexInput;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestIndexInput : LuceneTestCase
+    {
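+        // Hand-crafted stream of VInt/VLong encodings and UTF-8 strings, ending
+        // with deliberately invalid VInt/VLong sequences; CheckReads consumes it
+        // in this exact order.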
+        internal static readonly byte[] READ_TEST_BYTES = new byte[]
+        {
+            0x80, 0x01, 0xFF, 0x7F, 0x80, 0x80, 0x01, 0x81, 0x80, 0x01,
+            0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F,
+            0xFF, 0xFF, 0xFF, 0xFF, 0x07,
+            0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F,
+            0x06, (byte)'L', (byte)'u', (byte)'c', (byte)'e', (byte)'n', (byte)'e',
+            0x02, 0xC2, 0xBF,
+            0x0A, (byte)'L', (byte)'u', 0xC2, 0xBF, (byte)'c', (byte)'e', 0xC2, 0xBF, (byte)'n', (byte)'e',
+            0x03, 0xE2, 0x98, 0xA0,
+            0x0C, (byte)'L', (byte)'u', 0xE2, 0x98, 0xA0, (byte)'c', (byte)'e', 0xE2, 0x98, 0xA0, (byte)'n', (byte)'e',
+            0x04, 0xF0, 0x9D, 0x84, 0x9E,
+            0x08, 0xF0, 0x9D, 0x84, 0x9E, 0xF0, 0x9D, 0x85, 0xA0,
+            0x0E, (byte)'L', (byte)'u', 0xF0, 0x9D, 0x84, 0x9E, (byte)'c', (byte)'e', 0xF0, 0x9D, 0x85, 0xA0, (byte)'n', (byte)'e',
+            0x01, 0x00,
+            0x08, (byte)'L', (byte)'u', 0x00, (byte)'c', (byte)'e', 0x00, (byte)'n', (byte)'e',
+            0xFF, 0xFF, 0xFF, 0xFF, 0x17,
+            0x01,
+            0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+            0x01
+        };
+
+        internal static readonly int COUNT = RANDOM_MULTIPLIER * 65536;
+        internal static int[] INTS;
+        internal static long[] LONGS;
+        internal static byte[] RANDOM_TEST_BYTES;
+
+        [OneTimeSetUp]
+        public static void BeforeClass()
+        {
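+            // Write each random value twice (variable-length and fixed-width) so
+            // CheckRandomReads can verify both code paths against the same arrays.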
+            Random random = Random();
+            INTS = new int[COUNT];
+            LONGS = new long[COUNT];
+            RANDOM_TEST_BYTES = new byte[COUNT * (5 + 4 + 9 + 8)];
+            ByteArrayDataOutput bdo = new ByteArrayDataOutput(RANDOM_TEST_BYTES);
+            for (int i = 0; i < COUNT; i++)
+            {
+                int i1 = INTS[i] = random.Next();
+                bdo.WriteVInt32(i1);
+                bdo.WriteInt32(i1);
+
+                long l1;
+                if (Rarely())
+                {
+                    // a long with lots of zeroes at the end
+                    l1 = LONGS[i] = TestUtil.NextLong(random, 0, int.MaxValue) << 32;
+                }
+                else
+                {
+                    l1 = LONGS[i] = TestUtil.NextLong(random, 0, long.MaxValue);
+                }
+                bdo.WriteVInt64(l1);
+                bdo.WriteInt64(l1);
+            }
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            INTS = null;
+            LONGS = null;
+            RANDOM_TEST_BYTES = null;
+        }
+
+        private void CheckReads(DataInput @is, Type expectedEx)
+        {
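+            // The expected values below mirror READ_TEST_BYTES; the final reads hit
+            // the deliberately corrupt VInt/VLong bytes and must throw expectedEx.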
+            Assert.AreEqual(128, @is.ReadVInt32());
+            Assert.AreEqual(16383, @is.ReadVInt32());
+            Assert.AreEqual(16384, @is.ReadVInt32());
+            Assert.AreEqual(16385, @is.ReadVInt32());
+            Assert.AreEqual(int.MaxValue, @is.ReadVInt32());
+            Assert.AreEqual(-1, @is.ReadVInt32());
+            Assert.AreEqual((long)int.MaxValue, @is.ReadVInt64());
+            Assert.AreEqual(long.MaxValue, @is.ReadVInt64());
+            Assert.AreEqual("Lucene", @is.ReadString());
+
+            Assert.AreEqual("\u00BF", @is.ReadString());
+            Assert.AreEqual("Lu\u00BFce\u00BFne", @is.ReadString());
+
+            Assert.AreEqual("\u2620", @is.ReadString());
+            Assert.AreEqual("Lu\u2620ce\u2620ne", @is.ReadString());
+
+            Assert.AreEqual("\uD834\uDD1E", @is.ReadString());
+            Assert.AreEqual("\uD834\uDD1E\uD834\uDD60", @is.ReadString());
+            Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne", @is.ReadString());
+
+            Assert.AreEqual("\u0000", @is.ReadString());
+            Assert.AreEqual("Lu\u0000ce\u0000ne", @is.ReadString());
+
+            try
+            {
+                @is.ReadVInt32();
+                Assert.Fail("Should throw " + expectedEx.Name);
+            }
+            catch (Exception e)
+            {
+                Assert.IsTrue(e.Message.StartsWith("Invalid vInt"));
+                Assert.IsTrue(expectedEx.IsInstanceOfType(e));
+            }
+            Assert.AreEqual(1, @is.ReadVInt32()); // guard value
+
+            try
+            {
+                @is.ReadVInt64();
+                Assert.Fail("Should throw " + expectedEx.Name);
+            }
+            catch (Exception e)
+            {
+                Assert.IsTrue(e.Message.StartsWith("Invalid vLong"));
+                Assert.IsTrue(expectedEx.IsInstanceOfType(e));
+            }
+            Assert.AreEqual(1L, @is.ReadVInt64()); // guard value
+        }
+
+        private void CheckRandomReads(DataInput @is)
+        {
+            for (int i = 0; i < COUNT; i++)
+            {
+                Assert.AreEqual(INTS[i], @is.ReadVInt32());
+                Assert.AreEqual(INTS[i], @is.ReadInt32());
+                Assert.AreEqual(LONGS[i], @is.ReadVInt64());
+                Assert.AreEqual(LONGS[i], @is.ReadInt64());
+            }
+        }
+
+        // this test only checks BufferedIndexInput because MockIndexInput extends BufferedIndexInput
+        [Test]
+        public virtual void TestBufferedIndexInputRead()
+        {
+            IndexInput @is = new MockIndexInput(READ_TEST_BYTES);
+            CheckReads(@is, typeof(IOException));
+            @is.Dispose();
+            @is = new MockIndexInput(RANDOM_TEST_BYTES);
+            CheckRandomReads(@is);
+            @is.Dispose();
+        }
+
+        // this test checks the raw IndexInput methods as it uses RAMIndexInput which extends IndexInput directly
+        [Test]
+        public virtual void TestRawIndexInputRead()
+        {
+            Random random = Random();
+            RAMDirectory dir = new RAMDirectory();
+            IndexOutput os = dir.CreateOutput("foo", NewIOContext(random));
+            os.WriteBytes(READ_TEST_BYTES, READ_TEST_BYTES.Length);
+            os.Dispose();
+            IndexInput @is = dir.OpenInput("foo", NewIOContext(random));
+            CheckReads(@is, typeof(IOException));
+            @is.Dispose();
+
+            os = dir.CreateOutput("bar", NewIOContext(random));
+            os.WriteBytes(RANDOM_TEST_BYTES, RANDOM_TEST_BYTES.Length);
+            os.Dispose();
+            @is = dir.OpenInput("bar", NewIOContext(random));
+            CheckRandomReads(@is);
+            @is.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestByteArrayDataInput()
+        {
+            ByteArrayDataInput @is = new ByteArrayDataInput(READ_TEST_BYTES);
+            CheckReads(@is, typeof(Exception));
+            @is = new ByteArrayDataInput(RANDOM_TEST_BYTES);
+            CheckRandomReads(@is);
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs b/src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs
new file mode 100644
index 0000000..9c96ced
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs
@@ -0,0 +1,155 @@
+using System.Collections.Generic;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System;
+    using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
+    using Directory = Lucene.Net.Store.Directory;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+
+    [TestFixture]
+    public class TestIndexReaderClose : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestCloseUnderException()
+        {
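+            // Register a mix of counting and throwing IReaderClosedListeners, then
+            // verify that Dispose surfaces the expected exception, the reader still
+            // ends up closed, and every counting listener ran (count returns to 0).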
+            int iters = 1000 + 1 + Random().Next(20);
+            for (int j = 0; j < iters; j++)
+            {
+                Directory dir = NewDirectory();
+                IndexWriter writer = new IndexWriter(dir,
+                    NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                writer.Commit();
+                writer.Dispose();
+                DirectoryReader open = DirectoryReader.Open(dir);
+                bool throwOnClose = !Rarely();
+                AtomicReader wrap = SlowCompositeReaderWrapper.Wrap(open);
+                FilterAtomicReader reader = new FilterAtomicReaderAnonymousInnerClassHelper(this, wrap, throwOnClose);
+                IList<IndexReader.IReaderClosedListener> listeners = new List<IndexReader.IReaderClosedListener>();
+                int listenerCount = Random().Next(20);
+                AtomicInt32 count = new AtomicInt32();
+                bool faultySet = false;
+                for (int i = 0; i < listenerCount; i++)
+                {
+                    if (Rarely())
+                    {
+                        faultySet = true;
+                        reader.AddReaderClosedListener(new FaultyListener());
+                    }
+                    else
+                    {
+                        count.IncrementAndGet();
+                        reader.AddReaderClosedListener(new CountListener(count));
+                    }
+                }
+                if (!faultySet && !throwOnClose)
+                {
+                    reader.AddReaderClosedListener(new FaultyListener());
+                }
+                try
+                {
+                    reader.Dispose();
+                    Assert.Fail("expected Exception");
+                }
+                catch (InvalidOperationException ex)
+                {
+                    if (throwOnClose)
+                    {
+                        Assert.AreEqual("BOOM!", ex.Message);
+                    }
+                    else
+                    {
+                        Assert.AreEqual("GRRRRRRRRRRRR!", ex.Message);
+                    }
+                }
+
+                try
+                {
+                    var aaa = reader.Fields;
+                    Assert.Fail("we are closed");
+                }
+#pragma warning disable 168
+                catch (AlreadyClosedException ex)
+#pragma warning restore 168
+                {
+                }
+
+                if (Random().NextBoolean())
+                {
+                    reader.Dispose(); // call it again
+                }
+                Assert.AreEqual(0, count.Get());
+                wrap.Dispose();
+                dir.Dispose();
+            }
+        }
+
+        private class FilterAtomicReaderAnonymousInnerClassHelper : FilterAtomicReader
+        {
+            private readonly TestIndexReaderClose OuterInstance;
+
+            private bool ThrowOnClose;
+
+            public FilterAtomicReaderAnonymousInnerClassHelper(TestIndexReaderClose outerInstance, AtomicReader wrap, bool throwOnClose)
+                : base(wrap)
+            {
+                this.OuterInstance = outerInstance;
+                this.ThrowOnClose = throwOnClose;
+            }
+
+            protected internal override void DoClose()
+            {
+                base.DoClose();
+                if (ThrowOnClose)
+                {
+                    throw new InvalidOperationException("BOOM!");
+                }
+            }
+        }
+
+        private sealed class CountListener : IndexReader.IReaderClosedListener
+        {
+            internal readonly AtomicInt32 Count;
+
+            public CountListener(AtomicInt32 count)
+            {
+                this.Count = count;
+            }
+
+            public void OnClose(IndexReader reader)
+            {
+                Count.DecrementAndGet();
+            }
+        }
+
+        private sealed class FaultyListener : IndexReader.IReaderClosedListener
+        {
+            public void OnClose(IndexReader reader)
+            {
+                throw new InvalidOperationException("GRRRRRRRRRRRR!");
+            }
+        }
+    }
+}
\ No newline at end of file


[37/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDirectoryReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDirectoryReader.cs b/src/Lucene.Net.Tests/Index/TestDirectoryReader.cs
new file mode 100644
index 0000000..14fea69
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDirectoryReader.cs
@@ -0,0 +1,1341 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Threading;
+using Lucene.Net.Documents;
+using Lucene.Net.Search;
+
+namespace Lucene.Net.Index
+{
+    using Attributes;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using Lucene41PostingsFormat = Lucene.Net.Codecs.Lucene41.Lucene41PostingsFormat;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using NoSuchDirectoryException = Lucene.Net.Store.NoSuchDirectoryException;
+    using StoredField = StoredField;
+    using StringField = StringField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestDirectoryReader : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestDocument()
+        {
+            SegmentReader[] readers = new SegmentReader[2];
+            Directory dir = NewDirectory();
+            Document doc1 = new Document();
+            Document doc2 = new Document();
+            DocHelper.SetupDoc(doc1);
+            DocHelper.SetupDoc(doc2);
+            DocHelper.WriteDoc(Random(), dir, doc1);
+            DocHelper.WriteDoc(Random(), dir, doc2);
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            Assert.IsTrue(reader != null);
+            Assert.IsTrue(reader is StandardDirectoryReader);
+
+            Document newDoc1 = reader.Document(0);
+            Assert.IsTrue(newDoc1 != null);
+            Assert.IsTrue(DocHelper.NumFields(newDoc1) == DocHelper.NumFields(doc1) - DocHelper.Unstored.Count);
+            Document newDoc2 = reader.Document(1);
+            Assert.IsTrue(newDoc2 != null);
+            Assert.IsTrue(DocHelper.NumFields(newDoc2) == DocHelper.NumFields(doc2) - DocHelper.Unstored.Count);
+            Terms vector = reader.GetTermVectors(0).GetTerms(DocHelper.TEXT_FIELD_2_KEY);
+            Assert.IsNotNull(vector);
+
+            reader.Dispose();
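+            // readers[] is never populated in this test; the disposals below are defensive no-ops.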
+            if (readers[0] != null)
+            {
+                readers[0].Dispose();
+            }
+            if (readers[1] != null)
+            {
+                readers[1].Dispose();
+            }
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestMultiTermDocs()
+        {
+            Directory ramDir1 = NewDirectory();
+            AddDoc(Random(), ramDir1, "test foo", true);
+            Directory ramDir2 = NewDirectory();
+            AddDoc(Random(), ramDir2, "test blah", true);
+            Directory ramDir3 = NewDirectory();
+            AddDoc(Random(), ramDir3, "test wow", true);
+
+            IndexReader[] readers1 = new IndexReader[] { DirectoryReader.Open(ramDir1), DirectoryReader.Open(ramDir3) };
+            IndexReader[] readers2 = new IndexReader[] { DirectoryReader.Open(ramDir1), DirectoryReader.Open(ramDir2), DirectoryReader.Open(ramDir3) };
+            MultiReader mr2 = new MultiReader(readers1);
+            MultiReader mr3 = new MultiReader(readers2);
+
+            // test mixing up TermDocs and TermEnums from different readers.
+            TermsEnum te2 = MultiFields.GetTerms(mr2, "body").GetIterator(null);
+            te2.SeekCeil(new BytesRef("wow"));
+            DocsEnum td = TestUtil.Docs(Random(), mr2, "body", te2.Term, MultiFields.GetLiveDocs(mr2), null, 0);
+
+            TermsEnum te3 = MultiFields.GetTerms(mr3, "body").GetIterator(null);
+            te3.SeekCeil(new BytesRef("wow"));
+            td = TestUtil.Docs(Random(), te3, MultiFields.GetLiveDocs(mr3), td, 0);
+
+            int ret = 0;
+
+            // this should blow up if we forget to check that the TermEnum is from the same
+            // reader as the TermDocs.
+            while (td.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                ret += td.DocID;
+            }
+
+            // really a dummy assert to ensure that we got some docs and to ensure that
+            // nothing is eliminated by the JIT
+            Assert.IsTrue(ret > 0);
+            readers1[0].Dispose();
+            readers1[1].Dispose();
+            readers2[0].Dispose();
+            readers2[1].Dispose();
+            readers2[2].Dispose();
+            ramDir1.Dispose();
+            ramDir2.Dispose();
+            ramDir3.Dispose();
+        }
+
+        private void AddDoc(Random random, Directory ramDir1, string s, bool create)
+        {
+            IndexWriter iw = new IndexWriter(ramDir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetOpenMode(create ? OpenMode.CREATE : OpenMode.APPEND));
+            Document doc = new Document();
+            doc.Add(NewTextField("body", s, Field.Store.NO));
+            iw.AddDocument(doc);
+            iw.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIsCurrent()
+        {
+            Directory d = NewDirectory();
+            IndexWriter writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            AddDocumentWithFields(writer);
+            writer.Dispose();
+            // set up reader:
+            DirectoryReader reader = DirectoryReader.Open(d);
+            Assert.IsTrue(reader.IsCurrent);
+            // modify index by adding another document:
+            writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
+            AddDocumentWithFields(writer);
+            writer.Dispose();
+            Assert.IsFalse(reader.IsCurrent);
+            // re-create index:
+            writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE));
+            AddDocumentWithFields(writer);
+            writer.Dispose();
+            Assert.IsFalse(reader.IsCurrent);
+            reader.Dispose();
+            d.Dispose();
+        }
+
+        /// <summary>
+        /// Tests the IndexReader.getFieldNames implementation </summary>
+        /// <exception cref="Exception"> on error </exception>
+        [Test]
+        public virtual void TestGetFieldNames()
+        {
+            Directory d = NewDirectory();
+            // set up writer
+            IndexWriter writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            Document doc = new Document();
+
+            FieldType customType3 = new FieldType();
+            customType3.IsStored = true;
+
+            doc.Add(new StringField("keyword", "test1", Field.Store.YES));
+            doc.Add(new TextField("text", "test1", Field.Store.YES));
+            doc.Add(new Field("unindexed", "test1", customType3));
+            doc.Add(new TextField("unstored", "test1", Field.Store.NO));
+            writer.AddDocument(doc);
+
+            writer.Dispose();
+            // set up reader
+            DirectoryReader reader = DirectoryReader.Open(d);
+            FieldInfos fieldInfos = MultiFields.GetMergedFieldInfos(reader);
+            Assert.IsNotNull(fieldInfos.FieldInfo("keyword"));
+            Assert.IsNotNull(fieldInfos.FieldInfo("text"));
+            Assert.IsNotNull(fieldInfos.FieldInfo("unindexed"));
+            Assert.IsNotNull(fieldInfos.FieldInfo("unstored"));
+            reader.Dispose();
+            // add more documents
+            writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMergePolicy(NewLogMergePolicy()));
+            // want to get some more segments here
+            int mergeFactor = ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor;
+            for (int i = 0; i < 5 * mergeFactor; i++)
+            {
+                doc = new Document();
+                doc.Add(new StringField("keyword", "test1", Field.Store.YES));
+                doc.Add(new TextField("text", "test1", Field.Store.YES));
+                doc.Add(new Field("unindexed", "test1", customType3));
+                doc.Add(new TextField("unstored", "test1", Field.Store.NO));
+                writer.AddDocument(doc);
+            }
+            // new fields are in some different segments (we hope)
+            for (int i = 0; i < 5 * mergeFactor; i++)
+            {
+                doc = new Document();
+                doc.Add(new StringField("keyword2", "test1", Field.Store.YES));
+                doc.Add(new TextField("text2", "test1", Field.Store.YES));
+                doc.Add(new Field("unindexed2", "test1", customType3));
+                doc.Add(new TextField("unstored2", "test1", Field.Store.NO));
+                writer.AddDocument(doc);
+            }
+            // new termvector fields
+
+            FieldType customType5 = new FieldType(TextField.TYPE_STORED);
+            customType5.StoreTermVectors = true;
+            FieldType customType6 = new FieldType(TextField.TYPE_STORED);
+            customType6.StoreTermVectors = true;
+            customType6.StoreTermVectorOffsets = true;
+            FieldType customType7 = new FieldType(TextField.TYPE_STORED);
+            customType7.StoreTermVectors = true;
+            customType7.StoreTermVectorPositions = true;
+            FieldType customType8 = new FieldType(TextField.TYPE_STORED);
+            customType8.StoreTermVectors = true;
+            customType8.StoreTermVectorOffsets = true;
+            customType8.StoreTermVectorPositions = true;
+
+            for (int i = 0; i < 5 * mergeFactor; i++)
+            {
+                doc = new Document();
+                doc.Add(new TextField("tvnot", "tvnot", Field.Store.YES));
+                doc.Add(new Field("termvector", "termvector", customType5));
+                doc.Add(new Field("tvoffset", "tvoffset", customType6));
+                doc.Add(new Field("tvposition", "tvposition", customType7));
+                doc.Add(new Field("tvpositionoffset", "tvpositionoffset", customType8));
+                writer.AddDocument(doc);
+            }
+
+            writer.Dispose();
+
+            // verify fields again
+            reader = DirectoryReader.Open(d);
+            fieldInfos = MultiFields.GetMergedFieldInfos(reader);
+
+            ICollection<string> allFieldNames = new HashSet<string>();
+            ICollection<string> indexedFieldNames = new HashSet<string>();
+            ICollection<string> notIndexedFieldNames = new HashSet<string>();
+            ICollection<string> tvFieldNames = new HashSet<string>();
+
+            foreach (FieldInfo fieldInfo in fieldInfos)
+            {
+                string name = fieldInfo.Name;
+                allFieldNames.Add(name);
+                if (fieldInfo.IsIndexed)
+                {
+                    indexedFieldNames.Add(name);
+                }
+                else
+                {
+                    notIndexedFieldNames.Add(name);
+                }
+                if (fieldInfo.HasVectors)
+                {
+                    tvFieldNames.Add(name);
+                }
+            }
+
+            Assert.IsTrue(allFieldNames.Contains("keyword"));
+            Assert.IsTrue(allFieldNames.Contains("text"));
+            Assert.IsTrue(allFieldNames.Contains("unindexed"));
+            Assert.IsTrue(allFieldNames.Contains("unstored"));
+            Assert.IsTrue(allFieldNames.Contains("keyword2"));
+            Assert.IsTrue(allFieldNames.Contains("text2"));
+            Assert.IsTrue(allFieldNames.Contains("unindexed2"));
+            Assert.IsTrue(allFieldNames.Contains("unstored2"));
+            Assert.IsTrue(allFieldNames.Contains("tvnot"));
+            Assert.IsTrue(allFieldNames.Contains("termvector"));
+            Assert.IsTrue(allFieldNames.Contains("tvposition"));
+            Assert.IsTrue(allFieldNames.Contains("tvoffset"));
+            Assert.IsTrue(allFieldNames.Contains("tvpositionoffset"));
+
+            // verify that only indexed fields were returned
+            Assert.AreEqual(11, indexedFieldNames.Count); // 6 original + the 5 termvector fields
+            Assert.IsTrue(indexedFieldNames.Contains("keyword"));
+            Assert.IsTrue(indexedFieldNames.Contains("text"));
+            Assert.IsTrue(indexedFieldNames.Contains("unstored"));
+            Assert.IsTrue(indexedFieldNames.Contains("keyword2"));
+            Assert.IsTrue(indexedFieldNames.Contains("text2"));
+            Assert.IsTrue(indexedFieldNames.Contains("unstored2"));
+            Assert.IsTrue(indexedFieldNames.Contains("tvnot"));
+            Assert.IsTrue(indexedFieldNames.Contains("termvector"));
+            Assert.IsTrue(indexedFieldNames.Contains("tvposition"));
+            Assert.IsTrue(indexedFieldNames.Contains("tvoffset"));
+            Assert.IsTrue(indexedFieldNames.Contains("tvpositionoffset"));
+
+            // verify that only unindexed fields were returned
+            Assert.AreEqual(2, notIndexedFieldNames.Count); // the following fields
+            Assert.IsTrue(notIndexedFieldNames.Contains("unindexed"));
+            Assert.IsTrue(notIndexedFieldNames.Contains("unindexed2"));
+
+            // verify index term vector fields
+            Assert.AreEqual(4, tvFieldNames.Count, tvFieldNames.ToString()); // 4 fields store term vectors ("tvnot" does not)
+            Assert.IsTrue(tvFieldNames.Contains("termvector"));
+
+            reader.Dispose();
+            d.Dispose();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit's Timeout attribute is not available on .NET Core.
+        [Timeout(40000)]
+#endif
+        [Test, HasTimeout]
+        public virtual void TestTermVectors()
+        {
+            Directory d = NewDirectory();
+            // set up writer
+            IndexWriter writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            // want to get some more segments here
+            // new termvector fields
+            int mergeFactor = ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor;
+            FieldType customType5 = new FieldType(TextField.TYPE_STORED);
+            customType5.StoreTermVectors = true;
+            FieldType customType6 = new FieldType(TextField.TYPE_STORED);
+            customType6.StoreTermVectors = true;
+            customType6.StoreTermVectorOffsets = true;
+            FieldType customType7 = new FieldType(TextField.TYPE_STORED);
+            customType7.StoreTermVectors = true;
+            customType7.StoreTermVectorPositions = true;
+            FieldType customType8 = new FieldType(TextField.TYPE_STORED);
+            customType8.StoreTermVectors = true;
+            customType8.StoreTermVectorOffsets = true;
+            customType8.StoreTermVectorPositions = true;
+            for (int i = 0; i < 5 * mergeFactor; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new TextField("tvnot", "one two two three three three", Field.Store.YES));
+                doc.Add(new Field("termvector", "one two two three three three", customType5));
+                doc.Add(new Field("tvoffset", "one two two three three three", customType6));
+                doc.Add(new Field("tvposition", "one two two three three three", customType7));
+                doc.Add(new Field("tvpositionoffset", "one two two three three three", customType8));
+
+                writer.AddDocument(doc);
+            }
+            writer.Dispose();
+            d.Dispose();
+        }
+
+        internal virtual void AssertTermDocsCount(string msg, IndexReader reader, Term term, int expected)
+        {
+            DocsEnum tdocs = TestUtil.Docs(Random(), reader, term.Field, new BytesRef(term.Text()), MultiFields.GetLiveDocs(reader), null, 0);
+            int count = 0;
+            if (tdocs != null)
+            {
+                while (tdocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+                {
+                    count++;
+                }
+            }
+            Assert.AreEqual(expected, count, msg + ", count mismatch");
+        }
+
+        [Test]
+        public virtual void TestBinaryFields()
+        {
+            Directory dir = NewDirectory();
+            byte[] bin = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+
+            for (int i = 0; i < 10; i++)
+            {
+                AddDoc(writer, "document number " + (i + 1));
+                AddDocumentWithFields(writer);
+                AddDocumentWithDifferentFields(writer);
+                AddDocumentWithTermVectorFields(writer);
+            }
+            writer.Dispose();
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMergePolicy(NewLogMergePolicy()));
+            Document doc = new Document();
+            doc.Add(new StoredField("bin1", bin));
+            doc.Add(new TextField("junk", "junk text", Field.Store.NO));
+            writer.AddDocument(doc);
+            writer.Dispose();
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            Document doc2 = reader.Document(reader.MaxDoc - 1);
+            IIndexableField[] fields = doc2.GetFields("bin1");
+            Assert.IsNotNull(fields);
+            Assert.AreEqual(1, fields.Length);
+            IIndexableField b1 = fields[0];
+            Assert.IsTrue(b1.GetBinaryValue() != null);
+            BytesRef bytesRef = b1.GetBinaryValue();
+            Assert.AreEqual(bin.Length, bytesRef.Length);
+            for (int i = 0; i < bin.Length; i++)
+            {
+                Assert.AreEqual(bin[i], bytesRef.Bytes[i + bytesRef.Offset]);
+            }
+            reader.Dispose();
+            // force merge
+
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMergePolicy(NewLogMergePolicy()));
+            writer.ForceMerge(1);
+            writer.Dispose();
+            reader = DirectoryReader.Open(dir);
+            doc2 = reader.Document(reader.MaxDoc - 1);
+            fields = doc2.GetFields("bin1");
+            Assert.IsNotNull(fields);
+            Assert.AreEqual(1, fields.Length);
+            b1 = fields[0];
+            Assert.IsTrue(b1.GetBinaryValue() != null);
+            bytesRef = b1.GetBinaryValue();
+            Assert.AreEqual(bin.Length, bytesRef.Length);
+            for (int i = 0; i < bin.Length; i++)
+            {
+                Assert.AreEqual(bin[i], bytesRef.Bytes[i + bytesRef.Offset]);
+            }
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        /* ??? public void testOpenEmptyDirectory() throws IOException{
+          String dirName = "test.empty";
+          File fileDirName = new File(dirName);
+          if (!fileDirName.exists()) {
+            fileDirName.mkdir();
+          }
+          try {
+            DirectoryReader.Open(fileDirName);
+            Assert.Fail("opening DirectoryReader on empty directory failed to produce FileNotFoundException/NoSuchFileException");
+          } catch (FileNotFoundException | NoSuchFileException e) {
+            // GOOD
+          }
+          rmDir(fileDirName);
+        }*/
+
+        [Test]
+        public virtual void TestFilesOpenClose()
+        {
+            // Create initial data set
+            DirectoryInfo dirFile = CreateTempDir("TestIndexReader.testFilesOpenClose");
+            Directory dir = NewFSDirectory(dirFile);
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            AddDoc(writer, "test");
+            writer.Dispose();
+            dir.Dispose();
+
+            // Try to erase the data - this ensures that the writer closed all files
+            System.IO.Directory.Delete(dirFile.FullName, true);
+            dir = NewFSDirectory(dirFile);
+
+            // Now create the data set again, just as before
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE));
+            AddDoc(writer, "test");
+            writer.Dispose();
+            dir.Dispose();
+
+            // Now open existing directory and test that reader closes all files
+            dir = NewFSDirectory(dirFile);
+            DirectoryReader reader1 = DirectoryReader.Open(dir);
+            reader1.Dispose();
+            dir.Dispose();
+
+            // The following will fail if reader did not close
+            // all files
+            System.IO.Directory.Delete(dirFile.FullName, true);
+        }
+
+        [Test]
+        public virtual void TestOpenReaderAfterDelete()
+        {
+            DirectoryInfo dirFile = CreateTempDir("deletetest");
+            Directory dir = NewFSDirectory(dirFile);
+            try
+            {
+                DirectoryReader.Open(dir);
+                Assert.Fail("expected FileNotFoundException/NoSuchFileException");
+            }
+#pragma warning disable 168
+            catch (System.IO.FileNotFoundException /*| NoSuchFileException*/ e)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            dirFile.Delete();
+
+            // Make sure we still get a FileNotFoundException (not a NullReferenceException):
+            try
+            {
+                DirectoryReader.Open(dir);
+                Assert.Fail("expected FileNotFoundException/NoSuchFileException");
+            }
+#pragma warning disable 168
+            catch (System.IO.FileNotFoundException /*| NoSuchFileException*/ e)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// LUCENENET specific: non-static because the NewStringField, NewTextField,
+        /// and NewField methods are no longer static.
+        /// </summary>
+        internal void AddDocumentWithFields(IndexWriter writer)
+        {
+            Document doc = new Document();
+
+            FieldType customType3 = new FieldType();
+            customType3.IsStored = true;
+            doc.Add(NewStringField("keyword", "test1", Field.Store.YES));
+            doc.Add(NewTextField("text", "test1", Field.Store.YES));
+            doc.Add(NewField("unindexed", "test1", customType3));
+            doc.Add(new TextField("unstored", "test1", Field.Store.NO));
+            writer.AddDocument(doc);
+        }
+
+        /// <summary>
+        /// LUCENENET specific: non-static because the NewStringField, NewTextField,
+        /// and NewField methods are no longer static.
+        /// </summary>
+        internal void AddDocumentWithDifferentFields(IndexWriter writer)
+        {
+            Document doc = new Document();
+
+            FieldType customType3 = new FieldType();
+            customType3.IsStored = true;
+            doc.Add(NewStringField("keyword2", "test1", Field.Store.YES));
+            doc.Add(NewTextField("text2", "test1", Field.Store.YES));
+            doc.Add(NewField("unindexed2", "test1", customType3));
+            doc.Add(new TextField("unstored2", "test1", Field.Store.NO));
+            writer.AddDocument(doc);
+        }
+
+        /// <summary>
+        /// LUCENENET specific: non-static because the NewTextField and NewField
+        /// methods are no longer static.
+        /// </summary>
+        internal void AddDocumentWithTermVectorFields(IndexWriter writer)
+        {
+            Document doc = new Document();
+            FieldType customType5 = new FieldType(TextField.TYPE_STORED);
+            customType5.StoreTermVectors = true;
+            FieldType customType6 = new FieldType(TextField.TYPE_STORED);
+            customType6.StoreTermVectors = true;
+            customType6.StoreTermVectorOffsets = true;
+            FieldType customType7 = new FieldType(TextField.TYPE_STORED);
+            customType7.StoreTermVectors = true;
+            customType7.StoreTermVectorPositions = true;
+            FieldType customType8 = new FieldType(TextField.TYPE_STORED);
+            customType8.StoreTermVectors = true;
+            customType8.StoreTermVectorOffsets = true;
+            customType8.StoreTermVectorPositions = true;
+            doc.Add(NewTextField("tvnot", "tvnot", Field.Store.YES));
+            doc.Add(NewField("termvector", "termvector", customType5));
+            doc.Add(NewField("tvoffset", "tvoffset", customType6));
+            doc.Add(NewField("tvposition", "tvposition", customType7));
+            doc.Add(NewField("tvpositionoffset", "tvpositionoffset", customType8));
+
+            writer.AddDocument(doc);
+        }
+
+        /// <summary>
+        /// LUCENENET specific: non-static because NewTextField is no longer static.
+        /// </summary>
+        internal void AddDoc(IndexWriter writer, string value)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("content", value, Field.Store.NO));
+            writer.AddDocument(doc);
+        }
+
+        // TODO: maybe this can reuse the logic of test dueling codecs?
+        public static void AssertIndexEquals(DirectoryReader index1, DirectoryReader index2)
+        {
+            Assert.AreEqual(index1.NumDocs, index2.NumDocs, "IndexReaders have different values for numDocs.");
+            Assert.AreEqual(index1.MaxDoc, index2.MaxDoc, "IndexReaders have different values for maxDoc.");
+            Assert.AreEqual(index1.HasDeletions, index2.HasDeletions, "Only one IndexReader has deletions.");
+            Assert.AreEqual(index1.Leaves.Count == 1, index2.Leaves.Count == 1, "Single segment test differs.");
+
+            // check field names
+            FieldInfos fieldInfos1 = MultiFields.GetMergedFieldInfos(index1);
+            FieldInfos fieldInfos2 = MultiFields.GetMergedFieldInfos(index2);
+            Assert.AreEqual(fieldInfos1.Count, fieldInfos2.Count, "IndexReaders have different numbers of fields.");
+            int numFields = fieldInfos1.Count;
+            for (int fieldID = 0; fieldID < numFields; fieldID++)
+            {
+                FieldInfo fieldInfo1 = fieldInfos1.FieldInfo(fieldID);
+                FieldInfo fieldInfo2 = fieldInfos2.FieldInfo(fieldID);
+                Assert.AreEqual(fieldInfo1.Name, fieldInfo2.Name, "Different field names.");
+            }
+
+            // check norms
+            foreach (FieldInfo fieldInfo in fieldInfos1)
+            {
+                string curField = fieldInfo.Name;
+                NumericDocValues norms1 = MultiDocValues.GetNormValues(index1, curField);
+                NumericDocValues norms2 = MultiDocValues.GetNormValues(index2, curField);
+                if (norms1 != null && norms2 != null)
+                {
+                    // todo: generalize this (like TestDuelingCodecs assert)
+                    for (int i = 0; i < index1.MaxDoc; i++)
+                    {
+                        Assert.AreEqual(norms1.Get(i), norms2.Get(i), "Norm different for doc " + i + " and field '" + curField + "'.");
+                    }
+                }
+                else
+                {
+                    Assert.IsNull(norms1);
+                    Assert.IsNull(norms2);
+                }
+            }
+
+            // check deletions
+            IBits liveDocs1 = MultiFields.GetLiveDocs(index1);
+            IBits liveDocs2 = MultiFields.GetLiveDocs(index2);
+            for (int i = 0; i < index1.MaxDoc; i++)
+            {
+                Assert.AreEqual(liveDocs1 == null || !liveDocs1.Get(i), liveDocs2 == null || !liveDocs2.Get(i), "Doc " + i + " only deleted in one index.");
+            }
+
+            // check stored fields
+            for (int i = 0; i < index1.MaxDoc; i++)
+            {
+                if (liveDocs1 == null || liveDocs1.Get(i))
+                {
+                    Document doc1 = index1.Document(i);
+                    Document doc2 = index2.Document(i);
+                    IList<IIndexableField> field1 = doc1.Fields;
+                    IList<IIndexableField> field2 = doc2.Fields;
+                    Assert.AreEqual(field1.Count, field2.Count, "Different numbers of fields for doc " + i + ".");
+                    IEnumerator<IIndexableField> itField1 = field1.GetEnumerator();
+                    IEnumerator<IIndexableField> itField2 = field2.GetEnumerator();
+                    while (itField1.MoveNext())
+                    {
+                        Field curField1 = (Field)itField1.Current;
+                        itField2.MoveNext();
+                        Field curField2 = (Field)itField2.Current;
+                        Assert.AreEqual(curField1.Name, curField2.Name, "Different fields names for doc " + i + ".");
+                        Assert.AreEqual(curField1.GetStringValue(), curField2.GetStringValue(), "Different field values for doc " + i + ".");
+                    }
+                }
+            }
+
+            // check dictionary and posting lists
+            Fields fields1 = MultiFields.GetFields(index1);
+            Fields fields2 = MultiFields.GetFields(index2);
+            IEnumerator<string> fenum2 = fields2.GetEnumerator();
+            IBits liveDocs = MultiFields.GetLiveDocs(index1);
+            foreach (string field1 in fields1)
+            {
+                fenum2.MoveNext();
+                Assert.AreEqual(field1, fenum2.Current, "Different fields");
+                Terms terms1 = fields1.GetTerms(field1);
+                if (terms1 == null)
+                {
+                    Assert.IsNull(fields2.GetTerms(field1));
+                    continue;
+                }
+                TermsEnum enum1 = terms1.GetIterator(null);
+
+                Terms terms2 = fields2.GetTerms(field1);
+                Assert.IsNotNull(terms2);
+                TermsEnum enum2 = terms2.GetIterator(null);
+
+                while (enum1.Next() != null)
+                {
+                    Assert.AreEqual(enum1.Term, enum2.Next(), "Different terms");
+                    DocsAndPositionsEnum tp1 = enum1.DocsAndPositions(liveDocs, null);
+                    DocsAndPositionsEnum tp2 = enum2.DocsAndPositions(liveDocs, null);
+
+                    while (tp1.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+                    {
+                        Assert.IsTrue(tp2.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                        Assert.AreEqual(tp1.DocID, tp2.DocID, "Different doc id in postinglist of term " + enum1.Term + ".");
+                        Assert.AreEqual(tp1.Freq, tp2.Freq, "Different term frequency in postinglist of term " + enum1.Term + ".");
+                        for (int i = 0; i < tp1.Freq; i++)
+                        {
+                            Assert.AreEqual(tp1.NextPosition(), tp2.NextPosition(), "Different positions in postinglist of term " + enum1.Term + ".");
+                        }
+                    }
+                }
+            }
+            Assert.IsFalse(fenum2.MoveNext());
+        }
+
+        [Test]
+        public virtual void TestGetIndexCommit()
+        {
+            Directory d = NewDirectory();
+
+            // set up writer
+            IndexWriter writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(10)));
+            for (int i = 0; i < 27; i++)
+            {
+                AddDocumentWithFields(writer);
+            }
+            writer.Dispose();
+
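+            // Read the segments file directly so it can be compared with the reader's IndexCommit.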
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(d);
+            DirectoryReader r = DirectoryReader.Open(d);
+            IndexCommit c = r.IndexCommit;
+
+            Assert.AreEqual(sis.GetSegmentsFileName(), c.SegmentsFileName);
+
+            Assert.IsTrue(c.Equals(r.IndexCommit));
+
+            // Change the index
+            writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(10)));
+            for (int i = 0; i < 7; i++)
+            {
+                AddDocumentWithFields(writer);
+            }
+            writer.Dispose();
+
+            DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
+            Assert.IsNotNull(r2);
+            Assert.IsFalse(c.Equals(r2.IndexCommit));
+            Assert.IsFalse(r2.IndexCommit.SegmentCount == 1);
+            r2.Dispose();
+
+            writer = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            r2 = DirectoryReader.OpenIfChanged(r);
+            Assert.IsNotNull(r2);
+            Assert.IsNull(DirectoryReader.OpenIfChanged(r2));
+            Assert.AreEqual(1, r2.IndexCommit.SegmentCount);
+
+            r.Dispose();
+            r2.Dispose();
+            d.Dispose();
+        }
+
+        internal Document CreateDocument(string id)
+        {
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.IsTokenized = false;
+            customType.OmitNorms = true;
+
+            doc.Add(NewField("id", id, customType));
+            return doc;
+        }
+
+        // LUCENE-1468 -- make sure on attempting to open an
+        // DirectoryReader on a non-existent directory, you get a
+        // good exception
+        [Test]
+        public virtual void TestNoDir()
+        {
+            DirectoryInfo tempDir = CreateTempDir("doesnotexist");
+            System.IO.Directory.Delete(tempDir.FullName, true);
+            Directory dir = NewFSDirectory(tempDir);
+            try
+            {
+                DirectoryReader.Open(dir);
+                Assert.Fail("did not hit expected exception");
+            }
+#pragma warning disable 168
+            catch (NoSuchDirectoryException nsde)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            dir.Dispose();
+        }
+
+        // LUCENE-1509
+        [Test]
+        public virtual void TestNoDupCommitFileNames()
+        {
+            Directory dir = NewDirectory();
+
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
+            writer.AddDocument(CreateDocument("a"));
+            writer.AddDocument(CreateDocument("a"));
+            writer.AddDocument(CreateDocument("a"));
+            writer.Dispose();
+
+            ICollection<IndexCommit> commits = DirectoryReader.ListCommits(dir);
+            foreach (IndexCommit commit in commits)
+            {
+                ICollection<string> files = commit.FileNames;
+                HashSet<string> seen = new HashSet<string>();
+                foreach (String fileName in files)
+                {
+                    Assert.IsTrue(!seen.Contains(fileName), "file " + fileName + " was duplicated");
+                    seen.Add(fileName);
+                }
+            }
+
+            dir.Dispose();
+        }
+
+        // LUCENE-1579: Ensure that on a reopened reader, any shared
+        // segments reuse the doc values arrays in FieldCache
+        [Test]
+        public virtual void TestFieldCacheReuseAfterReopen()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy(10)));
+            Document doc = new Document();
+            doc.Add(NewStringField("number", "17", Field.Store.NO));
+            writer.AddDocument(doc);
+            writer.Commit();
+
+            // Open reader1
+            DirectoryReader r = DirectoryReader.Open(dir);
+            AtomicReader r1 = GetOnlySegmentReader(r);
+            FieldCache.Int32s ints = FieldCache.DEFAULT.GetInt32s(r1, "number", false);
+            Assert.AreEqual(17, ints.Get(0));
+
+            // Add new segment
+            writer.AddDocument(doc);
+            writer.Commit();
+
+            // Reopen reader1 --> reader2
+            DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
+            Assert.IsNotNull(r2);
+            r.Dispose();
+            AtomicReader sub0 = (AtomicReader)r2.Leaves[0].Reader;
+            FieldCache.Int32s ints2 = FieldCache.DEFAULT.GetInt32s(sub0, "number", false);
+            r2.Dispose();
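+            // Same underlying segment, so the FieldCache entry must be reused (reference equality).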
+            Assert.IsTrue(ints == ints2);
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-1586: getUniqueTermCount
+        [Test]
+        public virtual void TestUniqueTermCount()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO));
+            doc.Add(NewTextField("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO));
+            writer.AddDocument(doc);
+            writer.AddDocument(doc);
+            writer.Commit();
+
+            DirectoryReader r = DirectoryReader.Open(dir);
+            AtomicReader r1 = GetOnlySegmentReader(r);
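+            // 26 letters in "field" + 10 digits in "number" = 36 unique terms.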
+#pragma warning disable 612, 618
+            Assert.AreEqual(36, r1.Fields.UniqueTermCount);
+#pragma warning restore 612, 618
+            writer.AddDocument(doc);
+            writer.Commit();
+            DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
+            Assert.IsNotNull(r2);
+            r.Dispose();
+
+            foreach (AtomicReaderContext s in r2.Leaves)
+            {
+#pragma warning disable 612, 618
+                Assert.AreEqual(36, ((AtomicReader)s.Reader).Fields.UniqueTermCount);
+#pragma warning restore 612, 618
+            }
+            r2.Dispose();
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-1609: don't load terms index
+        [Test]
+        public virtual void TestNoTermsIndex()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat())));
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO));
+            doc.Add(NewTextField("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO));
+            writer.AddDocument(doc);
+            writer.AddDocument(doc);
+            writer.Dispose();
+
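+            // A termInfosIndexDivisor of -1 tells the reader not to load the terms index.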
+            DirectoryReader r = DirectoryReader.Open(dir, -1);
+            try
+            {
+                r.DocFreq(new Term("field", "f"));
+                Assert.Fail("did not hit expected exception");
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException ise)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            Assert.AreEqual(-1, ((SegmentReader)r.Leaves[0].Reader).TermInfosIndexDivisor);
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat())).SetMergePolicy(NewLogMergePolicy(10)));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            // LUCENE-1718: ensure re-open carries over no terms index:
+            DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
+            Assert.IsNotNull(r2);
+            Assert.IsNull(DirectoryReader.OpenIfChanged(r2));
+            r.Dispose();
+            IList<AtomicReaderContext> leaves = r2.Leaves;
+            Assert.AreEqual(2, leaves.Count);
+            foreach (AtomicReaderContext ctx in leaves)
+            {
+                try
+                {
+                    ctx.Reader.DocFreq(new Term("field", "f"));
+                    Assert.Fail("did not hit expected exception");
+                }
+#pragma warning disable 168
+                catch (InvalidOperationException ise)
+#pragma warning restore 168
+                {
+                    // expected
+                }
+            }
+            r2.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-2046
+        [Test]
+        public virtual void TestPrepareCommitIsCurrent()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer.Commit();
+            Document doc = new Document();
+            writer.AddDocument(doc);
+            DirectoryReader r = DirectoryReader.Open(dir);
+            Assert.IsTrue(r.IsCurrent);
+            writer.AddDocument(doc);
+            writer.PrepareCommit();
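+            // A prepared (but not yet committed) commit must not be visible to readers.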
+            Assert.IsTrue(r.IsCurrent);
+            DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
+            Assert.IsNull(r2);
+            writer.Commit();
+            Assert.IsFalse(r.IsCurrent);
+            writer.Dispose();
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-2753
+        [Test]
+        public virtual void TestListCommits()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null).SetIndexDeletionPolicy(new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy())));
+            SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+            writer.AddDocument(new Document());
+            writer.Commit();
+            sdp.Snapshot();
+            writer.AddDocument(new Document());
+            writer.Commit();
+            sdp.Snapshot();
+            writer.AddDocument(new Document());
+            writer.Commit();
+            sdp.Snapshot();
+            writer.Dispose();
+            long currentGen = 0;
+            foreach (IndexCommit ic in DirectoryReader.ListCommits(dir))
+            {
+                Assert.IsTrue(currentGen < ic.Generation, "currentGen=" + currentGen + " commitGen=" + ic.Generation);
+                currentGen = ic.Generation;
+            }
+            dir.Dispose();
+        }
+
+        // Make sure totalTermFreq works correctly in the terms
+        // dict cache
+        [Test]
+        public virtual void TestTotalTermFreqCached()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document d = new Document();
+            d.Add(NewTextField("f", "a a b", Field.Store.NO));
+            writer.AddDocument(d);
+            DirectoryReader r = writer.Reader;
+            writer.Dispose();
+            try
+            {
+                // Make sure codec impls totalTermFreq (eg PreFlex doesn't)
+                Assume.That(r.TotalTermFreq(new Term("f", new BytesRef("b"))) != -1);
+                Assert.AreEqual(1, r.TotalTermFreq(new Term("f", new BytesRef("b"))));
+                Assert.AreEqual(2, r.TotalTermFreq(new Term("f", new BytesRef("a"))));
+                Assert.AreEqual(1, r.TotalTermFreq(new Term("f", new BytesRef("b"))));
+            }
+            finally
+            {
+                r.Dispose();
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestGetSumDocFreq()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document d = new Document();
+            d.Add(NewTextField("f", "a", Field.Store.NO));
+            writer.AddDocument(d);
+            d = new Document();
+            d.Add(NewTextField("f", "b", Field.Store.NO));
+            writer.AddDocument(d);
+            DirectoryReader r = writer.Reader;
+            writer.Dispose();
+            try
+            {
+                // Make sure codec impls getSumDocFreq (eg PreFlex doesn't)
+                Assume.That(r.GetSumDocFreq("f") != -1);
+                Assert.AreEqual(2, r.GetSumDocFreq("f"));
+            }
+            finally
+            {
+                r.Dispose();
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestGetDocCount()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document d = new Document();
+            d.Add(NewTextField("f", "a", Field.Store.NO));
+            writer.AddDocument(d);
+            d = new Document();
+            d.Add(NewTextField("f", "a", Field.Store.NO));
+            writer.AddDocument(d);
+            DirectoryReader r = writer.Reader;
+            writer.Dispose();
+            try
+            {
+                // Make sure codec impls getDocCount (eg PreFlex doesn't)
+                Assume.That(r.GetDocCount("f") != -1);
+                Assert.AreEqual(2, r.GetDocCount("f"));
+            }
+            finally
+            {
+                r.Dispose();
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestGetSumTotalTermFreq()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document d = new Document();
+            d.Add(NewTextField("f", "a b b", Field.Store.NO));
+            writer.AddDocument(d);
+            d = new Document();
+            d.Add(NewTextField("f", "a a b", Field.Store.NO));
+            writer.AddDocument(d);
+            DirectoryReader r = writer.Reader;
+            writer.Dispose();
+            try
+            {
+                // Make sure codec impls getSumTotalTermFreq (eg PreFlex doesn't)
+                Assume.That(r.GetSumTotalTermFreq("f") != -1);
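+                // 2 docs x 3 tokens each = 6 total term occurrences in field "f".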
+                Assert.AreEqual(6, r.GetSumTotalTermFreq("f"));
+            }
+            finally
+            {
+                r.Dispose();
+                dir.Dispose();
+            }
+        }
+
+        // LUCENE-2474
+        [Test]
+        public virtual void TestReaderFinishedListener()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 3;
+            writer.AddDocument(new Document());
+            writer.Commit();
+            writer.AddDocument(new Document());
+            writer.Commit();
+            DirectoryReader reader = writer.Reader;
+            int[] closeCount = new int[1];
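+            // Single-element array serves as a mutable counter the anonymous listener can capture.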
+            IndexReader.IReaderClosedListener listener = new ReaderClosedListenerAnonymousInnerClassHelper(this, reader, closeCount);
+
+            reader.AddReaderClosedListener(listener);
+
+            reader.Dispose();
+
+            // Close the top reader; it's the only one that should be closed
+            Assert.AreEqual(1, closeCount[0]);
+            writer.Dispose();
+
+            DirectoryReader reader2 = DirectoryReader.Open(dir);
+            reader2.AddReaderClosedListener(listener);
+
+            closeCount[0] = 0;
+            reader2.Dispose();
+            Assert.AreEqual(1, closeCount[0]);
+            dir.Dispose();
+        }
+
+        private class ReaderClosedListenerAnonymousInnerClassHelper : IndexReader.IReaderClosedListener
+        {
+            private readonly TestDirectoryReader OuterInstance;
+
+            private DirectoryReader Reader;
+            private int[] CloseCount;
+
+            public ReaderClosedListenerAnonymousInnerClassHelper(TestDirectoryReader outerInstance, DirectoryReader reader, int[] closeCount)
+            {
+                this.OuterInstance = outerInstance;
+                this.Reader = reader;
+                this.CloseCount = closeCount;
+            }
+
+            public void OnClose(IndexReader reader)
+            {
+                CloseCount[0]++;
+            }
+        }
+
+        [Test]
+        public virtual void TestOOBDocID()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer.AddDocument(new Document());
+            DirectoryReader r = writer.Reader;
+            writer.Dispose();
+            r.Document(0);
+            try
+            {
+                r.Document(1);
+                Assert.Fail("did not hit exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestTryIncRef()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer.AddDocument(new Document());
+            writer.Commit();
+            DirectoryReader r = DirectoryReader.Open(dir);
+            Assert.IsTrue(r.TryIncRef());
+            r.DecRef();
+            r.Dispose();
+            Assert.IsFalse(r.TryIncRef());
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestStressTryIncRef()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer.AddDocument(new Document());
+            writer.Commit();
+            DirectoryReader r = DirectoryReader.Open(dir);
+            int numThreads = AtLeast(2);
+
+            IncThread[] threads = new IncThread[numThreads];
+            for (int i = 0; i < threads.Length; i++)
+            {
+                threads[i] = new IncThread(r, Random());
+                threads[i].Start();
+            }
+            Thread.Sleep(100);
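+            // While the IncThreads spin on TryIncRef/DecRef, the main thread must still
+            // be able to take (and release) a reference before the final Dispose().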
+
+            Assert.IsTrue(r.TryIncRef());
+            r.DecRef();
+            r.Dispose();
+
+            for (int i = 0; i < threads.Length; i++)
+            {
+                threads[i].Join();
+                Assert.IsNull(threads[i].Failed);
+            }
+            Assert.IsFalse(r.TryIncRef());
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        internal class IncThread : ThreadClass
+        {
+            internal readonly IndexReader ToInc;
+            internal readonly Random Random;
+            internal Exception Failed;
+
+            internal IncThread(IndexReader toInc, Random random)
+            {
+                this.ToInc = toInc;
+                this.Random = random;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    while (ToInc.TryIncRef())
+                    {
+                        Assert.IsFalse(ToInc.HasDeletions);
+                        ToInc.DecRef();
+                    }
+                    Assert.IsFalse(ToInc.TryIncRef());
+                }
+                catch (Exception e)
+                {
+                    Failed = e;
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestLoadCertainFields()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewStringField("field1", "foobar", Field.Store.YES));
+            doc.Add(NewStringField("field2", "foobaz", Field.Store.YES));
+            writer.AddDocument(doc);
+            DirectoryReader r = writer.Reader;
+            writer.Dispose();
+            HashSet<string> fieldsToLoad = new HashSet<string>();
+            Assert.AreEqual(0, r.Document(0, fieldsToLoad).Fields.Count);
+            fieldsToLoad.Add("field1");
+            Document doc2 = r.Document(0, fieldsToLoad);
+            Assert.AreEqual(1, doc2.Fields.Count);
+            Assert.AreEqual("foobar", doc2.Get("field1"));
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        /// @deprecated just to ensure IndexReader static methods work
+        [Obsolete("just to ensure IndexReader static methods work")]
+        [Test]
+        public virtual void TestBackwards()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat())));
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO));
+            doc.Add(NewTextField("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO));
+            writer.AddDocument(doc);
+
+            // open(IndexWriter, boolean)
+            DirectoryReader r = IndexReader.Open(writer, true);
+            Assert.AreEqual(1, r.DocFreq(new Term("field", "f")));
+            r.Dispose();
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            // open(Directory)
+            r = IndexReader.Open(dir);
+            Assert.AreEqual(2, r.DocFreq(new Term("field", "f")));
+            r.Dispose();
+
+            // open(IndexCommit)
+            IList<IndexCommit> commits = DirectoryReader.ListCommits(dir);
+            Assert.AreEqual(1, commits.Count);
+            r = IndexReader.Open(commits[0]);
+            Assert.AreEqual(2, r.DocFreq(new Term("field", "f")));
+            r.Dispose();
+
+            // open(Directory, int)
+            r = IndexReader.Open(dir, -1);
+            try
+            {
+                r.DocFreq(new Term("field", "f"));
+                Assert.Fail("did not hit expected exception");
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException ise)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            Assert.AreEqual(-1, ((SegmentReader)r.Leaves[0].Reader).TermInfosIndexDivisor);
+            r.Dispose();
+
+            // open(IndexCommit, int)
+            r = IndexReader.Open(commits[0], -1);
+            try
+            {
+                r.DocFreq(new Term("field", "f"));
+                Assert.Fail("did not hit expected exception");
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException ise)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            Assert.AreEqual(-1, ((SegmentReader)r.Leaves[0].Reader).TermInfosIndexDivisor);
+            r.Dispose();
+            dir.Dispose();
+        }
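+
+        // Note: outside of this backwards-compatibility test, the static
+        // IndexReader.Open overloads exercised above are obsolete; new code opens
+        // readers through the equivalent DirectoryReader.Open overloads instead.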
+
+        [Test]
+        public virtual void TestIndexExistsOnNonExistentDirectory()
+        {
+            DirectoryInfo tempDir = CreateTempDir("testIndexExistsOnNonExistentDirectory");
+            tempDir.Delete();
+            Directory dir = NewFSDirectory(tempDir);
+            Console.WriteLine("dir=" + dir);
+            Assert.IsFalse(DirectoryReader.IndexExists(dir));
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDirectoryReaderReopen.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDirectoryReaderReopen.cs b/src/Lucene.Net.Tests/Index/TestDirectoryReaderReopen.cs
new file mode 100644
index 0000000..bb52f58
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDirectoryReaderReopen.cs
@@ -0,0 +1,785 @@
+using Lucene.Net.Documents;
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.Threading;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using ScoreDoc = Lucene.Net.Search.ScoreDoc;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestDirectoryReaderReopen : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestReopen_Mem()
+        {
+            Directory dir1 = NewDirectory();
+
+            CreateIndex(Random(), dir1, false);
+            PerformDefaultTests(new TestReopenAnonymousInnerClassHelper(this, dir1));
+            dir1.Dispose();
+
+            Directory dir2 = NewDirectory();
+
+            CreateIndex(Random(), dir2, true);
+            PerformDefaultTests(new TestReopenAnonymousInnerClassHelper2(this, dir2));
+            dir2.Dispose();
+        }
+
+        private class TestReopenAnonymousInnerClassHelper : TestReopen
+        {
+            private readonly TestDirectoryReaderReopen OuterInstance;
+
+            private Directory Dir1;
+
+            public TestReopenAnonymousInnerClassHelper(TestDirectoryReaderReopen outerInstance, Directory dir1)
+            {
+                this.OuterInstance = outerInstance;
+                this.Dir1 = dir1;
+            }
+
+            protected internal override void ModifyIndex(int i)
+            {
+                TestDirectoryReaderReopen.ModifyIndex(i, Dir1);
+            }
+
+            protected internal override DirectoryReader OpenReader()
+            {
+                return DirectoryReader.Open(Dir1);
+            }
+        }
+
+        private class TestReopenAnonymousInnerClassHelper2 : TestReopen
+        {
+            private readonly TestDirectoryReaderReopen OuterInstance;
+
+            private Directory Dir2;
+
+            public TestReopenAnonymousInnerClassHelper2(TestDirectoryReaderReopen outerInstance, Directory dir2)
+            {
+                this.OuterInstance = outerInstance;
+                this.Dir2 = dir2;
+            }
+
+            protected internal override void ModifyIndex(int i)
+            {
+                TestDirectoryReaderReopen.ModifyIndex(i, Dir2);
+            }
+
+            protected internal override DirectoryReader OpenReader()
+            {
+                return DirectoryReader.Open(Dir2);
+            }
+        }
+
+        // LUCENE-1228: IndexWriter.Commit() does not update the index version.
+        // Populate an index in iterations; at the end of every iteration, commit
+        // the index and reopen/recreate the reader. In each iteration, verify the
+        // work of the previous iteration. Try this once with reopen and once with
+        // recreate, on both RAMDir and FSDir.
+        [Test]
+        public virtual void TestCommitReopen()
+        {
+            Directory dir = NewDirectory();
+            DoTestReopenWithCommit(Random(), dir, true);
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestCommitRecreate()
+        {
+            Directory dir = NewDirectory();
+            DoTestReopenWithCommit(Random(), dir, false);
+            dir.Dispose();
+        }
+
+        private void DoTestReopenWithCommit(Random random, Directory dir, bool withReopen)
+        {
+            IndexWriter iwriter = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetOpenMode(OpenMode.CREATE).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(NewLogMergePolicy()));
+            iwriter.Commit();
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            try
+            {
+                int M = 3;
+                FieldType customType = new FieldType(TextField.TYPE_STORED);
+                customType.IsTokenized = false;
+                FieldType customType2 = new FieldType(TextField.TYPE_STORED);
+                customType2.IsTokenized = false;
+                customType2.OmitNorms = true;
+                FieldType customType3 = new FieldType();
+                customType3.IsStored = true;
+                for (int i = 0; i < 4; i++)
+                {
+                    for (int j = 0; j < M; j++)
+                    {
+                        Document doc = new Document();
+                        doc.Add(NewField("id", i + "_" + j, customType));
+                        doc.Add(NewField("id2", i + "_" + j, customType2));
+                        doc.Add(NewField("id3", i + "_" + j, customType3));
+                        iwriter.AddDocument(doc);
+                        if (i > 0)
+                        {
+                            int k = i - 1;
+                            int n = j + k * M;
+                            Document prevIterationDoc = reader.Document(n);
+                            Assert.IsNotNull(prevIterationDoc);
+                            string id = prevIterationDoc.Get("id");
+                            Assert.AreEqual(k + "_" + j, id);
+                        }
+                    }
+                    iwriter.Commit();
+                    if (withReopen)
+                    {
+                        // reopen
+                        DirectoryReader r2 = DirectoryReader.OpenIfChanged(reader);
+                        if (r2 != null)
+                        {
+                            reader.Dispose();
+                            reader = r2;
+                        }
+                    }
+                    else
+                    {
+                        // recreate
+                        reader.Dispose();
+                        reader = DirectoryReader.Open(dir);
+                    }
+                }
+            }
+            finally
+            {
+                iwriter.Dispose();
+                reader.Dispose();
+            }
+        }
+
+        private void PerformDefaultTests(TestReopen test)
+        {
+            DirectoryReader index1 = test.OpenReader();
+            DirectoryReader index2 = test.OpenReader();
+
+            TestDirectoryReader.AssertIndexEquals(index1, index2);
+
+            // verify that reopen() does not return a new reader instance
+            // in case the index has no changes
+            ReaderCouple couple = RefreshReader(index2, false);
+            Assert.IsTrue(couple.RefreshedReader == index2);
+
+            couple = RefreshReader(index2, test, 0, true);
+            index1.Dispose();
+            index1 = couple.NewReader;
+
+            DirectoryReader index2_refreshed = couple.RefreshedReader;
+            index2.Dispose();
+
+            // test if refreshed reader and newly opened reader return equal results
+            TestDirectoryReader.AssertIndexEquals(index1, index2_refreshed);
+
+            index2_refreshed.Dispose();
+            AssertReaderClosed(index2, true);
+            AssertReaderClosed(index2_refreshed, true);
+
+            index2 = test.OpenReader();
+
+            for (int i = 1; i < 4; i++)
+            {
+                index1.Dispose();
+                couple = RefreshReader(index2, test, i, true);
+                // refresh DirectoryReader
+                index2.Dispose();
+
+                index2 = couple.RefreshedReader;
+                index1 = couple.NewReader;
+                TestDirectoryReader.AssertIndexEquals(index1, index2);
+            }
+
+            index1.Dispose();
+            index2.Dispose();
+            AssertReaderClosed(index1, true);
+            AssertReaderClosed(index2, true);
+        }
+
+        [Test]
+        public virtual void TestThreadSafety()
+        {
+            Directory dir = NewDirectory();
+            // NOTE: this also controls the number of threads!
+            int n = TestUtil.NextInt(Random(), 20, 40);
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            for (int i = 0; i < n; i++)
+            {
+                writer.AddDocument(CreateDocument(i, 3));
+            }
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            TestReopen test = new TestReopenAnonymousInnerClassHelper3(this, dir, n);
+
+            IList<ReaderCouple> readers = new SynchronizedList<ReaderCouple>();
+            DirectoryReader firstReader = DirectoryReader.Open(dir);
+            DirectoryReader reader = firstReader;
+
+            ReaderThread[] threads = new ReaderThread[n];
+            ISet<DirectoryReader> readersToClose = new ConcurrentHashSet<DirectoryReader>(new HashSet<DirectoryReader>());
+
+            for (int i = 0; i < n; i++)
+            {
+                if (i % 2 == 0)
+                {
+                    DirectoryReader refreshed = DirectoryReader.OpenIfChanged(reader);
+                    if (refreshed != null)
+                    {
+                        readersToClose.Add(reader);
+                        reader = refreshed;
+                    }
+                }
+                DirectoryReader r = reader;
+
+                int index = i;
+
+                ReaderThreadTask task;
+
+                if (i < 4 || (i >= 10 && i < 14) || i > 18)
+                {
+                    task = new ReaderThreadTaskAnonymousInnerClassHelper(this, test, readers, readersToClose, r, index);
+                }
+                else
+                {
+                    task = new ReaderThreadTaskAnonymousInnerClassHelper2(this, readers);
+                }
+
+                threads[i] = new ReaderThread(task);
+                threads[i].Start();
+            }
+
+            lock (this)
+            {
+                Monitor.Wait(this, TimeSpan.FromMilliseconds(1000));
+            }
+
+            for (int i = 0; i < n; i++)
+            {
+                if (threads[i] != null)
+                {
+                    threads[i].StopThread();
+                }
+            }
+
+            for (int i = 0; i < n; i++)
+            {
+                if (threads[i] != null)
+                {
+                    threads[i].Join();
+                    if (threads[i].Error != null)
+                    {
+                        string msg = "Error occurred in thread " + threads[i].Name + ":\n" + threads[i].Error.Message;
+                        Assert.Fail(msg);
+                    }
+                }
+            }
+
+            foreach (DirectoryReader readerToClose in readersToClose)
+            {
+                readerToClose.Dispose();
+            }
+
+            firstReader.Dispose();
+            reader.Dispose();
+
+            foreach (DirectoryReader readerToClose in readersToClose)
+            {
+                AssertReaderClosed(readerToClose, true);
+            }
+
+            AssertReaderClosed(reader, true);
+            AssertReaderClosed(firstReader, true);
+
+            dir.Dispose();
+        }
+
+        private class TestReopenAnonymousInnerClassHelper3 : TestReopen
+        {
+            private readonly TestDirectoryReaderReopen OuterInstance;
+
+            private Directory Dir;
+            private int n;
+
+            public TestReopenAnonymousInnerClassHelper3(TestDirectoryReaderReopen outerInstance, Directory dir, int n)
+            {
+                this.OuterInstance = outerInstance;
+                this.Dir = dir;
+                this.n = n;
+            }
+
+            protected internal override void ModifyIndex(int i)
+            {
+                IndexWriter modifier = new IndexWriter(Dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                modifier.AddDocument(CreateDocument(n + i, 6));
+                modifier.Dispose();
+            }
+
+            protected internal override DirectoryReader OpenReader()
+            {
+                return DirectoryReader.Open(Dir);
+            }
+        }
+
+        private class ReaderThreadTaskAnonymousInnerClassHelper : ReaderThreadTask
+        {
+            private readonly TestDirectoryReaderReopen OuterInstance;
+
+            private Lucene.Net.Index.TestDirectoryReaderReopen.TestReopen Test;
+            private IList<ReaderCouple> Readers;
+            private ISet<DirectoryReader> ReadersToClose;
+            private DirectoryReader r;
+            private int Index;
+
+            public ReaderThreadTaskAnonymousInnerClassHelper(TestDirectoryReaderReopen outerInstance, Lucene.Net.Index.TestDirectoryReaderReopen.TestReopen test, IList<ReaderCouple> readers, ISet<DirectoryReader> readersToClose, DirectoryReader r, int index)
+            {
+                this.OuterInstance = outerInstance;
+                this.Test = test;
+                this.Readers = readers;
+                this.ReadersToClose = readersToClose;
+                this.r = r;
+                this.Index = index;
+            }
+
+            public override void Run()
+            {
+                Random rnd = LuceneTestCase.Random();
+                while (!Stopped)
+                {
+                    if (Index % 2 == 0)
+                    {
+                        // refresh reader synchronized
+                        ReaderCouple c = (OuterInstance.RefreshReader(r, Test, Index, true));
+                        ReadersToClose.Add(c.NewReader);
+                        ReadersToClose.Add(c.RefreshedReader);
+                        Readers.Add(c);
+                        // prevent too many readers
+                        break;
+                    }
+                    else
+                    {
+                        // not synchronized
+                        DirectoryReader refreshed = DirectoryReader.OpenIfChanged(r);
+                        if (refreshed == null)
+                        {
+                            refreshed = r;
+                        }
+
+                        IndexSearcher searcher = OuterInstance.NewSearcher(refreshed);
+                        ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("field1", "a" + rnd.Next(refreshed.MaxDoc))), null, 1000).ScoreDocs;
+                        if (hits.Length > 0)
+                        {
+                            searcher.Doc(hits[0].Doc);
+                        }
+                        if (refreshed != r)
+                        {
+                            refreshed.Dispose();
+                        }
+                    }
+                    lock (this)
+                    {
+                        Monitor.Wait(this, TimeSpan.FromMilliseconds(TestUtil.NextInt(Random(), 1, 100)));
+                    }
+                }
+            }
+        }
+
+        private class ReaderThreadTaskAnonymousInnerClassHelper2 : ReaderThreadTask
+        {
+            private readonly TestDirectoryReaderReopen OuterInstance;
+
+            private IList<ReaderCouple> Readers;
+
+            public ReaderThreadTaskAnonymousInnerClassHelper2(TestDirectoryReaderReopen outerInstance, IList<ReaderCouple> readers)
+            {
+                this.OuterInstance = outerInstance;
+                this.Readers = readers;
+            }
+
+            public override void Run()
+            {
+                Random rnd = LuceneTestCase.Random();
+                while (!Stopped)
+                {
+                    int numReaders = Readers.Count;
+                    if (numReaders > 0)
+                    {
+                        ReaderCouple c = Readers[rnd.Next(numReaders)];
+                        TestDirectoryReader.AssertIndexEquals(c.NewReader, c.RefreshedReader);
+                    }
+
+                    lock (this)
+                    {
+                        Monitor.Wait(this, TimeSpan.FromMilliseconds(TestUtil.NextInt(Random(), 1, 100)));
+                    }
+                }
+            }
+        }
+
+        internal class ReaderCouple
+        {
+            internal ReaderCouple(DirectoryReader r1, DirectoryReader r2)
+            {
+                NewReader = r1;
+                RefreshedReader = r2;
+            }
+
+            internal DirectoryReader NewReader;
+            internal DirectoryReader RefreshedReader;
+        }
+
+        internal abstract class ReaderThreadTask
+        {
+            protected internal volatile bool Stopped;
+
+            public virtual void Stop()
+            {
+                this.Stopped = true;
+            }
+
+            public abstract void Run();
+        }
+
+        private class ReaderThread : ThreadClass
+        {
+            internal ReaderThreadTask Task;
+            internal Exception Error;
+
+            internal ReaderThread(ReaderThreadTask task)
+            {
+                this.Task = task;
+            }
+
+            public virtual void StopThread()
+            {
+                this.Task.Stop();
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    this.Task.Run();
+                }
+                catch (Exception r)
+                {
+                    Console.WriteLine(r.StackTrace);
+                    this.Error = r;
+                }
+            }
+        }
+
+        private object CreateReaderMutex = new object();
+
+        private ReaderCouple RefreshReader(DirectoryReader reader, bool hasChanges)
+        {
+            return RefreshReader(reader, null, -1, hasChanges);
+        }
+
+        internal virtual ReaderCouple RefreshReader(DirectoryReader reader, TestReopen test, int modify, bool hasChanges)
+        {
+            lock (CreateReaderMutex)
+            {
+                DirectoryReader r = null;
+                if (test != null)
+                {
+                    test.ModifyIndex(modify);
+                    r = test.OpenReader();
+                }
+
+                DirectoryReader refreshed = null;
+                try
+                {
+                    refreshed = DirectoryReader.OpenIfChanged(reader);
+                    if (refreshed == null)
+                    {
+                        refreshed = reader;
+                    }
+                }
+                finally
+                {
+                    if (refreshed == null && r != null)
+                    {
+                        // Hit exception -- close opened reader
+                        r.Dispose();
+                    }
+                }
+
+                if (hasChanges)
+                {
+                    if (refreshed == reader)
+                    {
+                        Assert.Fail("No new DirectoryReader instance created during refresh.");
+                    }
+                }
+                else
+                {
+                    if (refreshed != reader)
+                    {
+                        Assert.Fail("New DirectoryReader instance created during refresh even though index had no changes.");
+                    }
+                }
+
+                return new ReaderCouple(r, refreshed);
+            }
+        }
+
+        /// <summary>
+        /// LUCENENET specific:
+        /// this method is non-static because NewIndexWriterConfig is no longer static.
+        /// </summary>
+        public void CreateIndex(Random random, Directory dir, bool multiSegment)
+        {
+            IndexWriter.Unlock(dir);
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetMergePolicy(new LogDocMergePolicy()));
+
+            for (int i = 0; i < 100; i++)
+            {
+                w.AddDocument(CreateDocument(i, 4));
+                if (multiSegment && (i % 10) == 0)
+                {
+                    w.Commit();
+                }
+            }
+
+            if (!multiSegment)
+            {
+                w.ForceMerge(1);
+            }
+
+            w.Dispose();
+
+            DirectoryReader r = DirectoryReader.Open(dir);
+            if (multiSegment)
+            {
+                Assert.IsTrue(r.Leaves.Count > 1);
+            }
+            else
+            {
+                Assert.IsTrue(r.Leaves.Count == 1);
+            }
+            r.Dispose();
+        }
+
+        public static Document CreateDocument(int n, int numFields)
+        {
+            StringBuilder sb = new StringBuilder();
+            Document doc = new Document();
+            sb.Append("a");
+            sb.Append(n);
+            FieldType customType2 = new FieldType(TextField.TYPE_STORED);
+            customType2.IsTokenized = false;
+            customType2.OmitNorms = true;
+            FieldType customType3 = new FieldType();
+            customType3.IsStored = true;
+            doc.Add(new TextField("field1", sb.ToString(), Field.Store.YES));
+            doc.Add(new Field("fielda", sb.ToString(), customType2));
+            doc.Add(new Field("fieldb", sb.ToString(), customType3));
+            sb.Append(" b");
+            sb.Append(n);
+            for (int i = 1; i < numFields; i++)
+            {
+                doc.Add(new TextField("field" + (i + 1), sb.ToString(), Field.Store.YES));
+            }
+            return doc;
+        }
+
+        internal static void ModifyIndex(int i, Directory dir)
+        {
+            switch (i)
+            {
+                case 0:
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: modify index");
+                        }
+                        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                        w.DeleteDocuments(new Term("field2", "a11"));
+                        w.DeleteDocuments(new Term("field2", "b30"));
+                        w.Dispose();
+                        break;
+                    }
+                case 1:
+                    {
+                        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                        w.ForceMerge(1);
+                        w.Dispose();
+                        break;
+                    }
+                case 2:
+                    {
+                        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                        w.AddDocument(CreateDocument(101, 4));
+                        w.ForceMerge(1);
+                        w.AddDocument(CreateDocument(102, 4));
+                        w.AddDocument(CreateDocument(103, 4));
+                        w.Dispose();
+                        break;
+                    }
+                case 3:
+                    {
+                        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                        w.AddDocument(CreateDocument(101, 4));
+                        w.Dispose();
+                        break;
+                    }
+            }
+        }
+
+        internal static void AssertReaderClosed(IndexReader reader, bool checkSubReaders)
+        {
+            Assert.AreEqual(0, reader.RefCount);
+
+            if (checkSubReaders && reader is CompositeReader)
+            {
+                // we cannot use the reader context here, as the reader is
+                // already closed and calling getTopReaderContext() throws AlreadyClosedException!
+                IList<IndexReader> subReaders = ((CompositeReader)reader).GetSequentialSubReaders();
+                foreach (IndexReader r in subReaders)
+                {
+                    AssertReaderClosed(r, checkSubReaders);
+                }
+            }
+        }
+
+        internal abstract class TestReopen
+        {
+            protected internal abstract DirectoryReader OpenReader();
+
+            protected internal abstract void ModifyIndex(int i);
+        }
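+
+        // A minimal sketch (not part of the original tests) of the reopen idiom the
+        // tests above exercise: DirectoryReader.OpenIfChanged returns null when the
+        // index is unchanged and a new reader otherwise, in which case the caller is
+        // responsible for disposing the old instance.
+        private static DirectoryReader ReopenOrKeep(DirectoryReader current)
+        {
+            DirectoryReader changed = DirectoryReader.OpenIfChanged(current);
+            if (changed == null)
+            {
+                return current; // no changes; keep the existing reader
+            }
+            current.Dispose(); // a new reader was returned; release the old one
+            return changed;
+        }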
+
+        internal class KeepAllCommits : IndexDeletionPolicy
+        {
+            public override void OnInit<T>(IList<T> commits)
+            {
+            }
+
+            public override void OnCommit<T>(IList<T> commits)
+            {
+            }
+        }
+
+        [Test]
+        public virtual void TestReopenOnCommit()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(new KeepAllCommits()).SetMaxBufferedDocs(-1).SetMergePolicy(NewLogMergePolicy(10)));
+            for (int i = 0; i < 4; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("id", "" + i, Field.Store.NO));
+                writer.AddDocument(doc);
+                IDictionary<string, string> data = new Dictionary<string, string>();
+                data["index"] = i + "";
+                writer.CommitData = data;
+                writer.Commit();
+            }
+            for (int i = 0; i < 4; i++)
+            {
+                writer.DeleteDocuments(new Term("id", "" + i));
+                IDictionary<string, string> data = new Dictionary<string, string>();
+                data["index"] = (4 + i) + "";
+                writer.CommitData = data;
+                writer.Commit();
+            }
+            writer.Dispose();
+
+            DirectoryReader r = DirectoryReader.Open(dir);
+            Assert.AreEqual(0, r.NumDocs);
+
+            ICollection<IndexCommit> commits = DirectoryReader.ListCommits(dir);
+            foreach (IndexCommit commit in commits)
+            {
+                DirectoryReader r2 = DirectoryReader.OpenIfChanged(r, commit);
+                Assert.IsNotNull(r2);
+                Assert.IsTrue(r2 != r);
+
+                IDictionary<string, string> s = commit.UserData;
+                int v;
+                if (s.Count == 0)
+                {
+                    // First commit created by IW
+                    v = -1;
+                }
+                else
+                {
+                    v = Convert.ToInt32(s["index"]);
+                }
+                if (v < 4)
+                {
+                    Assert.AreEqual(1 + v, r2.NumDocs);
+                }
+                else
+                {
+                    Assert.AreEqual(7 - v, r2.NumDocs);
+                }
+                r.Dispose();
+                r = r2;
+            }
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestOpenIfChangedNRTToCommit()
+        {
+            Directory dir = NewDirectory();
+
+            // Can't use RIW because it randomly commits:
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(NewStringField("field", "value", Field.Store.NO));
+            w.AddDocument(doc);
+            w.Commit();
+            IList<IndexCommit> commits = DirectoryReader.ListCommits(dir);
+            Assert.AreEqual(1, commits.Count);
+            w.AddDocument(doc);
+            DirectoryReader r = DirectoryReader.Open(w, true);
+
+            Assert.AreEqual(2, r.NumDocs);
+            IndexReader r2 = DirectoryReader.OpenIfChanged(r, commits[0]);
+            Assert.IsNotNull(r2);
+            r.Dispose();
+            Assert.AreEqual(1, r2.NumDocs);
+            w.Dispose();
+            r2.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDocValuesIndexing.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDocValuesIndexing.cs b/src/Lucene.Net.Tests/Index/TestDocValuesIndexing.cs
new file mode 100644
index 0000000..f46110b
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDocValuesIndexing.cs
@@ -0,0 +1,982 @@
+using System;
+using System.Threading;
+using Lucene.Net.Documents;
+using Lucene.Net.Search;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using BinaryDocValuesField = BinaryDocValuesField;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using NumericDocValuesField = NumericDocValuesField;
+    using SortedDocValuesField = SortedDocValuesField;
+    using SortedSetDocValuesField = SortedSetDocValuesField;
+    using StringField = StringField;
+    using TextField = TextField;
+
+    /// <summary>
+    /// Tests DocValues integration into IndexWriter.
+    /// </summary>
+    [SuppressCodecs("Lucene3x")]
+    [TestFixture]
+    public class TestDocValuesIndexing : LuceneTestCase
+    {
+        /*
+         * - add test for multi segment case with deletes
+         * - add multithreaded tests / integrate into stress indexing?
+         */
+
+        [Test]
+        public virtual void TestAddIndexes()
+        {
+            Directory d1 = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d1, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewStringField("id", "1", Field.Store.YES));
+            doc.Add(new NumericDocValuesField("dv", 1));
+            w.AddDocument(doc);
+            IndexReader r1 = w.Reader;
+            w.Dispose();
+
+            Directory d2 = NewDirectory();
+            w = new RandomIndexWriter(Random(), d2, Similarity, TimeZone);
+            doc = new Document();
+            doc.Add(NewStringField("id", "2", Field.Store.YES));
+            doc.Add(new NumericDocValuesField("dv", 2));
+            w.AddDocument(doc);
+            IndexReader r2 = w.Reader;
+            w.Dispose();
+
+            Directory d3 = NewDirectory();
+            w = new RandomIndexWriter(Random(), d3, Similarity, TimeZone);
+            w.AddIndexes(SlowCompositeReaderWrapper.Wrap(r1), SlowCompositeReaderWrapper.Wrap(r2));
+            r1.Dispose();
+            d1.Dispose();
+            r2.Dispose();
+            d2.Dispose();
+
+            w.ForceMerge(1);
+            DirectoryReader r3 = w.Reader;
+            w.Dispose();
+            AtomicReader sr = GetOnlySegmentReader(r3);
+            Assert.AreEqual(2, sr.NumDocs);
+            NumericDocValues docValues = sr.GetNumericDocValues("dv");
+            Assert.IsNotNull(docValues);
+            r3.Dispose();
+            d3.Dispose();
+        }
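+
+        // A minimal sketch (not part of the original test) of reading back the merged
+        // numeric doc values asserted above: NumericDocValues.Get(docID) returns the
+        // per-document long value; returning 0 for a missing field is our own choice here.
+        private static long ReadDocValue(AtomicReader reader, string field, int docID)
+        {
+            NumericDocValues values = reader.GetNumericDocValues(field); // null if the field has no doc values
+            return values == null ? 0 : values.Get(docID);
+        }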
+
+        [Test]
+        public virtual void TestMultiValuedDocValuesField()
+        {
+            Directory d = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
+            Document doc = new Document();
+            Field f = new NumericDocValuesField("field", 17);
+            // Index doc values are single-valued, so we should not
+            // be able to add the same field more than once:
+            doc.Add(f);
+            doc.Add(f);
+            try
+            {
+                w.AddDocument(doc);
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            doc = new Document();
+            doc.Add(f);
+            w.AddDocument(doc);
+            w.ForceMerge(1);
+            DirectoryReader r = w.Reader;
+            w.Dispose();
+            Assert.AreEqual(17, FieldCache.DEFAULT.GetInt32s(GetOnlySegmentReader(r), "field", false).Get(0));
+            r.Dispose();
+            d.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDifferentTypedDocValuesField()
+        {
+            Directory d = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
+            Document doc = new Document();
+            // Index doc values are single-valued, so we should not
+            // be able to add the same field more than once:
+            Field f;
+            doc.Add(f = new NumericDocValuesField("field", 17));
+            doc.Add(new BinaryDocValuesField("field", new BytesRef("blah")));
+            try
+            {
+                w.AddDocument(doc);
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            doc = new Document();
+            doc.Add(f);
+            w.AddDocument(doc);
+            w.ForceMerge(1);
+            DirectoryReader r = w.Reader;
+            w.Dispose();
+            Assert.AreEqual(17, FieldCache.DEFAULT.GetInt32s(GetOnlySegmentReader(r), "field", false).Get(0));
+            r.Dispose();
+            d.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDifferentTypedDocValuesField2()
+        {
+            Directory d = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
+            Document doc = new Document();
+            // Index doc values are single-valued, so we should not
+            // be able to add the same field more than once:
+            Field f = new NumericDocValuesField("field", 17);
+            doc.Add(f);
+            doc.Add(new SortedDocValuesField("field", new BytesRef("hello")));
+            try
+            {
+                w.AddDocument(doc);
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            doc = new Document();
+            doc.Add(f);
+            w.AddDocument(doc);
+            w.ForceMerge(1);
+            DirectoryReader r = w.Reader;
+            Assert.AreEqual(17, GetOnlySegmentReader(r).GetNumericDocValues("field").Get(0));
+            r.Dispose();
+            w.Dispose();
+            d.Dispose();
+        }
+
+        // LUCENE-3870
+        [Test]
+        public virtual void TestLengthPrefixAcrossTwoPages()
+        {
+            Directory d = NewDirectory();
+            IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            var bytes = new byte[32764];
+            BytesRef b = new BytesRef();
+            b.Bytes = bytes;
+            b.Length = bytes.Length;
+            doc.Add(new SortedDocValuesField("field", b));
+            w.AddDocument(doc);
+            bytes[0] = 1;
+            w.AddDocument(doc);
+            w.ForceMerge(1);
+            DirectoryReader r = w.Reader;
+            BinaryDocValues s = FieldCache.DEFAULT.GetTerms(GetOnlySegmentReader(r), "field", false);
+
+            BytesRef bytes1 = new BytesRef();
+            s.Get(0, bytes1);
+            Assert.AreEqual(bytes.Length, bytes1.Length);
+            bytes[0] = 0;
+            Assert.AreEqual(b, bytes1);
+
+            s.Get(1, bytes1);
+            Assert.AreEqual(bytes.Length, bytes1.Length);
+            bytes[0] = 1;
+            Assert.AreEqual(b, bytes1);
+            r.Dispose();
+            w.Dispose();
+            d.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDocValuesUnstored()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwconfig.SetMergePolicy(NewLogMergePolicy());
+            IndexWriter writer = new IndexWriter(dir, iwconfig);
+            for (int i = 0; i < 50; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new NumericDocValuesField("dv", i));
+                doc.Add(new TextField("docId", "" + i, Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            DirectoryReader r = writer.Reader;
+            AtomicReader slow = SlowCompositeReaderWrapper.Wrap(r);
+            FieldInfos fi = slow.FieldInfos;
+            FieldInfo dvInfo = fi.FieldInfo("dv");
+            Assert.IsTrue(dvInfo.HasDocValues);
+            NumericDocValues dv = slow.GetNumericDocValues("dv");
+            for (int i = 0; i < 50; i++)
+            {
+                Assert.AreEqual(i, dv.Get(i));
+                Document d = slow.Document(i);
+                // cannot use d.Get("dv") due to another bug!
+                Assert.IsNull(d.GetField("dv"));
+                Assert.AreEqual(Convert.ToString(i), d.Get("docId"));
+            }
+            slow.Dispose();
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        // Same field in one document as different types:
+        [Test]
+        public virtual void TestMixedTypesSameDocument()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("foo", 0));
+            doc.Add(new SortedDocValuesField("foo", new BytesRef("hello")));
+            try
+            {
+                w.AddDocument(doc);
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        // Two documents with same field as different types:
+        [Test]
+        public virtual void TestMixedTypesDifferentDocuments()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("foo", 0));
+            w.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("foo", new BytesRef("hello")));
+            try
+            {
+                w.AddDocument(doc);
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestAddSortedTwice()
+        {
+            Analyzer analyzer = new MockAnalyzer(Random());
+
+            Directory directory = NewDirectory();
+            // we don't use RandomIndexWriter because it might add more doc values than we expect!
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            IndexWriter iwriter = new IndexWriter(directory, iwc);
+            Document doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo!")));
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("bar!")));
+            try
+            {
+                iwriter.AddDocument(doc);
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            iwriter.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestAddBinaryTwice()
+        {
+            Analyzer analyzer = new MockAnalyzer(Random());
+
+            Directory directory = NewDirectory();
+            // we don't use RandomIndexWriter because it might add more doc values than we expect!
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            IndexWriter iwriter = new IndexWriter(directory, iwc);
+            Document doc = new Document();
+            doc.Add(new BinaryDocValuesField("dv", new BytesRef("foo!")));
+            doc.Add(new BinaryDocValuesField("dv", new BytesRef("bar!")));
+            try
+            {
+                iwriter.AddDocument(doc);
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            iwriter.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestAddNumericTwice()
+        {
+            Analyzer analyzer = new MockAnalyzer(Random());
+
+            Directory directory = NewDirectory();
+            // we don't use RandomIndexWriter because it might add more doc values than we expect!
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            IndexWriter iwriter = new IndexWriter(directory, iwc);
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 1));
+            doc.Add(new NumericDocValuesField("dv", 2));
+            try
+            {
+                iwriter.AddDocument(doc);
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            iwriter.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestTooLargeSortedBytes()
+        {
+            Analyzer analyzer = new MockAnalyzer(Random());
+
+            Directory directory = NewDirectory();
+            // we don't use RandomIndexWriter because it might add more doc values than we expect!
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            IndexWriter iwriter = new IndexWriter(directory, iwc);
+            Document doc = new Document();
+            var bytes = new byte[100000];
+            BytesRef b = new BytesRef(bytes);
+            Random().NextBytes(bytes);
+            doc.Add(new SortedDocValuesField("dv", b));
+            try
+            {
+                iwriter.AddDocument(doc);
+                Assert.Fail("did not get expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            iwriter.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestTooLargeTermSortedSetBytes()
+        {
+            AssumeTrue("codec does not support SORTED_SET", DefaultCodecSupportsSortedSet());
+            Analyzer analyzer = new MockAnalyzer(Random());
+
+            Directory directory = NewDirectory();
+            // we don't use RandomIndexWriter because it might add more doc values than we expect!
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            IndexWriter iwriter = new IndexWriter(directory, iwc);
+            Document doc = new Document();
+            byte[] bytes = new byte[100000];
+            BytesRef b = new BytesRef(bytes);
+            Random().NextBytes(bytes);
+            doc.Add(new SortedSetDocValuesField("dv", b));
+            try
+            {
+                iwriter.AddDocument(doc);
+                Assert.Fail("did not get expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            iwriter.Dispose();
+            directory.Dispose();
+        }
+
+        // Two documents across segments
+        [Test]
+        public virtual void TestMixedTypesDifferentSegments()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("foo", 0));
+            w.AddDocument(doc);
+            w.Commit();
+
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("foo", new BytesRef("hello")));
+            try
+            {
+                w.AddDocument(doc);
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        // Add inconsistent document after deleteAll
+        [Test]
+        public virtual void TestMixedTypesAfterDeleteAll()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("foo", 0));
+            w.AddDocument(doc);
+            w.DeleteAll();
+
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("foo", new BytesRef("hello")));
+            w.AddDocument(doc);
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        // Add inconsistent document after reopening IW w/ create
+        [Test]
+        public virtual void TestMixedTypesAfterReopenCreate()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("foo", 0));
+            w.AddDocument(doc);
+            w.Dispose();
+
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetOpenMode(OpenMode.CREATE);
+            w = new IndexWriter(dir, iwc);
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("foo", new BytesRef("hello")));
+            w.AddDocument(doc);
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        // Two documents with same field as different types, added
+        // from separate threads:
+        [Test]
+        public virtual void TestMixedTypesDifferentThreads()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            CountdownEvent startingGun = new CountdownEvent(1);
+            AtomicBoolean hitExc = new AtomicBoolean();
+            ThreadClass[] threads = new ThreadClass[3];
+            for (int i = 0; i < 3; i++)
+            {
+                Field field;
+                if (i == 0)
+                {
+                    field = new SortedDocValuesField("foo", new BytesRef("hello"));
+                }
+                else if (i == 1)
+                {
+                    field = new NumericDocValuesField("foo", 0);
+                }
+                else
+                {
+                    field = new BinaryDocValuesField("foo", new BytesRef("bazz"));
+                }
+                Document doc = new Document();
+                doc.Add(field);
+
+                threads[i] = new ThreadAnonymousInnerClassHelper(this, w, startingGun, hitExc, doc);
+                threads[i].Start();
+            }
+
+            startingGun.Signal();
+
+            foreach (ThreadClass t in threads)
+            {
+                t.Join();
+            }
+            Assert.IsTrue(hitExc.Get());
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestDocValuesIndexing OuterInstance;
+
+            private IndexWriter w;
+            private CountdownEvent StartingGun;
+            private AtomicBoolean HitExc;
+            private Document Doc;
+
+            public ThreadAnonymousInnerClassHelper(TestDocValuesIndexing outerInstance, IndexWriter w, CountdownEvent startingGun, AtomicBoolean hitExc, Document doc)
+            {
+                this.OuterInstance = outerInstance;
+                this.w = w;
+                this.StartingGun = startingGun;
+                this.HitExc = hitExc;
+                this.Doc = doc;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    StartingGun.Wait();
+                    w.AddDocument(Doc);
+                }
+#pragma warning disable 168
+                catch (System.ArgumentException iae)
+#pragma warning restore 168
+                {
+                    // expected
+                    HitExc.Set(true);
+                }
+                catch (Exception e)
+                {
+                    throw new Exception(e.Message, e);
+                }
+            }
+        }
+
+        // Adding documents via addIndexes
+        [Test]
+        public virtual void TestMixedTypesViaAddIndexes()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("foo", 0));
+            w.AddDocument(doc);
+
+            // Make 2nd index w/ inconsistent field
+            Directory dir2 = NewDirectory();
+            IndexWriter w2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("foo", new BytesRef("hello")));
+            w2.AddDocument(doc);
+            w2.Dispose();
+
+            try
+            {
+                w.AddIndexes(dir2);
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            IndexReader r = DirectoryReader.Open(dir2);
+            try
+            {
+                w.AddIndexes(new IndexReader[] { r });
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            r.Dispose();
+            dir2.Dispose();
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIllegalTypeChange()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            try
+            {
+                writer.AddDocument(doc);
+                Assert.Fail("did not hit exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            writer.Dispose();
+            dir.Dispose();
+        }
+
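+        // The doc values type for a field is also enforced across segments and
+        // writer sessions.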
+        [Test]
+        public virtual void TestIllegalTypeChangeAcrossSegments()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            try
+            {
+                writer.AddDocument(doc);
+                Assert.Fail("did not hit exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            writer.Dispose();
+            dir.Dispose();
+        }
+
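+        // DeleteAll drops all segments along with the field type map, so
+        // re-adding the field with a different doc values type is legal.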
+        [Test]
+        public virtual void TestTypeChangeAfterCloseAndDeleteAll()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            writer.DeleteAll();
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            writer.AddDocument(doc);
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestTypeChangeAfterDeleteAll()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            writer.DeleteAll();
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            writer.AddDocument(doc);
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestTypeChangeAfterCommitAndDeleteAll()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            writer.Commit();
+            writer.DeleteAll();
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            writer.AddDocument(doc);
+            writer.Dispose();
+            dir.Dispose();
+        }
+
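+        // Reopening with OpenMode.CREATE discards the old index, so the type
+        // change succeeds.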
+        [Test]
+        public virtual void TestTypeChangeAfterOpenCreate()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            writer.Dispose();
+            conf.SetOpenMode(OpenMode.CREATE);
+            writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            writer.AddDocument(doc);
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestTypeChangeViaAddIndexes()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            Directory dir2 = NewDirectory();
+            writer = new IndexWriter(dir2, (IndexWriterConfig)conf.Clone());
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            writer.AddDocument(doc);
+            try
+            {
+                writer.AddIndexes(dir);
+                Assert.Fail("did not hit exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            writer.Dispose();
+
+            dir.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestTypeChangeViaAddIndexesIR()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            Directory dir2 = NewDirectory();
+            writer = new IndexWriter(dir2, (IndexWriterConfig)conf.Clone());
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            writer.AddDocument(doc);
+            IndexReader[] readers = new IndexReader[] { DirectoryReader.Open(dir) };
+            try
+            {
+                writer.AddIndexes(readers);
+                Assert.Fail("did not hit exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            readers[0].Dispose();
+            writer.Dispose();
+
+            dir.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestTypeChangeViaAddIndexes2()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            Directory dir2 = NewDirectory();
+            writer = new IndexWriter(dir2, (IndexWriterConfig)conf.Clone());
+            writer.AddIndexes(dir);
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            try
+            {
+                writer.AddDocument(doc);
+                Assert.Fail("did not hit exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            writer.Dispose();
+            dir2.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestTypeChangeViaAddIndexesIR2()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            Directory dir2 = NewDirectory();
+            writer = new IndexWriter(dir2, (IndexWriterConfig)conf.Clone());
+            IndexReader[] readers = new IndexReader[] { DirectoryReader.Open(dir) };
+            writer.AddIndexes(readers);
+            readers[0].Dispose();
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            try
+            {
+                writer.AddDocument(doc);
+                Assert.Fail("did not hit exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            writer.Dispose();
+            dir2.Dispose();
+            dir.Dispose();
+        }
+
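+        // Both documents carry the numeric DV field (the second also indexes
+        // postings under the same name), so GetDocsWithField must set both bits.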
+        [Test]
+        public virtual void TestDocsWithField()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(new TextField("dv", "some text", Field.Store.NO));
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+
+            DirectoryReader r = writer.Reader;
+            writer.Dispose();
+
+            AtomicReader subR = (AtomicReader)r.Leaves[0].Reader;
+            Assert.AreEqual(2, subR.NumDocs);
+
+            IBits bits = FieldCache.DEFAULT.GetDocsWithField(subR, "dv");
+            Assert.IsTrue(bits.Get(0));
+            Assert.IsTrue(bits.Get(1));
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSameFieldNameForPostingAndDocValue()
+        {
+            // LUCENE-5192: FieldInfos.Builder neglected to update
+            // globalFieldNumbers.docValuesType map if the field existed, resulting in
+            // potentially adding the same field with different DV types.
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("f", "mock-value", Field.Store.NO));
+            doc.Add(new NumericDocValuesField("f", 5));
+            writer.AddDocument(doc);
+            writer.Commit();
+
+            doc = new Document();
+            doc.Add(new BinaryDocValuesField("f", new BytesRef("mock")));
+            try
+            {
+                writer.AddDocument(doc);
+                Assert.Fail("should not have succeeded to add a field with different DV type than what already exists");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                writer.Rollback();
+            }
+
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs b/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
new file mode 100644
index 0000000..40cc370
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
@@ -0,0 +1,311 @@
+using System;
+using System.Collections.Generic;
+using System.Threading;
+using Lucene.Net.Documents;
+using Lucene.Net.Search;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.IO;
+    using BinaryDocValuesField = BinaryDocValuesField;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using NumericDocValuesField = NumericDocValuesField;
+    using SortedDocValuesField = SortedDocValuesField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [SuppressCodecs("Lucene3x")]
+    [TestFixture]
+    public class TestDocValuesWithThreads : LuceneTestCase
+    {
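+        // Indexes numeric, binary, and sorted doc values, then reads them back
+        // concurrently through the FieldCache to exercise thread safety.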
+        [Test]
+        public virtual void Test()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+
+            IList<long?> numbers = new List<long?>();
+            IList<BytesRef> binary = new List<BytesRef>();
+            IList<BytesRef> sorted = new List<BytesRef>();
+            int numDocs = AtLeast(100);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document d = new Document();
+                long number = Random().NextLong();
+                d.Add(new NumericDocValuesField("number", number));
+                BytesRef bytes = new BytesRef(TestUtil.RandomRealisticUnicodeString(Random()));
+                d.Add(new BinaryDocValuesField("bytes", bytes));
+                binary.Add(bytes);
+                bytes = new BytesRef(TestUtil.RandomRealisticUnicodeString(Random()));
+                d.Add(new SortedDocValuesField("sorted", bytes));
+                sorted.Add(bytes);
+                w.AddDocument(d);
+                numbers.Add(number);
+            }
+
+            w.ForceMerge(1);
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            Assert.AreEqual(1, r.Leaves.Count);
+            AtomicReader ar = (AtomicReader)r.Leaves[0].Reader;
+
+            int numThreads = TestUtil.NextInt(Random(), 2, 5);
+            IList<ThreadClass> threads = new List<ThreadClass>();
+            CountdownEvent startingGun = new CountdownEvent(1);
+            for (int t = 0; t < numThreads; t++)
+            {
+                Random threadRandom = new Random(Random().Next());
+                ThreadClass thread = new ThreadAnonymousInnerClassHelper(this, numbers, binary, sorted, numDocs, ar, startingGun, threadRandom);
+                thread.Start();
+                threads.Add(thread);
+            }
+
+            startingGun.Signal();
+
+            foreach (ThreadClass thread in threads)
+            {
+                thread.Join();
+            }
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestDocValuesWithThreads OuterInstance;
+
+            private IList<long?> Numbers;
+            private IList<BytesRef> Binary;
+            private IList<BytesRef> Sorted;
+            private int NumDocs;
+            private AtomicReader Ar;
+            private CountdownEvent StartingGun;
+            private Random ThreadRandom;
+
+            public ThreadAnonymousInnerClassHelper(TestDocValuesWithThreads outerInstance, IList<long?> numbers, IList<BytesRef> binary, IList<BytesRef> sorted, int numDocs, AtomicReader ar, CountdownEvent startingGun, Random threadRandom)
+            {
+                this.OuterInstance = outerInstance;
+                this.Numbers = numbers;
+                this.Binary = binary;
+                this.Sorted = sorted;
+                this.NumDocs = numDocs;
+                this.Ar = ar;
+                this.StartingGun = startingGun;
+                this.ThreadRandom = threadRandom;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    //NumericDocValues ndv = ar.GetNumericDocValues("number");
+                    FieldCache.Int64s ndv = FieldCache.DEFAULT.GetInt64s(Ar, "number", false);
+                    //BinaryDocValues bdv = ar.GetBinaryDocValues("bytes");
+                    BinaryDocValues bdv = FieldCache.DEFAULT.GetTerms(Ar, "bytes", false);
+                    SortedDocValues sdv = FieldCache.DEFAULT.GetTermsIndex(Ar, "sorted");
+                    StartingGun.Wait();
+                    int iters = AtLeast(1000);
+                    BytesRef scratch = new BytesRef();
+                    BytesRef scratch2 = new BytesRef();
+                    for (int iter = 0; iter < iters; iter++)
+                    {
+                        int docID = ThreadRandom.Next(NumDocs);
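+                        // Read the stored long back through a randomly chosen
+                        // FieldCache view; narrower views truncate the value and
+                        // the float/double views reinterpret its bits.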
+                        switch (ThreadRandom.Next(6))
+                        {
+#pragma warning disable 612, 618
+                            case 0:
+                                Assert.AreEqual((long)(sbyte)Numbers[docID], FieldCache.DEFAULT.GetBytes(Ar, "number", false).Get(docID));
+                                break;
+
+                            case 1:
+                                Assert.AreEqual((long)(short)Numbers[docID], FieldCache.DEFAULT.GetInt16s(Ar, "number", false).Get(docID));
+                                break;
+#pragma warning restore 612, 618
+
+                            case 2:
+                                Assert.AreEqual((long)(int)Numbers[docID], FieldCache.DEFAULT.GetInt32s(Ar, "number", false).Get(docID));
+                                break;
+
+                            case 3:
+                                Assert.AreEqual((long)Numbers[docID], FieldCache.DEFAULT.GetInt64s(Ar, "number", false).Get(docID));
+                                break;
+
+                            case 4:
+                                Assert.AreEqual(Number.Int32BitsToSingle((int)Numbers[docID]), FieldCache.DEFAULT.GetSingles(Ar, "number", false).Get(docID), 0.0f);
+                                break;
+
+                            case 5:
+                                Assert.AreEqual(BitConverter.Int64BitsToDouble((long)Numbers[docID]), FieldCache.DEFAULT.GetDoubles(Ar, "number", false).Get(docID), 0.0);
+                                break;
+                        }
+                        bdv.Get(docID, scratch);
+                        Assert.AreEqual(Binary[docID], scratch);
+                        // Cannot share a single scratch against two "sources":
+                        sdv.Get(docID, scratch2);
+                        Assert.AreEqual(Sorted[docID], scratch2);
+                    }
+                }
+                catch (Exception e)
+                {
+                    throw new Exception(e.Message, e);
+                }
+            }
+        }
+
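+        // Stress test: random strings (optionally with duplicates) in a sorted
+        // DV field, verified from multiple threads against the expected values.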
+        [Test]
+        public virtual void Test2()
+        {
+            Random random = Random();
+            int NUM_DOCS = AtLeast(100);
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(random, dir, Similarity, TimeZone);
+            bool allowDups = random.NextBoolean();
+            HashSet<string> seen = new HashSet<string>();
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: NUM_DOCS=" + NUM_DOCS + " allowDups=" + allowDups);
+            }
+            int numDocs = 0;
+            IList<BytesRef> docValues = new List<BytesRef>();
+
+            // TODO: deletions
+            while (numDocs < NUM_DOCS)
+            {
+                string s;
+                if (random.NextBoolean())
+                {
+                    s = TestUtil.RandomSimpleString(random);
+                }
+                else
+                {
+                    s = TestUtil.RandomUnicodeString(random);
+                }
+                BytesRef br = new BytesRef(s);
+
+                if (!allowDups)
+                {
+                    if (seen.Contains(s))
+                    {
+                        continue;
+                    }
+                    seen.Add(s);
+                }
+
+                if (VERBOSE)
+                {
+                    Console.WriteLine("  " + numDocs + ": s=" + s);
+                }
+
+                Document doc = new Document();
+                doc.Add(new SortedDocValuesField("stringdv", br));
+                doc.Add(new NumericDocValuesField("id", numDocs));
+                docValues.Add(br);
+                writer.AddDocument(doc);
+                numDocs++;
+
+                if (random.Next(40) == 17)
+                {
+                    // force flush
+                    writer.Reader.Dispose();
+                }
+            }
+
+            writer.ForceMerge(1);
+            DirectoryReader r = writer.Reader;
+            writer.Dispose();
+
+            AtomicReader sr = GetOnlySegmentReader(r);
+
+            long END_TIME = Environment.TickCount + (TEST_NIGHTLY ? 30 : 1) * 1000; // run ~30 seconds nightly, ~1 second otherwise
+
+            int NUM_THREADS = TestUtil.NextInt(Random(), 1, 10);
+            ThreadClass[] threads = new ThreadClass[NUM_THREADS];
+            for (int thread = 0; thread < NUM_THREADS; thread++)
+            {
+                threads[thread] = new ThreadAnonymousInnerClassHelper2(random, docValues, sr, END_TIME);
+                threads[thread].Start();
+            }
+
+            foreach (ThreadClass thread in threads)
+            {
+                thread.Join();
+            }
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        private class ThreadAnonymousInnerClassHelper2 : ThreadClass
+        {
+            private Random Random;
+            private IList<BytesRef> DocValues;
+            private AtomicReader Sr;
+            private long END_TIME;
+
+            public ThreadAnonymousInnerClassHelper2(Random random, IList<BytesRef> docValues, AtomicReader sr, long END_TIME)
+            {
+                this.Random = random;
+                this.DocValues = docValues;
+                this.Sr = sr;
+                this.END_TIME = END_TIME;
+            }
+
+            public override void Run()
+            {
+                Random random = LuceneTestCase.Random(); // qualified: the Random field shadows LuceneTestCase.Random()
+                SortedDocValues stringDVDirect;
+                NumericDocValues docIDToID;
+                try
+                {
+                    stringDVDirect = Sr.GetSortedDocValues("stringdv");
+                    docIDToID = Sr.GetNumericDocValues("id");
+                    Assert.IsNotNull(stringDVDirect);
+                }
+                catch (IOException ioe)
+                {
+                    throw new Exception(ioe.Message, ioe);
+                }
+                while (Environment.TickCount < END_TIME)
+                {
+                    SortedDocValues source;
+                    source = stringDVDirect;
+                    BytesRef scratch = new BytesRef();
+
+                    for (int iter = 0; iter < 100; iter++)
+                    {
+                        int docID = random.Next(Sr.MaxDoc);
+                        source.Get(docID, scratch);
+                        Assert.AreEqual(DocValues[(int)docIDToID.Get(docID)], scratch);
+                    }
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs b/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs
new file mode 100644
index 0000000..03ba737
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs
@@ -0,0 +1,430 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestDocsAndPositions : LuceneTestCase
+    {
+        private string FieldName;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            FieldName = "field" + Random().Next();
+        }
+
+        /// <summary>
+        /// Simple test case for <seealso cref="DocsAndPositionsEnum"/>
+        /// </summary>
+        [Test]
+        public virtual void TestPositionsSimple()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
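+            // Each document repeats the tokens "1".."10" four times, so term "1"
+            // occurs at positions 0, 10, 20, and 30 with freq 4.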
+            for (int i = 0; i < 39; i++)
+            {
+                Document doc = new Document();
+                FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+                customType.OmitNorms = true;
+                doc.Add(NewField(FieldName, "1 2 3 4 5 6 7 8 9 10 " + "1 2 3 4 5 6 7 8 9 10 " + "1 2 3 4 5 6 7 8 9 10 " + "1 2 3 4 5 6 7 8 9 10", customType));
+                writer.AddDocument(doc);
+            }
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            int num = AtLeast(13);
+            for (int i = 0; i < num; i++)
+            {
+                BytesRef bytes = new BytesRef("1");
+                IndexReaderContext topReaderContext = reader.Context;
+                foreach (AtomicReaderContext atomicReaderContext in topReaderContext.Leaves)
+                {
+                    DocsAndPositionsEnum docsAndPosEnum = GetDocsAndPositions((AtomicReader)atomicReaderContext.Reader, bytes, null);
+                    Assert.IsNotNull(docsAndPosEnum);
+                    if (atomicReaderContext.Reader.MaxDoc == 0)
+                    {
+                        continue;
+                    }
+                    int advance = docsAndPosEnum.Advance(Random().Next(atomicReaderContext.Reader.MaxDoc));
+                    do
+                    {
+                        string msg = "Advanced to: " + advance + " current doc: " + docsAndPosEnum.DocID; // TODO: + " usePayloads: " + usePayload;
+                        Assert.AreEqual(4, docsAndPosEnum.Freq, msg);
+                        Assert.AreEqual(0, docsAndPosEnum.NextPosition(), msg);
+                        Assert.AreEqual(4, docsAndPosEnum.Freq, msg);
+                        Assert.AreEqual(10, docsAndPosEnum.NextPosition(), msg);
+                        Assert.AreEqual(4, docsAndPosEnum.Freq, msg);
+                        Assert.AreEqual(20, docsAndPosEnum.NextPosition(), msg);
+                        Assert.AreEqual(4, docsAndPosEnum.Freq, msg);
+                        Assert.AreEqual(30, docsAndPosEnum.NextPosition(), msg);
+                    } while (docsAndPosEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                }
+            }
+            reader.Dispose();
+            directory.Dispose();
+        }
+
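+        /// <summary>
+        /// Returns a positions enum for the given term in the test field, or null
+        /// if the term is not present in this reader.
+        /// </summary>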
+        public virtual DocsAndPositionsEnum GetDocsAndPositions(AtomicReader reader, BytesRef bytes, IBits liveDocs)
+        {
+            Terms terms = reader.Terms(FieldName);
+            if (terms != null)
+            {
+                TermsEnum te = terms.GetIterator(null);
+                if (te.SeekExact(bytes))
+                {
+                    return te.DocsAndPositions(liveDocs, null);
+                }
+            }
+            return null;
+        }
+
+        /// <summary>
+        /// This test indexes random numbers within a range into a field and checks
+        /// their occurrences by searching for a number from that range selected at
+        /// random. All positions for that number are saved up front and compared to
+        /// the enum's positions.
+        /// </summary>
+        [Test]
+        public virtual void TestRandomPositions()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            int numDocs = AtLeast(47);
+            int max = 1051;
+            int term = Random().Next(max);
+            int?[][] positionsInDoc = new int?[numDocs][];
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.OmitNorms = true;
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                List<int?> positions = new List<int?>();
+                StringBuilder builder = new StringBuilder();
+                int num = AtLeast(131);
+                for (int j = 0; j < num; j++)
+                {
+                    int nextInt = Random().Next(max);
+                    builder.Append(nextInt).Append(" ");
+                    if (nextInt == term)
+                    {
+                        positions.Add(Convert.ToInt32(j));
+                    }
+                }
+                if (positions.Count == 0)
+                {
+                    builder.Append(term);
+                    positions.Add(num);
+                }
+                doc.Add(NewField(FieldName, builder.ToString(), customType));
+                positionsInDoc[i] = positions.ToArray();
+                writer.AddDocument(doc);
+            }
+
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            int num_ = AtLeast(13);
+            for (int i = 0; i < num_; i++)
+            {
+                BytesRef bytes = new BytesRef("" + term);
+                IndexReaderContext topReaderContext = reader.Context;
+                foreach (AtomicReaderContext atomicReaderContext in topReaderContext.Leaves)
+                {
+                    DocsAndPositionsEnum docsAndPosEnum = GetDocsAndPositions((AtomicReader)atomicReaderContext.Reader, bytes, null);
+                    Assert.IsNotNull(docsAndPosEnum);
+                    int initDoc = 0;
+                    int maxDoc = atomicReaderContext.Reader.MaxDoc;
+                    // initially advance or do next doc
+                    if (Random().NextBoolean())
+                    {
+                        initDoc = docsAndPosEnum.NextDoc();
+                    }
+                    else
+                    {
+                        initDoc = docsAndPosEnum.Advance(Random().Next(maxDoc));
+                    }
+                    // now run through the scorer and check if all positions are there...
+                    do
+                    {
+                        int docID = docsAndPosEnum.DocID;
+                        if (docID == DocIdSetIterator.NO_MORE_DOCS)
+                        {
+                            break;
+                        }
+                        int?[] pos = positionsInDoc[atomicReaderContext.DocBase + docID];
+                        Assert.AreEqual(pos.Length, docsAndPosEnum.Freq);
+                        // the number of positions read should be random - don't
+                        // always read all of them
+                        int howMany = Random().Next(20) == 0 ? pos.Length - Random().Next(pos.Length) : pos.Length;
+                        for (int j = 0; j < howMany; j++)
+                        {
+                            Assert.AreEqual(pos[j], docsAndPosEnum.NextPosition(), "iteration: " + i + " initDoc: " + initDoc + " doc: " + docID + " base: " + atomicReaderContext.DocBase + " positions: " + string.Join(", ", pos)); // TODO: + " usePayloads: " + usePayload
+                        }
+
+                        if (Random().Next(10) == 0) // once in a while, advance
+                        {
+                            if (docsAndPosEnum.Advance(docID + 1 + Random().Next((maxDoc - docID))) == DocIdSetIterator.NO_MORE_DOCS)
+                            {
+                                break;
+                            }
+                        }
+                    } while (docsAndPosEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                }
+            }
+            reader.Dispose();
+            dir.Dispose();
+        }
+
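+        // Indexes random numbers while tracking per-document frequencies of one
+        // term, then verifies DocsEnum.Freq and Advance() against those counts.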
+        [Test]
+        public virtual void TestRandomDocs()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            int numDocs = AtLeast(49);
+            int max = 15678;
+            int term = Random().Next(max);
+            int[] freqInDoc = new int[numDocs];
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.OmitNorms = true;
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                StringBuilder builder = new StringBuilder();
+                for (int j = 0; j < 199; j++)
+                {
+                    int nextInt = Random().Next(max);
+                    builder.Append(nextInt).Append(' ');
+                    if (nextInt == term)
+                    {
+                        freqInDoc[i]++;
+                    }
+                }
+                doc.Add(NewField(FieldName, builder.ToString(), customType));
+                writer.AddDocument(doc);
+            }
+
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            int num = AtLeast(13);
+            for (int i = 0; i < num; i++)
+            {
+                BytesRef bytes = new BytesRef("" + term);
+                IndexReaderContext topReaderContext = reader.Context;
+                foreach (AtomicReaderContext context in topReaderContext.Leaves)
+                {
+                    int maxDoc = context.AtomicReader.MaxDoc;
+                    DocsEnum docsEnum = TestUtil.Docs(Random(), context.Reader, FieldName, bytes, null, null, DocsEnum.FLAG_FREQS);
+                    if (FindNext(freqInDoc, context.DocBase, context.DocBase + maxDoc) == int.MaxValue)
+                    {
+                        Assert.IsNull(docsEnum);
+                        continue;
+                    }
+                    Assert.IsNotNull(docsEnum);
+                    docsEnum.NextDoc();
+                    for (int j = 0; j < maxDoc; j++)
+                    {
+                        if (freqInDoc[context.DocBase + j] != 0)
+                        {
+                            Assert.AreEqual(j, docsEnum.DocID);
+                            Assert.AreEqual(docsEnum.Freq, freqInDoc[context.DocBase + j]);
+                            if (i % 2 == 0 && Random().Next(10) == 0)
+                            {
+                                int next = FindNext(freqInDoc, context.DocBase + j + 1, context.DocBase + maxDoc) - context.DocBase;
+                                int advancedTo = docsEnum.Advance(next);
+                                if (next >= maxDoc)
+                                {
+                                    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, advancedTo);
+                                }
+                                else
+                                {
+                                    Assert.IsTrue(next >= advancedTo, "advanced to: " + advancedTo + " but should be <= " + next);
+                                }
+                            }
+                            else
+                            {
+                                docsEnum.NextDoc();
+                            }
+                        }
+                    }
+                    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docsEnum.DocID, "DocBase: " + context.DocBase + " maxDoc: " + maxDoc + " " + docsEnum.GetType());
+                }
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
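+        // Returns the first index in [pos, max) with a non-zero count, or
+        // int.MaxValue if there is none.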
+        private static int FindNext(int[] docs, int pos, int max)
+        {
+            for (int i = pos; i < max; i++)
+            {
+                if (docs[i] != 0)
+                {
+                    return i;
+                }
+            }
+            return int.MaxValue;
+        }
+
+        /// <summary>
+        /// Tests retrieval of positions for terms that have a large number of
+        /// occurrences, forcing a buffer refill during positions iteration.
+        /// </summary>
+        [Test]
+        public virtual void TestLargeNumberOfPositions()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            int howMany = 1000;
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.OmitNorms = true;
+            for (int i = 0; i < 39; i++)
+            {
+                Document doc = new Document();
+                StringBuilder builder = new StringBuilder();
+                for (int j = 0; j < howMany; j++)
+                {
+                    if (j % 2 == 0)
+                    {
+                        builder.Append("even ");
+                    }
+                    else
+                    {
+                        builder.Append("odd ");
+                    }
+                }
+                doc.Add(NewField(FieldName, builder.ToString(), customType));
+                writer.AddDocument(doc);
+            }
+
+            // now do searches
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            int num = AtLeast(13);
+            for (int i = 0; i < num; i++)
+            {
+                BytesRef bytes = new BytesRef("even");
+
+                IndexReaderContext topReaderContext = reader.Context;
+                foreach (AtomicReaderContext atomicReaderContext in topReaderContext.Leaves)
+                {
+                    DocsAndPositionsEnum docsAndPosEnum = GetDocsAndPositions((AtomicReader)atomicReaderContext.Reader, bytes, null);
+                    Assert.IsNotNull(docsAndPosEnum);
+
+                    int initDoc = 0;
+                    int maxDoc = atomicReaderContext.Reader.MaxDoc;
+                    // initially advance or do next doc
+                    if (Random().NextBoolean())
+                    {
+                        initDoc = docsAndPosEnum.NextDoc();
+                    }
+                    else
+                    {
+                        initDoc = docsAndPosEnum.Advance(Random().Next(maxDoc));
+                    }
+                    string msg = "Iteration: " + i + " initDoc: " + initDoc; // TODO: + " payloads: " + usePayload;
+                    Assert.AreEqual(howMany / 2, docsAndPosEnum.Freq);
+                    for (int j = 0; j < howMany; j += 2)
+                    {
+                        Assert.AreEqual(j, docsAndPosEnum.NextPosition(), "position missmatch index: " + j + " with freq: " + docsAndPosEnum.Freq + " -- " + msg);
+                    }
+                }
+            }
+            reader.Dispose();
+            dir.Dispose();
+        }
+
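+        // A freshly obtained (or reused) DocsEnum must be positioned at
+        // DocID == -1 before the first NextDoc() call.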
+        [Test]
+        public virtual void TestDocsEnumStart()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewStringField("foo", "bar", Field.Store.NO));
+            writer.AddDocument(doc);
+            DirectoryReader reader = writer.Reader;
+            AtomicReader r = GetOnlySegmentReader(reader);
+            DocsEnum disi = TestUtil.Docs(Random(), r, "foo", new BytesRef("bar"), null, null, DocsEnum.FLAG_NONE);
+            int docid = disi.DocID;
+            Assert.AreEqual(-1, docid);
+            Assert.IsTrue(disi.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+
+            // now reuse and check again
+            TermsEnum te = r.Terms("foo").GetIterator(null);
+            Assert.IsTrue(te.SeekExact(new BytesRef("bar")));
+            disi = TestUtil.Docs(Random(), te, null, disi, DocsEnum.FLAG_NONE);
+            docid = disi.DocID;
+            Assert.AreEqual(-1, docid);
+            Assert.IsTrue(disi.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            writer.Dispose();
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDocsAndPositionsEnumStart()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewTextField("foo", "bar", Field.Store.NO));
+            writer.AddDocument(doc);
+            DirectoryReader reader = writer.Reader;
+            AtomicReader r = GetOnlySegmentReader(reader);
+            DocsAndPositionsEnum disi = r.TermPositionsEnum(new Term("foo", "bar"));
+            int docid = disi.DocID;
+            Assert.AreEqual(-1, docid);
+            Assert.IsTrue(disi.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+
+            // now reuse and check again
+            TermsEnum te = r.Terms("foo").GetIterator(null);
+            Assert.IsTrue(te.SeekExact(new BytesRef("bar")));
+            disi = te.DocsAndPositions(null, disi);
+            docid = disi.DocID;
+            Assert.AreEqual(-1, docid);
+            Assert.IsTrue(disi.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            writer.Dispose();
+            r.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs b/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs
new file mode 100644
index 0000000..fe61f04
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs
@@ -0,0 +1,409 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Analysis;
+    using NUnit.Framework;
+    using System.IO;
+    using AttributeSource = Lucene.Net.Util.AttributeSource;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using StringField = StringField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestDocumentWriter : LuceneTestCase
+    {
+        private Directory Dir;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Dir = NewDirectory();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        [Test]
+        public virtual void Test()
+        {
+            Assert.IsTrue(Dir != null);
+        }
+
+        [Test]
+        public virtual void TestAddDocument()
+        {
+            Document testDoc = new Document();
+            DocHelper.SetupDoc(testDoc);
+            IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer.AddDocument(testDoc);
+            writer.Commit();
+            SegmentCommitInfo info = writer.NewestSegment();
+            writer.Dispose();
+            // After adding the document, we should be able to read it back in
+            SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
+            Assert.IsTrue(reader != null);
+            Document doc = reader.Document(0);
+            Assert.IsTrue(doc != null);
+
+            //Console.WriteLine("Document: " + doc);
+            IIndexableField[] fields = doc.GetFields("textField2");
+            Assert.IsTrue(fields != null && fields.Length == 1);
+            Assert.IsTrue(fields[0].GetStringValue().Equals(DocHelper.FIELD_2_TEXT));
+            Assert.IsTrue(fields[0].FieldType.StoreTermVectors);
+
+            fields = doc.GetFields("textField1");
+            Assert.IsTrue(fields != null && fields.Length == 1);
+            Assert.IsTrue(fields[0].GetStringValue().Equals(DocHelper.FIELD_1_TEXT));
+            Assert.IsFalse(fields[0].FieldType.StoreTermVectors);
+
+            fields = doc.GetFields("keyField");
+            Assert.IsTrue(fields != null && fields.Length == 1);
+            Assert.IsTrue(fields[0].GetStringValue().Equals(DocHelper.KEYWORD_TEXT));
+
+            fields = doc.GetFields(DocHelper.NO_NORMS_KEY);
+            Assert.IsTrue(fields != null && fields.Length == 1);
+            Assert.IsTrue(fields[0].GetStringValue().Equals(DocHelper.NO_NORMS_TEXT));
+
+            fields = doc.GetFields(DocHelper.TEXT_FIELD_3_KEY);
+            Assert.IsTrue(fields != null && fields.Length == 1);
+            Assert.IsTrue(fields[0].GetStringValue().Equals(DocHelper.FIELD_3_TEXT));
+
+            // test that the norms are not present in the segment if
+            // omitNorms is true
+            foreach (FieldInfo fi in reader.FieldInfos)
+            {
+                if (fi.IsIndexed)
+                {
+                    Assert.IsTrue(fi.OmitsNorms == (reader.GetNormValues(fi.Name) == null));
+                }
+            }
+            reader.Dispose();
+        }
+
+        [Test]
+        public virtual void TestPositionIncrementGap()
+        {
+            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper(this);
+
+            IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
+
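+            // With a position increment gap of 500, the second "repeated" value
+            // starts at position 1 + 1 + 500 = 502.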
+            Document doc = new Document();
+            doc.Add(NewTextField("repeated", "repeated one", Field.Store.YES));
+            doc.Add(NewTextField("repeated", "repeated two", Field.Store.YES));
+
+            writer.AddDocument(doc);
+            writer.Commit();
+            SegmentCommitInfo info = writer.NewestSegment();
+            writer.Dispose();
+            SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
+
+            DocsAndPositionsEnum termPositions = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), "repeated", new BytesRef("repeated"));
+            Assert.IsTrue(termPositions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            int freq = termPositions.Freq;
+            Assert.AreEqual(2, freq);
+            Assert.AreEqual(0, termPositions.NextPosition());
+            Assert.AreEqual(502, termPositions.NextPosition());
+            reader.Dispose();
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private readonly TestDocumentWriter OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper(TestDocumentWriter outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
+            }
+
+            public override int GetPositionIncrementGap(string fieldName)
+            {
+                return 500;
+            }
+        }
+
+        [Test]
+        public virtual void TestTokenReuse()
+        {
+            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper2(this);
+
+            IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
+
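+            // The analyzer below emits "a"@0, "5"@5 (a digit token sets its own
+            // increment), "a"@6 and "a"@7, plus a synonym "b" at each position;
+            // only the first token carries a payload.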
+            Document doc = new Document();
+            doc.Add(NewTextField("f1", "a 5 a a", Field.Store.YES));
+
+            writer.AddDocument(doc);
+            writer.Commit();
+            SegmentCommitInfo info = writer.NewestSegment();
+            writer.Dispose();
+            SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
+
+            DocsAndPositionsEnum termPositions = MultiFields.GetTermPositionsEnum(reader, reader.LiveDocs, "f1", new BytesRef("a"));
+            Assert.IsTrue(termPositions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            int freq = termPositions.Freq;
+            Assert.AreEqual(3, freq);
+            Assert.AreEqual(0, termPositions.NextPosition());
+            Assert.IsNotNull(termPositions.GetPayload());
+            Assert.AreEqual(6, termPositions.NextPosition());
+            Assert.IsNull(termPositions.GetPayload());
+            Assert.AreEqual(7, termPositions.NextPosition());
+            Assert.IsNull(termPositions.GetPayload());
+            reader.Dispose();
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper2 : Analyzer
+        {
+            private readonly TestDocumentWriter OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper2(TestDocumentWriter outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                return new TokenStreamComponents(tokenizer, new TokenFilterAnonymousInnerClassHelper(this, tokenizer));
+            }
+
+            private class TokenFilterAnonymousInnerClassHelper : TokenFilter
+            {
+                private readonly AnalyzerAnonymousInnerClassHelper2 OuterInstance;
+
+                public TokenFilterAnonymousInnerClassHelper(AnalyzerAnonymousInnerClassHelper2 outerInstance, Tokenizer tokenizer)
+                    : base(tokenizer)
+                {
+                    this.OuterInstance = outerInstance;
+                    first = true;
+                    termAtt = AddAttribute<ICharTermAttribute>();
+                    payloadAtt = AddAttribute<IPayloadAttribute>();
+                    posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
+                }
+
+                internal bool first;
+                internal AttributeSource.State state;
+
+                public sealed override bool IncrementToken()
+                {
+                    if (state != null)
+                    {
+                        RestoreState(state);
+                        payloadAtt.Payload = null;
+                        posIncrAtt.PositionIncrement = 0;
+                        termAtt.SetEmpty().Append("b");
+                        state = null;
+                        return true;
+                    }
+
+                    bool hasNext = m_input.IncrementToken();
+                    if (!hasNext)
+                    {
+                        return false;
+                    }
+                    if (char.IsDigit(termAtt.Buffer[0]))
+                    {
+                        posIncrAtt.PositionIncrement = termAtt.Buffer[0] - '0';
+                    }
+                    if (first)
+                    {
+                        // set payload on first position only
+                        payloadAtt.Payload = new BytesRef(new byte[] { 100 });
+                        first = false;
+                    }
+
+                    // index a "synonym" for every token
+                    state = CaptureState();
+                    return true;
+                }
+
+                public sealed override void Reset()
+                {
+                    base.Reset();
+                    first = true;
+                    state = null;
+                }
+
+                internal readonly ICharTermAttribute termAtt;
+                internal readonly IPayloadAttribute payloadAtt;
+                internal readonly IPositionIncrementAttribute posIncrAtt;
+            }
+        }
+
+        [Test]
+        public virtual void TestPreAnalyzedField()
+        {
+            IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+
+            doc.Add(new TextField("preanalyzed", new TokenStreamAnonymousInnerClassHelper(this)));
+
+            writer.AddDocument(doc);
+            writer.Commit();
+            SegmentCommitInfo info = writer.NewestSegment();
+            writer.Dispose();
+            SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
+
+            DocsAndPositionsEnum termPositions = reader.TermPositionsEnum(new Term("preanalyzed", "term1"));
+            Assert.IsTrue(termPositions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(1, termPositions.Freq);
+            Assert.AreEqual(0, termPositions.NextPosition());
+
+            termPositions = reader.TermPositionsEnum(new Term("preanalyzed", "term2"));
+            Assert.IsTrue(termPositions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(2, termPositions.Freq);
+            Assert.AreEqual(1, termPositions.NextPosition());
+            Assert.AreEqual(3, termPositions.NextPosition());
+
+            termPositions = reader.TermPositionsEnum(new Term("preanalyzed", "term3"));
+            Assert.IsTrue(termPositions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(1, termPositions.Freq);
+            Assert.AreEqual(2, termPositions.NextPosition());
+            reader.Dispose();
+        }
+
+        private class TokenStreamAnonymousInnerClassHelper : TokenStream
+        {
+            private readonly TestDocumentWriter OuterInstance;
+
+            public TokenStreamAnonymousInnerClassHelper(TestDocumentWriter outerInstance) 
+            {
+                this.OuterInstance = outerInstance;
+                tokens = new string[] { "term1", "term2", "term3", "term2" };
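+                // With the default position increment of 1 per token, this stream
+                // yields term1@0, term2@1, term3@2, term2@3, matching the
+                // assertions in TestPreAnalyzedField.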
+                index = 0;
+                termAtt = AddAttribute<ICharTermAttribute>();
+            }
+
+            private string[] tokens;
+            private int index;
+
+            private ICharTermAttribute termAtt;
+
+            public sealed override bool IncrementToken()
+            {
+                if (index == tokens.Length)
+                {
+                    return false;
+                }
+                else
+                {
+                    ClearAttributes();
+                    termAtt.SetEmpty().Append(tokens[index++]);
+                    return true;
+                }
+            }
+        }
+
+        /// <summary>
+        /// Test adding two fields with the same name, but
+        /// with different term vector settings (LUCENE-766).
+        /// </summary>
+        [Test]
+        public virtual void TestMixedTermVectorSettingsSameField()
+        {
+            Document doc = new Document();
+            // f1 first without tv then with tv
+            doc.Add(NewStringField("f1", "v1", Field.Store.YES));
+            FieldType customType2 = new FieldType(StringField.TYPE_STORED);
+            customType2.StoreTermVectors = true;
+            customType2.StoreTermVectorOffsets = true;
+            customType2.StoreTermVectorPositions = true;
+            doc.Add(NewField("f1", "v2", customType2));
+            // f2 first with tv then without tv
+            doc.Add(NewField("f2", "v1", customType2));
+            doc.Add(NewStringField("f2", "v2", Field.Store.YES));
+
+            IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            TestUtil.CheckIndex(Dir);
+
+            IndexReader reader = DirectoryReader.Open(Dir);
+            // f1
+            Terms tfv1 = reader.GetTermVectors(0).GetTerms("f1");
+            Assert.IsNotNull(tfv1);
+            Assert.AreEqual(2, tfv1.Count, "the 'with_tv' setting should rule!");
+            // f2
+            Terms tfv2 = reader.GetTermVectors(0).GetTerms("f2");
+            Assert.IsNotNull(tfv2);
+            Assert.AreEqual(2, tfv2.Count, "the 'with_tv' setting should rule!");
+            reader.Dispose();
+        }
+
+        /// <summary>
+        /// Test adding two fields with the same name, one indexed,
+        /// the other stored only. The omitNorms and omitTermFreqAndPositions settings
+        /// of the stored field should not affect the indexed one (LUCENE-1590).
+        /// </summary>
+        [Test]
+        public virtual void TestLUCENE_1590()
+        {
+            Document doc = new Document();
+            // f1 has no norms
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.OmitNorms = true;
+            FieldType customType2 = new FieldType();
+            customType2.IsStored = true;
+            doc.Add(NewField("f1", "v1", customType));
+            doc.Add(NewField("f1", "v2", customType2));
+            // f2 has no TF
+            FieldType customType3 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType3.IndexOptions = IndexOptions.DOCS_ONLY;
+            Field f = NewField("f2", "v1", customType3);
+            doc.Add(f);
+            doc.Add(NewField("f2", "v2", customType2));
+
+            IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer.AddDocument(doc);
+            writer.ForceMerge(1); // be sure to have a single segment
+            writer.Dispose();
+
+            TestUtil.CheckIndex(Dir);
+
+            SegmentReader reader = GetOnlySegmentReader(DirectoryReader.Open(Dir));
+            FieldInfos fi = reader.FieldInfos;
+            // f1
+            Assert.IsFalse(fi.FieldInfo("f1").HasNorms, "f1 should have no norms");
+            Assert.AreEqual(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, fi.FieldInfo("f1").IndexOptions, "omitTermFreqAndPositions field bit should not be set for f1");
+            // f2
+            Assert.IsTrue(fi.FieldInfo("f2").HasNorms, "f2 should have norms");
+            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f2").IndexOptions, "omitTermFreqAndPositions field bit should be set for f2");
+            reader.Dispose();
+        }
+    }
+}
\ No newline at end of file


[12/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Similarities/TestSimilarityBase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Similarities/TestSimilarityBase.cs b/src/Lucene.Net.Tests/Search/Similarities/TestSimilarityBase.cs
new file mode 100644
index 0000000..28ef7b4
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Similarities/TestSimilarityBase.cs
@@ -0,0 +1,651 @@
+using System;
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search.Similarities
+{
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldInvertState = Lucene.Net.Index.FieldInvertState;
+    using FieldType = FieldType;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+    using TextField = TextField;
+
+    /// <summary>
+    /// Tests the <seealso cref="SimilarityBase"/>-based Similarities. Contains unit tests and
+    /// integration tests for all Similarities and correctness tests for a select
+    /// few.
+    /// <para>This class maintains a list of
+    /// <c>SimilarityBase</c> subclasses. Each test case performs its test on all
+    /// items in the list. If a test case fails, the name of the Similarity that
+    /// caused the failure is returned as part of the assertion error message.</para>
+    /// <para>Unit testing is performed by constructing statistics manually and calling
+    /// the <seealso cref="SimilarityBase.Score(BasicStats, float, float)"/> method of the
+    /// Similarities. The statistics represent corner cases of corpus distributions.
+    /// </para>
+    /// <para>For the integration tests, a small (8-document) collection is indexed. The
+    /// tests verify that for a specific query, all relevant documents are returned
+    /// in the correct order. The collection consists of two poems by the English poet
+    /// <a href="http://en.wikipedia.org/wiki/William_blake">William Blake</a>.</para>
+    /// <para>Note: the list of Similarities is maintained by hand. If a new Similarity
+    /// is added to the <c>Lucene.Net.Search.Similarities</c> namespace, the
+    /// list should be updated accordingly.</para>
+    /// <para>
+    /// In the correctness tests, the score is verified against the result of manual
+    /// computation. Since it would be impossible to test all Similarities
+    /// (e.g. all possible DFR combinations, all parameter values for LM), only
+    /// the best performing setups in the original papers are verified.
+    /// </para>
+    /// </summary>
+    [TestFixture]
+    public class TestSimilarityBase : LuceneTestCase
+    {
+        private static string FIELD_BODY = "body";
+        private static string FIELD_ID = "id";
+
+        /// <summary>
+        /// The tolerance range for float equality. </summary>
+        private static float FLOAT_EPSILON = 1e-5f;
+
+        /// <summary>
+        /// The DFR basic models to test. </summary>
+        internal static BasicModel[] BASIC_MODELS = new BasicModel[] { new BasicModelBE(), new BasicModelD(), new BasicModelG(), new BasicModelIF(), new BasicModelIn(), new BasicModelIne(), new BasicModelP() };
+
+        /// <summary>
+        /// The DFR aftereffects to test. </summary>
+        internal static AfterEffect[] AFTER_EFFECTS = new AfterEffect[] { new AfterEffectB(), new AfterEffectL(), new AfterEffect.NoAfterEffect() };
+
+        /// <summary>
+        /// The DFR normalizations to test. </summary>
+        internal static Normalization[] NORMALIZATIONS = new Normalization[] { new NormalizationH1(), new NormalizationH2(), new NormalizationH3(), new NormalizationZ(), new Normalization.NoNormalization() };
+
+        /// <summary>
+        /// The distributions for IB. </summary>
+        internal static Distribution[] DISTRIBUTIONS = new Distribution[] { new DistributionLL(), new DistributionSPL() };
+
+        /// <summary>
+        /// Lambdas for IB. </summary>
+        internal static Lambda[] LAMBDAS = new Lambda[] { new LambdaDF(), new LambdaTTF() };
+
+        private IndexSearcher Searcher;
+        private Directory Dir;
+        private IndexReader Reader;
+
+        /// <summary>
+        /// The list of similarities to test. </summary>
+        private IList<SimilarityBase> Sims;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+
+            Dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Dir, Similarity, TimeZone);
+
+            for (int i = 0; i < Docs.Length; i++)
+            {
+                Document d = new Document();
+                FieldType ft = new FieldType(TextField.TYPE_STORED);
+                ft.IsIndexed = false;
+                d.Add(NewField(FIELD_ID, Convert.ToString(i), ft));
+                d.Add(NewTextField(FIELD_BODY, Docs[i], Field.Store.YES));
+                writer.AddDocument(d);
+            }
+
+            Reader = writer.Reader;
+            Searcher = NewSearcher(Reader);
+            writer.Dispose();
+
+            Sims = new List<SimilarityBase>();
+            foreach (BasicModel basicModel in BASIC_MODELS)
+            {
+                foreach (AfterEffect afterEffect in AFTER_EFFECTS)
+                {
+                    foreach (Normalization normalization in NORMALIZATIONS)
+                    {
+                        Sims.Add(new DFRSimilarity(basicModel, afterEffect, normalization));
+                    }
+                }
+            }
+            foreach (Distribution distribution in DISTRIBUTIONS)
+            {
+                foreach (Lambda lambda in LAMBDAS)
+                {
+                    foreach (Normalization normalization in NORMALIZATIONS)
+                    {
+                        Sims.Add(new IBSimilarity(distribution, lambda, normalization));
+                    }
+                }
+            }
+            Sims.Add(new LMDirichletSimilarity());
+            Sims.Add(new LMJelinekMercerSimilarity(0.1f));
+            Sims.Add(new LMJelinekMercerSimilarity(0.7f));
+        }
+
+        // ------------------------------- Unit tests --------------------------------
+
+        /// <summary>
+        /// The default number of documents in the unit tests. </summary>
+        private static int NUMBER_OF_DOCUMENTS = 100;
+
+        /// <summary>
+        /// The default total number of tokens in the field in the unit tests. </summary>
+        private static long NUMBER_OF_FIELD_TOKENS = 5000;
+
+        /// <summary>
+        /// The default average field length in the unit tests. </summary>
+        private static float AVG_FIELD_LENGTH = 50;
+
+        /// <summary>
+        /// The default document frequency in the unit tests. </summary>
+        private static int DOC_FREQ = 10;
+
+        /// <summary>
+        /// The default total number of occurrences of this term across all documents
+        /// in the unit tests.
+        /// </summary>
+        private static long TOTAL_TERM_FREQ = 70;
+
+        /// <summary>
+        /// The default tf in the unit tests. </summary>
+        private static float FREQ = 7;
+
+        /// <summary>
+        /// The default document length in the unit tests. </summary>
+        private static int DOC_LEN = 40;
+
+        /// <summary>
+        /// Creates the default statistics object that the specific tests modify. </summary>
+        private BasicStats CreateStats()
+        {
+            BasicStats stats = new BasicStats("spoof", 1);
+            stats.NumberOfDocuments = NUMBER_OF_DOCUMENTS;
+            stats.NumberOfFieldTokens = NUMBER_OF_FIELD_TOKENS;
+            stats.AvgFieldLength = AVG_FIELD_LENGTH;
+            stats.DocFreq = DOC_FREQ;
+            stats.TotalTermFreq = TOTAL_TERM_FREQ;
+            return stats;
+        }
+
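+        // Note: -1 is passed below for collection statistics (docCount, sumDocFreq)
+        // that these tests do not rely on.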
+        private CollectionStatistics ToCollectionStats(BasicStats stats)
+        {
+            return new CollectionStatistics(stats.Field, stats.NumberOfDocuments, -1, stats.NumberOfFieldTokens, -1);
+        }
+
+        private TermStatistics ToTermStats(BasicStats stats)
+        {
+            return new TermStatistics(new BytesRef("spoofyText"), stats.DocFreq, stats.TotalTermFreq);
+        }
+
+        /// <summary>
+        /// The generic test core called by all unit test methods. It calls the
+        /// <seealso cref="SimilarityBase.Score(BasicStats, float, float)"/> method of all
+        /// Similarities in <seealso cref="Sims"/> and checks that the score is valid,
+        /// i.e. that it is a finite, non-negative real number.
+        /// </summary>
+        private void UnitTestCore(BasicStats stats, float freq, int docLen)
+        {
+            foreach (SimilarityBase sim in Sims)
+            {
+                BasicStats realStats = (BasicStats)sim.ComputeWeight(stats.TotalBoost, ToCollectionStats(stats), ToTermStats(stats));
+                float score = sim.Score(realStats, freq, docLen);
+                float explScore = sim.Explain(realStats, 1, new Explanation(freq, "freq"), docLen).Value;
+                Assert.IsFalse(float.IsInfinity(score), "Score infinite: " + sim.ToString());
+                Assert.IsFalse(float.IsNaN(score), "Score NaN: " + sim.ToString());
+                Assert.IsTrue(score >= 0, "Score negative: " + sim.ToString());
+                Assert.AreEqual(score, explScore, FLOAT_EPSILON, "score() and explain() return different values: " + sim.ToString());
+            }
+        }
+
+        /// <summary>
+        /// Runs the unit test with the default statistics. </summary>
+        [Test]
+        public virtual void TestDefault()
+        {
+            UnitTestCore(CreateStats(), FREQ, DOC_LEN);
+        }
+
+        /// <summary>
+        /// Tests correct behavior when
+        /// <c>numberOfDocuments = numberOfFieldTokens</c>.
+        /// </summary>
+        [Test]
+        public virtual void TestSparseDocuments()
+        {
+            BasicStats stats = CreateStats();
+            stats.NumberOfFieldTokens = stats.NumberOfDocuments;
+            stats.TotalTermFreq = stats.DocFreq;
+            stats.AvgFieldLength = (float)stats.NumberOfFieldTokens / stats.NumberOfDocuments;
+            UnitTestCore(stats, FREQ, DOC_LEN);
+        }
+
+        /// <summary>
+        /// Tests correct behavior when
+        /// <c>numberOfDocuments > numberOfFieldTokens</c>.
+        /// </summary>
+        [Test]
+        public virtual void TestVerySparseDocuments()
+        {
+            BasicStats stats = CreateStats();
+            stats.NumberOfFieldTokens = stats.NumberOfDocuments * 2 / 3;
+            stats.TotalTermFreq = stats.DocFreq;
+            stats.AvgFieldLength = (float)stats.NumberOfFieldTokens / stats.NumberOfDocuments;
+            UnitTestCore(stats, FREQ, DOC_LEN);
+        }
+
+        /// <summary>
+        /// Tests correct behavior when
+        /// <c>numberOfDocuments = 1</c>.
+        /// </summary>
+        [Test]
+        public virtual void TestOneDocument()
+        {
+            BasicStats stats = CreateStats();
+            stats.NumberOfDocuments = 1;
+            stats.NumberOfFieldTokens = DOC_LEN;
+            stats.AvgFieldLength = DOC_LEN;
+            stats.DocFreq = 1;
+            stats.TotalTermFreq = (int)FREQ;
+            UnitTestCore(stats, FREQ, DOC_LEN);
+        }
+
+        /// <summary>
+        /// Tests correct behavior when
+        /// <c>docFreq = numberOfDocuments</c>.
+        /// </summary>
+        [Test]
+        public virtual void TestAllDocumentsRelevant()
+        {
+            BasicStats stats = CreateStats();
+            float mult = (float)stats.NumberOfDocuments / stats.DocFreq;
+            stats.TotalTermFreq = (int)(stats.TotalTermFreq * mult);
+            stats.DocFreq = stats.NumberOfDocuments;
+            UnitTestCore(stats, FREQ, DOC_LEN);
+        }
+
+        /// <summary>
+        /// Tests correct behavior when
+        /// <c>docFreq > numberOfDocuments / 2</c>.
+        /// </summary>
+        [Test]
+        public virtual void TestMostDocumentsRelevant()
+        {
+            BasicStats stats = CreateStats();
+            float mult = (0.6f * stats.NumberOfDocuments) / stats.DocFreq;
+            stats.TotalTermFreq = (int)(stats.TotalTermFreq * mult);
+            stats.DocFreq = (int)(stats.NumberOfDocuments * 0.6);
+            UnitTestCore(stats, FREQ, DOC_LEN);
+        }
+
+        /// <summary>
+        /// Tests correct behavior when
+        /// <c>docFreq = 1</c>.
+        /// </summary>
+        [Test]
+        public virtual void TestOnlyOneRelevantDocument()
+        {
+            BasicStats stats = CreateStats();
+            stats.DocFreq = 1;
+            stats.TotalTermFreq = (int)FREQ + 3;
+            UnitTestCore(stats, FREQ, DOC_LEN);
+        }
+
+        /// <summary>
+        /// Tests correct behavior when
+        /// <c>totalTermFreq = numberOfFieldTokens</c>.
+        /// </summary>
+        [Test]
+        public virtual void TestAllTermsRelevant()
+        {
+            BasicStats stats = CreateStats();
+            stats.TotalTermFreq = stats.NumberOfFieldTokens;
+            UnitTestCore(stats, DOC_LEN, DOC_LEN);
+            stats.AvgFieldLength = DOC_LEN + 10;
+            UnitTestCore(stats, DOC_LEN, DOC_LEN);
+        }
+
+        /// <summary>
+        /// Tests correct behavior when
+        /// <c>totalTermFreq > numberOfDocuments</c>.
+        /// </summary>
+        [Test]
+        public virtual void TestMoreTermsThanDocuments()
+        {
+            BasicStats stats = CreateStats();
+            stats.TotalTermFreq = stats.TotalTermFreq + stats.NumberOfDocuments;
+            UnitTestCore(stats, 2 * FREQ, DOC_LEN);
+        }
+
+        /// <summary>
+        /// Tests correct behavior when
+        /// <c>totalTermFreq = numberOfDocuments</c>.
+        /// </summary>
+        [Test]
+        public virtual void TestNumberOfTermsAsDocuments()
+        {
+            BasicStats stats = CreateStats();
+            stats.TotalTermFreq = stats.NumberOfDocuments;
+            UnitTestCore(stats, FREQ, DOC_LEN);
+        }
+
+        /// <summary>
+        /// Tests correct behavior when <c>totalTermFreq = 1</c>.
+        /// </summary>
+        [Test]
+        public virtual void TestOneTerm()
+        {
+            BasicStats stats = CreateStats();
+            stats.DocFreq = 1;
+            stats.TotalTermFreq = 1;
+            UnitTestCore(stats, 1, DOC_LEN);
+        }
+
+        /// <summary>
+        /// Tests correct behavior when <c>totalTermFreq = freq</c>.
+        /// </summary>
+        [Test]
+        public virtual void TestOneRelevantDocument()
+        {
+            BasicStats stats = CreateStats();
+            stats.DocFreq = 1;
+            stats.TotalTermFreq = (int)FREQ;
+            UnitTestCore(stats, FREQ, DOC_LEN);
+        }
+
+        /// <summary>
+        /// Tests correct behavior when <c>numberOfFieldTokens = freq</c>.
+        /// </summary>
+        [Test]
+        public virtual void TestAllTermsRelevantOnlyOneDocument()
+        {
+            BasicStats stats = CreateStats();
+            stats.NumberOfDocuments = 10;
+            stats.NumberOfFieldTokens = 50;
+            stats.AvgFieldLength = 5;
+            stats.DocFreq = 1;
+            stats.TotalTermFreq = 50;
+            UnitTestCore(stats, 50, 50);
+        }
+
+        /// <summary>
+        /// Tests correct behavior when there is only one document with a single term
+        /// in the collection.
+        /// </summary>
+        [Test]
+        public virtual void TestOnlyOneTermOneDocument()
+        {
+            BasicStats stats = CreateStats();
+            stats.NumberOfDocuments = 1;
+            stats.NumberOfFieldTokens = 1;
+            stats.AvgFieldLength = 1;
+            stats.DocFreq = 1;
+            stats.TotalTermFreq = 1;
+            UnitTestCore(stats, 1, 1);
+        }
+
+        /// <summary>
+        /// Tests correct behavior when there is only one term in the field, but
+        /// more than one document.
+        /// </summary>
+        [Test]
+        public virtual void TestOnlyOneTerm()
+        {
+            BasicStats stats = CreateStats();
+            stats.NumberOfFieldTokens = 1;
+            stats.AvgFieldLength = 1.0f / stats.NumberOfDocuments;
+            stats.DocFreq = 1;
+            stats.TotalTermFreq = 1;
+            UnitTestCore(stats, 1, DOC_LEN);
+        }
+
+        /// <summary>
+        /// Tests correct behavior when <c>avgFieldLength = docLen</c>.
+        /// </summary>
+        [Test]
+        public virtual void TestDocumentLengthAverage()
+        {
+            BasicStats stats = CreateStats();
+            UnitTestCore(stats, FREQ, (int)stats.AvgFieldLength);
+        }
+
+        // ---------------------------- Correctness tests ----------------------------
+
+        /// <summary>
+        /// Correctness test for the Dirichlet LM model. </summary>
+        [Test]
+        public virtual void TestLMDirichlet()
+        {
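+            // Manual computation of the Dirichlet-smoothed LM score with mu = 2000:
+            //   p(w|d) = (freq + mu * p(w|C)) / (docLen + mu),
+            //   p(w|C) = (totalTermFreq + 1) / (numberOfFieldTokens + 1),
+            // scored as log(p(w|d) / (a * p(w|C))) + log(a), with a = mu / (docLen + mu).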
+            float p = (FREQ + 2000.0f * (TOTAL_TERM_FREQ + 1) / (NUMBER_OF_FIELD_TOKENS + 1.0f)) / (DOC_LEN + 2000.0f);
+            float a = 2000.0f / (DOC_LEN + 2000.0f);
+            float gold = (float)(Math.Log(p / (a * (TOTAL_TERM_FREQ + 1) / (NUMBER_OF_FIELD_TOKENS + 1.0f))) + Math.Log(a));
+            CorrectnessTestCore(new LMDirichletSimilarity(), gold);
+        }
+
+        /// <summary>
+        /// Correctness test for the Jelinek-Mercer LM model. </summary>
+        [Test]
+        public virtual void TestLMJelinekMercer()
+        {
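+            // Manual computation of the Jelinek-Mercer score with lambda = 0.1:
+            //   p(w|d) = (1 - lambda) * freq / docLen + lambda * p(w|C),
+            // scored as log(p(w|d) / (lambda * p(w|C))).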
+            float p = (1 - 0.1f) * FREQ / DOC_LEN + 0.1f * (TOTAL_TERM_FREQ + 1) / (NUMBER_OF_FIELD_TOKENS + 1.0f);
+            float gold = (float)(Math.Log(p / (0.1f * (TOTAL_TERM_FREQ + 1) / (NUMBER_OF_FIELD_TOKENS + 1.0f))));
+            CorrectnessTestCore(new LMJelinekMercerSimilarity(0.1f), gold);
+        }
+
+        /// <summary>
+        /// Correctness test for the LL IB model with DF-based lambda and
+        /// no normalization.
+        /// </summary>
+        [Test]
+        public virtual void TestLLForIB()
+        {
+            SimilarityBase sim = new IBSimilarity(new DistributionLL(), new LambdaDF(), new Normalization.NoNormalization());
+            CorrectnessTestCore(sim, 4.178574562072754f);
+        }
+
+        /// <summary>
+        /// Correctness test for the SPL IB model with TTF-based lambda and
+        /// no normalization.
+        /// </summary>
+        [Test]
+        public virtual void TestSPLForIB()
+        {
+            SimilarityBase sim = new IBSimilarity(new DistributionSPL(), new LambdaTTF(), new Normalization.NoNormalization());
+            CorrectnessTestCore(sim, 2.2387237548828125f);
+        }
+
+        /// <summary>
+        /// Correctness test for the PL2 DFR model. </summary>
+        [Test]
+        public virtual void TestPL2()
+        {
+            SimilarityBase sim = new DFRSimilarity(new BasicModelP(), new AfterEffectL(), new NormalizationH2());
+            float tfn = (float)(FREQ * SimilarityBase.Log2(1 + AVG_FIELD_LENGTH / DOC_LEN)); // 8.1894750101
+            float l = 1.0f / (tfn + 1.0f); // 0.108820144666
+            float lambda = (1.0f + TOTAL_TERM_FREQ) / (1f + NUMBER_OF_DOCUMENTS); // 0.7029703
+            float p = (float)(tfn * SimilarityBase.Log2(tfn / lambda) + (lambda + 1 / (12 * tfn) - tfn) * SimilarityBase.Log2(Math.E) + 0.5 * SimilarityBase.Log2(2 * Math.PI * tfn)); // 21.065619
+            float gold = l * p; // 2.2923636
+            CorrectnessTestCore(sim, gold);
+        }
+
+        /// <summary>
+        /// Correctness test for the IneB2 DFR model. </summary>
+        [Test]
+        public virtual void TestIneB2()
+        {
+            SimilarityBase sim = new DFRSimilarity(new BasicModelIne(), new AfterEffectB(), new NormalizationH2());
+            CorrectnessTestCore(sim, 5.747603416442871f);
+        }
+
+        /// <summary>
+        /// Correctness test for the GL1 DFR model. </summary>
+        [Test]
+        public virtual void TestGL1()
+        {
+            SimilarityBase sim = new DFRSimilarity(new BasicModelG(), new AfterEffectL(), new NormalizationH1());
+            CorrectnessTestCore(sim, 1.6390540599822998f);
+        }
+
+        /// <summary>
+        /// Correctness test for the BEB1 DFR model. </summary>
+        [Test]
+        public virtual void TestBEB1()
+        {
+            SimilarityBase sim = new DFRSimilarity(new BasicModelBE(), new AfterEffectB(), new NormalizationH1());
+            float tfn = FREQ * AVG_FIELD_LENGTH / DOC_LEN; // 8.75
+            float b = (TOTAL_TERM_FREQ + 1 + 1) / ((DOC_FREQ + 1) * (tfn + 1)); // 0.67132866
+            double f = TOTAL_TERM_FREQ + 1 + tfn;
+            double n = f + NUMBER_OF_DOCUMENTS;
+            double n1 = n + f - 1; // 258.5
+            double m1 = n + f - tfn - 2; // 248.75
+            double n2 = f; // 79.75
+            double m2 = f - tfn; // 71.0
+            float be = (float)(-SimilarityBase.Log2(n - 1) - SimilarityBase.Log2(Math.E) + ((m1 + 0.5f) * SimilarityBase.Log2(n1 / m1) + (n1 - m1) * SimilarityBase.Log2(n1)) - ((m2 + 0.5f) * SimilarityBase.Log2(n2 / m2) + (n2 - m2) * SimilarityBase.Log2(n2))); // 67.26544321004599 -  91.9620374903885 -  -8.924494472554715
+            // 15.7720995
+            float gold = b * be; // 10.588263
+            CorrectnessTestCore(sim, gold);
+        }
+
+        /// <summary>
+        /// Correctness test for the D DFR model (basic model only). </summary>
+        [Test]
+        public virtual void TestD()
+        {
+            SimilarityBase sim = new DFRSimilarity(new BasicModelD(), new AfterEffect.NoAfterEffect(), new Normalization.NoNormalization());
+            double totalTermFreqNorm = TOTAL_TERM_FREQ + FREQ + 1;
+            double p = 1.0 / (NUMBER_OF_DOCUMENTS + 1); // 0.009900990099009901
+            double phi = FREQ / totalTermFreqNorm; // 0.08974358974358974
+            double D = phi * SimilarityBase.Log2(phi / p) + (1 - phi) * SimilarityBase.Log2((1 - phi) / (1 - p)); // 0.17498542370019005
+            float gold = (float)(totalTermFreqNorm * D + 0.5 * SimilarityBase.Log2(1 + 2 * Math.PI * FREQ * (1 - phi))); // 16.328257
+            CorrectnessTestCore(sim, gold);
+        }
+
+        /// <summary>
+        /// Correctness test for the In2 DFR model with no aftereffect. </summary>
+        [Test]
+        public virtual void TestIn2()
+        {
+            SimilarityBase sim = new DFRSimilarity(new BasicModelIn(), new AfterEffect.NoAfterEffect(), new NormalizationH2());
+            float tfn = (float)(FREQ * SimilarityBase.Log2(1 + AVG_FIELD_LENGTH / DOC_LEN)); // 8.1894750101
+            float gold = (float)(tfn * SimilarityBase.Log2((NUMBER_OF_DOCUMENTS + 1) / (DOC_FREQ + 0.5))); // 26.7459577898
+            CorrectnessTestCore(sim, gold);
+        }
+
+        /// <summary>
+        /// Correctness test for the IFB DFR model with no normalization. </summary>
+        [Test]
+        public virtual void TestIFB()
+        {
+            SimilarityBase sim = new DFRSimilarity(new BasicModelIF(), new AfterEffectB(), new Normalization.NoNormalization());
+            float B = (TOTAL_TERM_FREQ + 1 + 1) / ((DOC_FREQ + 1) * (FREQ + 1)); // 0.8875
+            float IF = (float)(FREQ * SimilarityBase.Log2(1 + (NUMBER_OF_DOCUMENTS + 1) / (TOTAL_TERM_FREQ + 0.5))); // 8.97759389642
+            float gold = B * IF; // 7.96761458307
+            CorrectnessTestCore(sim, gold);
+        }
+
+        /// <summary>
+        /// The generic test core called by all correctness test methods. It calls the
+        /// <seealso cref="SimilarityBase.Score(BasicStats, float, float)"/> method of the
+        /// given Similarity and compares the score against the manually
+        /// computed <c>gold</c> value.
+        /// </summary>
+        private void CorrectnessTestCore(SimilarityBase sim, float gold)
+        {
+            BasicStats stats = CreateStats();
+            BasicStats realStats = (BasicStats)sim.ComputeWeight(stats.TotalBoost, ToCollectionStats(stats), ToTermStats(stats));
+            float score = sim.Score(realStats, FREQ, DOC_LEN);
+            Assert.AreEqual(gold, score, FLOAT_EPSILON, sim.ToString() + " score not correct.");
+        }
+
+        // ---------------------------- Integration tests ----------------------------
+
+        /// <summary>
+        /// The "collection" for the integration tests. </summary>
+        internal string[] Docs = new string[] { "Tiger, tiger burning bright   In the forest of the night   What immortal hand or eye   Could frame thy fearful symmetry ?", "In what distant depths or skies   Burnt the fire of thine eyes ?   On what wings dare he aspire ?   What the hands the seize the fire ?", "And what shoulder and what art   Could twist the sinews of thy heart ?   And when thy heart began to beat What dread hand ? And what dread feet ?", "What the hammer? What the chain ?   In what furnace was thy brain ?   What the anvil ? And what dread grasp   Dare its deadly terrors clasp ?", "And when the stars threw down their spears   And water'd heaven with their tear   Did he smile his work to see ?   Did he, who made the lamb, made thee ?", "Tiger, tiger burning bright   In the forest of the night   What immortal hand or eye   Dare frame thy fearful symmetry ?", "Cruelty has a human heart   And jealousy a human face   Terror the human form divine   And Secrecy the human 
 dress .", "The human dress is forg'd iron   The human form a fiery forge   The human face a furnace seal'd   The human heart its fiery gorge ." };
+
+        /// <summary>
+        /// Tests whether all similarities return three documents for the query word
+        /// "heart".
+        /// </summary>
+        [Test]
+        public virtual void TestHeartList()
+        {
+            Query q = new TermQuery(new Term(FIELD_BODY, "heart"));
+
+            foreach (SimilarityBase sim in Sims)
+            {
+                Searcher.Similarity = sim;
+                TopDocs topDocs = Searcher.Search(q, 1000);
+                Assert.AreEqual(3, topDocs.TotalHits, "Failed: " + sim.ToString());
+            }
+        }
+
+        /// <summary>
+        /// Test whether all similarities return document 3 before documents 7 and 8. </summary>
+        [Test]
+        public virtual void TestHeartRanking()
+        {
+            AssumeFalse("PreFlex codec does not support the stats necessary for this test!", "Lucene3x".Equals(Codec.Default.Name));
+
+            Query q = new TermQuery(new Term(FIELD_BODY, "heart"));
+
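+            // ids are assigned from 0, so "document 3" carries id "2"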
+            foreach (SimilarityBase sim in Sims)
+            {
+                Searcher.Similarity = sim;
+                TopDocs topDocs = Searcher.Search(q, 1000);
+                Assert.AreEqual("2", Reader.Document(topDocs.ScoreDocs[0].Doc).Get(FIELD_ID), "Failed: " + sim.ToString());
+            }
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        // LUCENE-5221
+        [Test]
+        public virtual void TestDiscountOverlapsBoost()
+        {
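+            // Both similarities compute norms from the same FieldInvertState, so
+            // with identical DiscountOverlaps settings they must encode identical
+            // norms (length 5 vs. effective length 3 when the 2 overlaps are discounted).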
+            DefaultSimilarity expected = new DefaultSimilarity();
+            SimilarityBase actual = new DFRSimilarity(new BasicModelIne(), new AfterEffectB(), new NormalizationH2());
+            expected.DiscountOverlaps = false;
+            actual.DiscountOverlaps = false;
+            FieldInvertState state = new FieldInvertState("foo");
+            state.Length = 5;
+            state.NumOverlap = 2;
+            state.Boost = 3;
+            Assert.AreEqual(expected.ComputeNorm(state), actual.ComputeNorm(state));
+            expected.DiscountOverlaps = true;
+            actual.DiscountOverlaps = true;
+            Assert.AreEqual(expected.ComputeNorm(state), actual.ComputeNorm(state));
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/SingleDocTestFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/SingleDocTestFilter.cs b/src/Lucene.Net.Tests/Search/SingleDocTestFilter.cs
new file mode 100644
index 0000000..8e404ea
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/SingleDocTestFilter.cs
@@ -0,0 +1,44 @@
+namespace Lucene.Net.Search
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using IBits = Lucene.Net.Util.IBits;
+    using FixedBitSet = Lucene.Net.Util.FixedBitSet;
+
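+    /// <summary>
+    /// A trivial test filter that matches exactly one document id,
+    /// unless that document is excluded by the supplied acceptDocs.
+    /// </summary>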
+    public class SingleDocTestFilter : Filter
+    {
+        private int Doc;
+
+        public SingleDocTestFilter(int doc)
+        {
+            this.Doc = doc;
+        }
+
+        public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+        {
+            FixedBitSet bits = new FixedBitSet(context.Reader.MaxDoc);
+            bits.Set(Doc);
+            if (acceptDocs != null && !acceptDocs.Get(Doc))
+            {
+                bits.Clear(Doc);
+            }
+            return bits;
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Spans/JustCompileSearchSpans.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Spans/JustCompileSearchSpans.cs b/src/Lucene.Net.Tests/Search/Spans/JustCompileSearchSpans.cs
new file mode 100644
index 0000000..f2b71ae
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Spans/JustCompileSearchSpans.cs
@@ -0,0 +1,165 @@
+using System.Collections.Generic;
+
+namespace Lucene.Net.Search.Spans
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using IBits = Lucene.Net.Util.IBits;
+    using Similarity = Lucene.Net.Search.Similarities.Similarity;
+    using Term = Lucene.Net.Index.Term;
+    using TermContext = Lucene.Net.Index.TermContext;
+
+    /// <summary>
+    /// Holds all implementations of classes in the o.a.l.s.spans package as a
+    /// back-compatibility test. It does not run any tests per se; however, if
+    /// someone adds a method to an interface or an abstract method to an abstract
+    /// class, one of the implementations here will fail to compile, and so we know
+    /// the back-compat policy was violated.
+    /// </summary>
+    internal sealed class JustCompileSearchSpans
+    {
+        private const string UNSUPPORTED_MSG = "unsupported: used for back-compat testing only !";
+
+        internal sealed class JustCompileSpans : Spans
+        {
+            public override int Doc
+            {
+                get { throw new System.NotSupportedException(UNSUPPORTED_MSG); }
+            }
+
+            public override int End
+            {
+                get { throw new System.NotSupportedException(UNSUPPORTED_MSG); }
+            }
+
+            public override bool Next()
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override bool SkipTo(int target)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override int Start
+            {
+                get { throw new System.NotSupportedException(UNSUPPORTED_MSG); }
+            }
+
+            public override ICollection<byte[]> GetPayload()
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override bool IsPayloadAvailable
+            {
+                get
+                {
+                    throw new System.NotSupportedException(UNSUPPORTED_MSG);
+                }
+            }
+
+            public override long GetCost()
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+
+        internal sealed class JustCompileSpanQuery : SpanQuery
+        {
+            public override string Field
+            {
+                get
+                {
+                    throw new System.NotSupportedException(UNSUPPORTED_MSG);
+                }
+            }
+
+            public override Spans GetSpans(AtomicReaderContext context, IBits acceptDocs, IDictionary<Term, TermContext> termContexts)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override string ToString(string field)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+
+        internal sealed class JustCompilePayloadSpans : Spans
+        {
+            public override ICollection<byte[]> GetPayload()
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override bool IsPayloadAvailable
+            {
+                get
+                {
+                    throw new System.NotSupportedException(UNSUPPORTED_MSG);
+                }
+            }
+
+            public override int Doc
+            {
+                get { throw new System.NotSupportedException(UNSUPPORTED_MSG); }
+            }
+
+            public override int End
+            {
+                get { throw new System.NotSupportedException(UNSUPPORTED_MSG); }
+            }
+
+            public override bool Next()
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override bool SkipTo(int target)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override int Start
+            {
+                get { throw new System.NotSupportedException(UNSUPPORTED_MSG); }
+            }
+
+            public override long GetCost()
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+
+        internal sealed class JustCompileSpanScorer : SpanScorer
+        {
+            internal JustCompileSpanScorer(Spans spans, Weight weight, Similarity.SimScorer docScorer)
+                : base(spans, weight, docScorer)
+            {
+            }
+
+            protected override bool SetFreqCurrentDoc()
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Spans/MultiSpansWrapper.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Spans/MultiSpansWrapper.cs b/src/Lucene.Net.Tests/Search/Spans/MultiSpansWrapper.cs
new file mode 100644
index 0000000..2a53f3e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Spans/MultiSpansWrapper.cs
@@ -0,0 +1,215 @@
+using System.Collections.Generic;
+using System.Diagnostics;
+
+namespace Lucene.Net.Search.Spans
+{
+    using Lucene.Net.Index;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using IndexReaderContext = Lucene.Net.Index.IndexReaderContext;
+    using ReaderUtil = Lucene.Net.Index.ReaderUtil;
+    using Term = Lucene.Net.Index.Term;
+    using TermContext = Lucene.Net.Index.TermContext;
+
+    /// <summary>
+    /// A wrapper to perform span operations on a non-leaf reader context.
+    /// <para>NOTE: this should be used for testing purposes only.</para>
+    /// @lucene.internal
+    /// </summary>
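+    /// <remarks>
+    /// Typical usage (a sketch; assumes an <c>IndexSearcher searcher</c> and a
+    /// <c>SpanQuery query</c> are already in scope):
+    /// <code>
+    /// Spans spans = MultiSpansWrapper.Wrap(searcher.TopReaderContext, query);
+    /// while (spans.Next())
+    /// {
+    ///     // spans.Doc, spans.Start and spans.End describe the current match
+    /// }
+    /// </code>
+    /// </remarks>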
+    public class MultiSpansWrapper : Spans // can't be package private due to payloads
+    {
+        private readonly SpanQuery Query;
+        private readonly IList<AtomicReaderContext> Leaves;
+        private int LeafOrd = 0;
+        private Spans Current;
+        private readonly IDictionary<Term, TermContext> TermContexts;
+        private readonly int NumLeaves;
+
+        private MultiSpansWrapper(IList<AtomicReaderContext> leaves, SpanQuery query, IDictionary<Term, TermContext> termContexts)
+        {
+            this.Query = query;
+            this.Leaves = leaves;
+            this.NumLeaves = leaves.Count;
+            this.TermContexts = termContexts;
+        }
+
+        public static Spans Wrap(IndexReaderContext topLevelReaderContext, SpanQuery query)
+        {
+            IDictionary<Term, TermContext> termContexts = new Dictionary<Term, TermContext>();
+            SortedSet<Term> terms = new SortedSet<Term>();
+            query.ExtractTerms(terms);
+            foreach (Term term in terms)
+            {
+                termContexts[term] = TermContext.Build(topLevelReaderContext, term);
+            }
+            IList<AtomicReaderContext> leaves = topLevelReaderContext.Leaves;
+            if (leaves.Count == 1)
+            {
+                AtomicReaderContext ctx = leaves[0];
+                return query.GetSpans(ctx, ((AtomicReader)ctx.Reader).LiveDocs, termContexts);
+            }
+            return new MultiSpansWrapper(leaves, query, termContexts);
+        }
+
+        public override bool Next()
+        {
+            if (LeafOrd >= NumLeaves)
+            {
+                return false;
+            }
+            if (Current == null)
+            {
+                AtomicReaderContext ctx = Leaves[LeafOrd];
+                Current = Query.GetSpans(ctx, ((AtomicReader)ctx.Reader).LiveDocs, TermContexts);
+            }
+            while (true)
+            {
+                if (Current.Next())
+                {
+                    return true;
+                }
+                if (++LeafOrd < NumLeaves)
+                {
+                    AtomicReaderContext ctx = Leaves[LeafOrd];
+                    Current = Query.GetSpans(ctx, ((AtomicReader)ctx.Reader).LiveDocs, TermContexts);
+                }
+                else
+                {
+                    Current = null;
+                    break;
+                }
+            }
+            return false;
+        }
+
+        public override bool SkipTo(int target)
+        {
+            if (LeafOrd >= NumLeaves)
+            {
+                return false;
+            }
+
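+            // target is a top-level doc id, while each leaf's Spans operates on
+            // leaf-local ids; find the leaf containing target and adjust by DocBase.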
+            int subIndex = ReaderUtil.SubIndex(target, Leaves);
+            Debug.Assert(subIndex >= LeafOrd);
+            if (subIndex != LeafOrd)
+            {
+                AtomicReaderContext ctx = Leaves[subIndex];
+                Current = Query.GetSpans(ctx, ((AtomicReader)ctx.Reader).LiveDocs, TermContexts);
+                LeafOrd = subIndex;
+            }
+            else if (Current == null)
+            {
+                AtomicReaderContext ctx = Leaves[LeafOrd];
+                Current = Query.GetSpans(ctx, ((AtomicReader)ctx.Reader).LiveDocs, TermContexts);
+            }
+            while (true)
+            {
+                if (target < Leaves[LeafOrd].DocBase)
+                {
+                    // target was in the previous slice
+                    if (Current.Next())
+                    {
+                        return true;
+                    }
+                }
+                else if (Current.SkipTo(target - Leaves[LeafOrd].DocBase))
+                {
+                    return true;
+                }
+                if (++LeafOrd < NumLeaves)
+                {
+                    AtomicReaderContext ctx = Leaves[LeafOrd];
+                    Current = Query.GetSpans(ctx, ((AtomicReader)ctx.Reader).LiveDocs, TermContexts);
+                }
+                else
+                {
+                    Current = null;
+                    break;
+                }
+            }
+
+            return false;
+        }
+
+        public override int Doc
+        {
+            get
+            {
+                if (Current == null)
+                {
+                    return DocIdSetIterator.NO_MORE_DOCS;
+                }
+                return Current.Doc + Leaves[LeafOrd].DocBase;
+            }
+        }
+
+        public override int Start
+        {
+            get
+            {
+                if (Current == null)
+                {
+                    return DocIdSetIterator.NO_MORE_DOCS;
+                }
+                return Current.Start;
+            }
+        }
+
+        public override int End
+        {
+            get
+            {
+                if (Current == null)
+                {
+                    return DocIdSetIterator.NO_MORE_DOCS;
+                }
+                return Current.End;
+            }
+        }
+
+        public override ICollection<byte[]> GetPayload()
+        {
+            if (Current == null)
+            {
+                return new List<byte[]>();
+            }
+            return Current.GetPayload();
+        }
+
+        public override bool IsPayloadAvailable
+        {
+            get
+            {
+                if (Current == null)
+                {
+                    return false;
+                }
+                return Current.IsPayloadAvailable;
+            }
+        }
+
+        public override long GetCost()
+        {
+            return int.MaxValue; // just for tests
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Spans/TestBasics.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Spans/TestBasics.cs b/src/Lucene.Net.Tests/Search/Spans/TestBasics.cs
new file mode 100644
index 0000000..737aa82
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Spans/TestBasics.cs
@@ -0,0 +1,626 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Documents;
+using Lucene.Net.Support;
+using System.Collections.Generic;
+
+namespace Lucene.Net.Search.Spans
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Analysis;
+    using Lucene.Net.Util;
+    using NUnit.Framework;
+    using System.IO;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using English = Lucene.Net.Util.English;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// Tests basic search capabilities.
+    ///
+    /// <para>Uses a collection of 2000 documents, each containing the English rendition
+    /// of its document number.  For example, the document numbered 333 has text "three
+    /// hundred thirty three".</para>
+    ///
+    /// <para>Each test is a single query, whose hits are checked to ensure that
+    /// all and only the correct documents are returned, thus providing end-to-end
+    /// testing of the indexing and search code.</para>
+    ///
+    /// </summary>
+    [TestFixture]
+    public class TestBasics : LuceneTestCase
+    {
+        private static IndexSearcher Searcher;
+        private static IndexReader Reader;
+        private static Directory Directory;
+
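+        /// <summary>
+        /// A token filter that stamps each token with a payload recording its
+        /// position in the stream, e.g. "pos: 0", "pos: 1", ...
+        /// </summary>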
+        internal sealed class SimplePayloadFilter : TokenFilter
+        {
+            internal int Pos;
+            internal readonly IPayloadAttribute PayloadAttr;
+            internal readonly ICharTermAttribute TermAttr;
+
+            public SimplePayloadFilter(TokenStream input)
+                : base(input)
+            {
+                Pos = 0;
+                PayloadAttr = input.AddAttribute<IPayloadAttribute>();
+                TermAttr = input.AddAttribute<ICharTermAttribute>();
+            }
+
+            public override bool IncrementToken()
+            {
+                if (m_input.IncrementToken())
+                {
+#pragma warning disable 612, 618
+                    PayloadAttr.Payload = new BytesRef(("pos: " + Pos).GetBytes(IOUtils.CHARSET_UTF_8));
+#pragma warning restore 612, 618
+                    Pos++;
+                    return true;
+                }
+                else
+                {
+                    return false;
+                }
+            }
+
+            public override void Reset()
+            {
+                base.Reset();
+                Pos = 0;
+            }
+        }
+
+        internal static Analyzer SimplePayloadAnalyzer;
+
+        /// <summary>
+        /// LUCENENET specific: non-static because NewIndexWriterConfig is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            SimplePayloadAnalyzer = new AnalyzerAnonymousInnerClassHelper();
+
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, SimplePayloadAnalyzer).SetMaxBufferedDocs(TestUtil.NextInt(Random(), 100, 1000)).SetMergePolicy(NewLogMergePolicy()));
+            //writer.infoStream = System.out;
+            for (int i = 0; i < 2000; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField("field", English.IntToEnglish(i), Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            Reader = writer.Reader;
+            Searcher = NewSearcher(Reader);
+            writer.Dispose();
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            public AnalyzerAnonymousInnerClassHelper()
+            {
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+                return new TokenStreamComponents(tokenizer, new SimplePayloadFilter(tokenizer));
+            }
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            Reader.Dispose();
+            Directory.Dispose();
+            Searcher = null;
+            Reader = null;
+            Directory = null;
+            SimplePayloadAnalyzer = null;
+        }
+
+        [Test]
+        public virtual void TestTerm()
+        {
+            Query query = new TermQuery(new Term("field", "seventy"));
+            CheckHits(query, new int[] { 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 870, 871, 872, 873, 874, 875, 876, 877, 878, 879, 970, 971, 972, 973, 974, 975, 976, 977, 978, 979, 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1078, 1079, 1170, 1171, 1172, 1173, 1174, 1175, 1176, 1177, 1178, 1179, 1270, 1271, 1272, 1273, 1274, 1275, 1276, 1277, 1278, 1279, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, 1379, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1670, 1671, 1672, 1673, 1674, 1675, 1676, 1677, 1678, 1679, 1770, 1771, 1772, 1773, 1774, 1775, 1776, 1777, 1778, 1779, 1870, 1871, 1872, 1873, 1874, 1875, 1876, 1877, 1878, 1879, 1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979 });
+        }
+
+        [Test]
+        public virtual void TestTerm2()
+        {
+            Query query = new TermQuery(new Term("field", "seventish"));
+            CheckHits(query, new int[] { });
+        }
+
+        [Test]
+        public virtual void TestPhrase()
+        {
+            PhraseQuery query = new PhraseQuery();
+            query.Add(new Term("field", "seventy"));
+            query.Add(new Term("field", "seven"));
+            CheckHits(query, new int[] { 77, 177, 277, 377, 477, 577, 677, 777, 877, 977, 1077, 1177, 1277, 1377, 1477, 1577, 1677, 1777, 1877, 1977 });
+        }
+
+        [Test]
+        public virtual void TestPhrase2()
+        {
+            PhraseQuery query = new PhraseQuery();
+            query.Add(new Term("field", "seventish"));
+            query.Add(new Term("field", "sevenon"));
+            CheckHits(query, new int[] { });
+        }
+
+        [Test]
+        public virtual void TestBoolean()
+        {
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term("field", "seventy")), Occur.MUST);
+            query.Add(new TermQuery(new Term("field", "seven")), Occur.MUST);
+            CheckHits(query, new int[] { 77, 177, 277, 377, 477, 577, 677, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 877, 977, 1077, 1177, 1277, 1377, 1477, 1577, 1677, 1770, 1771, 1772, 1773, 1774, 1775, 1776, 1777, 1778, 1779, 1877, 1977 });
+        }
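+
+        // Unlike the phrase test above, the boolean conjunction also picks up the
+        // 770s and 1770s ("seven hundred seventy ..."), where "seven" and "seventy"
+        // both occur in the document but never as the adjacent phrase "seventy seven".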
+
+        [Test]
+        public virtual void TestBoolean2()
+        {
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term("field", "sevento")), Occur.MUST);
+            query.Add(new TermQuery(new Term("field", "sevenly")), Occur.MUST);
+            CheckHits(query, new int[] { });
+        }
+
+        [Test]
+        public virtual void TestSpanNearExact()
+        {
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "seventy"));
+            SpanTermQuery term2 = new SpanTermQuery(new Term("field", "seven"));
+            SpanNearQuery query = new SpanNearQuery(new SpanQuery[] { term1, term2 }, 0, true);
+            CheckHits(query, new int[] { 77, 177, 277, 377, 477, 577, 677, 777, 877, 977, 1077, 1177, 1277, 1377, 1477, 1577, 1677, 1777, 1877, 1977 });
+
+            Assert.IsTrue(Searcher.Explain(query, 77).Value > 0.0f);
+            Assert.IsTrue(Searcher.Explain(query, 977).Value > 0.0f);
+
+            QueryUtils.Check(term1);
+            QueryUtils.Check(term2);
+            QueryUtils.CheckUnequal(term1, term2);
+        }
+
+        [Test]
+        public virtual void TestSpanTermQuery()
+        {
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "seventy"));
+            CheckHits(term1, new int[] { 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 870, 871, 872, 873, 874, 875, 876, 877, 878, 879, 970, 971, 972, 973, 974, 975, 976, 977, 978, 979, 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1078, 1079, 1170, 1270, 1370, 1470, 1570, 1670, 1770, 1870, 1970, 1171, 1172, 1173, 1174, 1175, 1176, 1177, 1178, 1179, 1271, 1272, 1273, 1274, 1275, 1276, 1277, 1278, 1279, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, 1379, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1671, 1672, 1673, 1674, 1675, 1676, 1677, 1678, 1679, 1771, 1772, 1773, 1774, 1775, 1776, 1777, 1778, 1779, 1871, 1872, 1873, 1874, 1875, 1876, 1877, 1878, 1879, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979 });
+        }
+
+        [Test]
+        public virtual void TestSpanNearUnordered()
+        {
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "nine"));
+            SpanTermQuery term2 = new SpanTermQuery(new Term("field", "six"));
+            SpanNearQuery query = new SpanNearQuery(new SpanQuery[] { term1, term2 }, 4, false);
+
+            CheckHits(query, new int[] { 609, 629, 639, 649, 659, 669, 679, 689, 699, 906, 926, 936, 946, 956, 966, 976, 986, 996, 1609, 1629, 1639, 1649, 1659, 1669, 1679, 1689, 1699, 1906, 1926, 1936, 1946, 1956, 1966, 1976, 1986, 1996 });
+        }
+
+        [Test]
+        public virtual void TestSpanNearOrdered()
+        {
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "nine"));
+            SpanTermQuery term2 = new SpanTermQuery(new Term("field", "six"));
+            SpanNearQuery query = new SpanNearQuery(new SpanQuery[] { term1, term2 }, 4, true);
+            CheckHits(query, new int[] { 906, 926, 936, 946, 956, 966, 976, 986, 996, 1906, 1926, 1936, 1946, 1956, 1966, 1976, 1986, 1996 });
+        }
+
+        [Test]
+        public virtual void TestSpanNot()
+        {
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "eight"));
+            SpanTermQuery term2 = new SpanTermQuery(new Term("field", "one"));
+            SpanNearQuery near = new SpanNearQuery(new SpanQuery[] { term1, term2 }, 4, true);
+            SpanTermQuery term3 = new SpanTermQuery(new Term("field", "forty"));
+            SpanNotQuery query = new SpanNotQuery(near, term3);
+
+            CheckHits(query, new int[] { 801, 821, 831, 851, 861, 871, 881, 891, 1801, 1821, 1831, 1851, 1861, 1871, 1881, 1891 });
+
+            Assert.IsTrue(Searcher.Explain(query, 801).Value > 0.0f);
+            Assert.IsTrue(Searcher.Explain(query, 891).Value > 0.0f);
+        }
+
+        [Test]
+        public virtual void TestSpanWithMultipleNotSingle()
+        {
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "eight"));
+            SpanTermQuery term2 = new SpanTermQuery(new Term("field", "one"));
+            SpanNearQuery near = new SpanNearQuery(new SpanQuery[] { term1, term2 }, 4, true);
+            SpanTermQuery term3 = new SpanTermQuery(new Term("field", "forty"));
+
+            SpanOrQuery or = new SpanOrQuery(term3);
+
+            SpanNotQuery query = new SpanNotQuery(near, or);
+
+            CheckHits(query, new int[] { 801, 821, 831, 851, 861, 871, 881, 891, 1801, 1821, 1831, 1851, 1861, 1871, 1881, 1891 });
+
+            Assert.IsTrue(Searcher.Explain(query, 801).Value > 0.0f);
+            Assert.IsTrue(Searcher.Explain(query, 891).Value > 0.0f);
+        }
+
+        [Test]
+        public virtual void TestSpanWithMultipleNotMany()
+        {
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "eight"));
+            SpanTermQuery term2 = new SpanTermQuery(new Term("field", "one"));
+            SpanNearQuery near = new SpanNearQuery(new SpanQuery[] { term1, term2 }, 4, true);
+            SpanTermQuery term3 = new SpanTermQuery(new Term("field", "forty"));
+            SpanTermQuery term4 = new SpanTermQuery(new Term("field", "sixty"));
+            SpanTermQuery term5 = new SpanTermQuery(new Term("field", "eighty"));
+
+            SpanOrQuery or = new SpanOrQuery(term3, term4, term5);
+
+            SpanNotQuery query = new SpanNotQuery(near, or);
+
+            CheckHits(query, new int[] { 801, 821, 831, 851, 871, 891, 1801, 1821, 1831, 1851, 1871, 1891 });
+
+            Assert.IsTrue(Searcher.Explain(query, 801).Value > 0.0f);
+            Assert.IsTrue(Searcher.Explain(query, 891).Value > 0.0f);
+        }
+
+        [Test]
+        public virtual void TestNpeInSpanNearWithSpanNot()
+        {
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "eight"));
+            SpanTermQuery term2 = new SpanTermQuery(new Term("field", "one"));
+            SpanNearQuery near = new SpanNearQuery(new SpanQuery[] { term1, term2 }, 4, true);
+            SpanTermQuery hun = new SpanTermQuery(new Term("field", "hundred"));
+            SpanTermQuery term3 = new SpanTermQuery(new Term("field", "forty"));
+            SpanNearQuery exclude = new SpanNearQuery(new SpanQuery[] { hun, term3 }, 1, true);
+
+            SpanNotQuery query = new SpanNotQuery(near, exclude);
+
+            CheckHits(query, new int[] { 801, 821, 831, 851, 861, 871, 881, 891, 1801, 1821, 1831, 1851, 1861, 1871, 1881, 1891 });
+
+            Assert.IsTrue(Searcher.Explain(query, 801).Value > 0.0f);
+            Assert.IsTrue(Searcher.Explain(query, 891).Value > 0.0f);
+        }
+
+        [Test]
+        public virtual void TestNpeInSpanNearInSpanFirstInSpanNot()
+        {
+            int n = 5;
+            SpanTermQuery hun = new SpanTermQuery(new Term("field", "hundred"));
+            SpanTermQuery term40 = new SpanTermQuery(new Term("field", "forty"));
+            SpanTermQuery term40c = (SpanTermQuery)term40.Clone();
+
+            SpanFirstQuery include = new SpanFirstQuery(term40, n);
+            SpanNearQuery near = new SpanNearQuery(new SpanQuery[] { hun, term40c }, n - 1, true);
+            SpanFirstQuery exclude = new SpanFirstQuery(near, n - 1);
+            SpanNotQuery q = new SpanNotQuery(include, exclude);
+
+            CheckHits(q, new int[] { 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 1040, 1041, 1042, 1043, 1044, 1045, 1046, 1047, 1048, 1049, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1147, 1148, 1149, 1240, 1241, 1242, 1243, 1244, 1245, 1246, 1247, 1248, 1249, 1340, 1341, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1540, 1541, 1542, 1543, 1544, 1545, 1546, 1547, 1548, 1549, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, 1649, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1840, 1841, 1842, 1843, 1844, 1845, 1846, 1847, 1848, 1849, 1940, 1941, 1942, 1943, 1944, 1945, 1946, 1947, 1948, 1949 });
+        }
+
+        [Test]
+        public virtual void TestSpanNotWindowOne()
+        {
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "eight"));
+            SpanTermQuery term2 = new SpanTermQuery(new Term("field", "forty"));
+            SpanNearQuery near = new SpanNearQuery(new SpanQuery[] { term1, term2 }, 4, true);
+            SpanTermQuery term3 = new SpanTermQuery(new Term("field", "one"));
+            SpanNotQuery query = new SpanNotQuery(near, term3, 1, 1);
+
+            CheckHits(query, new int[] { 840, 842, 843, 844, 845, 846, 847, 848, 849, 1840, 1842, 1843, 1844, 1845, 1846, 1847, 1848, 1849 });
+
+            Assert.IsTrue(Searcher.Explain(query, 840).Value > 0.0f);
+            Assert.IsTrue(Searcher.Explain(query, 1842).Value > 0.0f);
+        }
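+
+        // A note on the overload used here and below: SpanNotQuery(include, exclude,
+        // pre, post) rejects an include span when an exclude span occurs within pre
+        // positions before it or post positions after it. With (1, 1), the
+        // "eight ... forty" spans in the 840s survive unless "one" is immediately
+        // adjacent, which is why 841 ("eight hundred forty one") is missing from the
+        // expected hits while 842 through 849 remain.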
+
+        [Test]
+        public virtual void TestSpanNotWindowTwoBefore()
+        {
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "eight"));
+            SpanTermQuery term2 = new SpanTermQuery(new Term("field", "forty"));
+            SpanNearQuery near = new SpanNearQuery(new SpanQuery[] { term1, term2 }, 4, true);
+            SpanTermQuery term3 = new SpanTermQuery(new Term("field", "one"));
+            SpanNotQuery query = new SpanNotQuery(near, term3, 2, 0);
+
+            CheckHits(query, new int[] { 840, 841, 842, 843, 844, 845, 846, 847, 848, 849 });
+
+            Assert.IsTrue(Searcher.Explain(query, 840).Value > 0.0f);
+            Assert.IsTrue(Searcher.Explain(query, 849).Value > 0.0f);
+        }
+
+        [Test]
+        public virtual void TestSpanNotWindowNeg()
+        {
+            //test handling of invalid window < 0
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "eight"));
+            SpanTermQuery term2 = new SpanTermQuery(new Term("field", "one"));
+            SpanNearQuery near = new SpanNearQuery(new SpanQuery[] { term1, term2 }, 4, true);
+            SpanTermQuery term3 = new SpanTermQuery(new Term("field", "forty"));
+
+            SpanOrQuery or = new SpanOrQuery(term3);
+
+            SpanNotQuery query = new SpanNotQuery(near, or);
+
+            CheckHits(query, new int[] { 801, 821, 831, 851, 861, 871, 881, 891, 1801, 1821, 1831, 1851, 1861, 1871, 1881, 1891 });
+
+            Assert.IsTrue(Searcher.Explain(query, 801).Value > 0.0f);
+            Assert.IsTrue(Searcher.Explain(query, 891).Value > 0.0f);
+        }
+
+        [Test]
+        public virtual void TestSpanNotWindowDoubleExcludesBefore()
+        {
+            //test hitting two excludes before an include
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "forty"));
+            SpanTermQuery term2 = new SpanTermQuery(new Term("field", "two"));
+            SpanNearQuery near = new SpanNearQuery(new SpanTermQuery[] { term1, term2 }, 2, true);
+            SpanTermQuery exclude = new SpanTermQuery(new Term("field", "one"));
+
+            SpanNotQuery query = new SpanNotQuery(near, exclude, 4, 1);
+
+            CheckHits(query, new int[] { 42, 242, 342, 442, 542, 642, 742, 842, 942 });
+
+            Assert.IsTrue(Searcher.Explain(query, 242).Value > 0.0f);
+            Assert.IsTrue(Searcher.Explain(query, 942).Value > 0.0f);
+        }
+
+        [Test]
+        public virtual void TestSpanFirst()
+        {
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "five"));
+            SpanFirstQuery query = new SpanFirstQuery(term1, 1);
+
+            CheckHits(query, new int[] { 5, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599 });
+
+            Assert.IsTrue(Searcher.Explain(query, 5).Value > 0.0f);
+            Assert.IsTrue(Searcher.Explain(query, 599).Value > 0.0f);
+        }
+
+        [Test]
+        public virtual void TestSpanPositionRange()
+        {
+            SpanPositionRangeQuery query;
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "five"));
+            query = new SpanPositionRangeQuery(term1, 1, 2);
+            CheckHits(query, new int[] { 25, 35, 45, 55, 65, 75, 85, 95 });
+            Assert.IsTrue(Searcher.Explain(query, 25).Value > 0.0f);
+            Assert.IsTrue(Searcher.Explain(query, 95).Value > 0.0f);
+
+            query = new SpanPositionRangeQuery(term1, 0, 1);
+            CheckHits(query, new int[] { 5, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599 });
+
+            query = new SpanPositionRangeQuery(term1, 6, 7);
+            CheckHits(query, new int[] { });
+        }
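+
+        // Reading the test above: for a single-term span, SpanPositionRangeQuery(term,
+        // start, end) keeps only matches whose position falls in [start, end). In
+        // "twenty five" (doc 25) the term "five" sits at position 1, so it passes the
+        // [1, 2) filter; in "five" (doc 5) it sits at position 0 and only passes the
+        // [0, 1) filter.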
+
+        [Test]
+        public virtual void TestSpanPayloadCheck()
+        {
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "five"));
+#pragma warning disable 612, 618
+            BytesRef pay = new BytesRef(("pos: " + 5).GetBytes(IOUtils.CHARSET_UTF_8));
+#pragma warning restore 612, 618
+            SpanQuery query = new SpanPayloadCheckQuery(term1, new List<byte[]>() { pay.Bytes });
+            CheckHits(query, new int[] { 1125, 1135, 1145, 1155, 1165, 1175, 1185, 1195, 1225, 1235, 1245, 1255, 1265, 1275, 1285, 1295, 1325, 1335, 1345, 1355, 1365, 1375, 1385, 1395, 1425, 1435, 1445, 1455, 1465, 1475, 1485, 1495, 1525, 1535, 1545, 1555, 1565, 1575, 1585, 1595, 1625, 1635, 1645, 1655, 1665, 1675, 1685, 1695, 1725, 1735, 1745, 1755, 1765, 1775, 1785, 1795, 1825, 1835, 1845, 1855, 1865, 1875, 1885, 1895, 1925, 1935, 1945, 1955, 1965, 1975, 1985, 1995 });
+            Assert.IsTrue(Searcher.Explain(query, 1125).Value > 0.0f);
+
+            SpanTermQuery term2 = new SpanTermQuery(new Term("field", "hundred"));
+            SpanNearQuery snq;
+            SpanQuery[] clauses;
+            IList<byte[]> list;
+            BytesRef pay2;
+            clauses = new SpanQuery[2];
+            clauses[0] = term1;
+            clauses[1] = term2;
+            snq = new SpanNearQuery(clauses, 0, true);
+#pragma warning disable 612, 618
+            pay = new BytesRef(("pos: " + 0).GetBytes(IOUtils.CHARSET_UTF_8));
+            pay2 = new BytesRef(("pos: " + 1).GetBytes(IOUtils.CHARSET_UTF_8));
+#pragma warning restore 612, 618
+            list = new List<byte[]>();
+            list.Add(pay.Bytes);
+            list.Add(pay2.Bytes);
+            query = new SpanNearPayloadCheckQuery(snq, list);
+            CheckHits(query, new int[] { 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599 });
+            clauses = new SpanQuery[3];
+            clauses[0] = term1;
+            clauses[1] = term2;
+            clauses[2] = new SpanTermQuery(new Term("field", "five"));
+            snq = new SpanNearQuery(clauses, 0, true);
+#pragma warning disable 612, 618
+            pay = new BytesRef(("pos: " + 0).GetBytes(IOUtils.CHARSET_UTF_8));
+            pay2 = new BytesRef(("pos: " + 1).GetBytes(IOUtils.CHARSET_UTF_8));
+            BytesRef pay3 = new BytesRef(("pos: " + 2).GetBytes(IOUtils.CHARSET_UTF_8));
+#pragma warning restore 612, 618
+            list = new List<byte[]>();
+            list.Add(pay.Bytes);
+            list.Add(pay2.Bytes);
+            list.Add(pay3.Bytes);
+            query = new SpanNearPayloadCheckQuery(snq, list);
+            CheckHits(query, new int[] { 505 });
+        }
+
+        [Test]
+        public virtual void TestComplexSpanChecks()
+        {
+            SpanTermQuery one = new SpanTermQuery(new Term("field", "one"));
+            SpanTermQuery thous = new SpanTermQuery(new Term("field", "thousand"));
+            //should be one position in between
+            SpanTermQuery hundred = new SpanTermQuery(new Term("field", "hundred"));
+            SpanTermQuery three = new SpanTermQuery(new Term("field", "three"));
+
+            SpanNearQuery oneThous = new SpanNearQuery(new SpanQuery[] { one, thous }, 0, true);
+            SpanNearQuery hundredThree = new SpanNearQuery(new SpanQuery[] { hundred, three }, 0, true);
+            SpanNearQuery oneThousHunThree = new SpanNearQuery(new SpanQuery[] { oneThous, hundredThree }, 1, true);
+            SpanQuery query;
+            //this one's too small
+            query = new SpanPositionRangeQuery(oneThousHunThree, 1, 2);
+            CheckHits(query, new int[] { });
+            //this one's just right
+            query = new SpanPositionRangeQuery(oneThousHunThree, 0, 6);
+            CheckHits(query, new int[] { 1103, 1203, 1303, 1403, 1503, 1603, 1703, 1803, 1903 });
+
+            var payloads = new List<byte[]>();
+#pragma warning disable 612, 618
+            BytesRef pay = new BytesRef(("pos: " + 0).GetBytes(IOUtils.CHARSET_UTF_8));
+            BytesRef pay2 = new BytesRef(("pos: " + 1).GetBytes(IOUtils.CHARSET_UTF_8));
+            BytesRef pay3 = new BytesRef(("pos: " + 3).GetBytes(IOUtils.CHARSET_UTF_8));
+            BytesRef pay4 = new BytesRef(("pos: " + 4).GetBytes(IOUtils.CHARSET_UTF_8));
+#pragma warning restore 612, 618
+            payloads.Add(pay.Bytes);
+            payloads.Add(pay2.Bytes);
+            payloads.Add(pay3.Bytes);
+            payloads.Add(pay4.Bytes);
+            query = new SpanNearPayloadCheckQuery(oneThousHunThree, payloads);
+            CheckHits(query, new int[] { 1103, 1203, 1303, 1403, 1503, 1603, 1703, 1803, 1903 });
+        }
+
+        [Test]
+        public virtual void TestSpanOr()
+        {
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "thirty"));
+            SpanTermQuery term2 = new SpanTermQuery(new Term("field", "three"));
+            SpanNearQuery near1 = new SpanNearQuery(new SpanQuery[] { term1, term2 }, 0, true);
+            SpanTermQuery term3 = new SpanTermQuery(new Term("field", "forty"));
+            SpanTermQuery term4 = new SpanTermQuery(new Term("field", "seven"));
+            SpanNearQuery near2 = new SpanNearQuery(new SpanQuery[] { term3, term4 }, 0, true);
+
+            SpanOrQuery query = new SpanOrQuery(near1, near2);
+
+            CheckHits(query, new int[] { 33, 47, 133, 147, 233, 247, 333, 347, 433, 447, 533, 547, 633, 647, 733, 747, 833, 847, 933, 947, 1033, 1047, 1133, 1147, 1233, 1247, 1333, 1347, 1433, 1447, 1533, 1547, 1633, 1647, 1733, 1747, 1833, 1847, 1933, 1947 });
+
+            Assert.IsTrue(Searcher.Explain(query, 33).Value > 0.0f);
+            Assert.IsTrue(Searcher.Explain(query, 947).Value > 0.0f);
+        }
+
+        [Test]
+        public virtual void TestSpanExactNested()
+        {
+            SpanTermQuery term1 = new SpanTermQuery(new Term("field", "three"));
+            SpanTermQuery term2 = new SpanTermQuery(new Term("field", "hundred"));
+            SpanNearQuery near1 = new SpanNearQuery(new SpanQuery[] { term1, term2 }, 0, true);
+            SpanTermQuery term3 = new SpanTermQuery(new Term("field", "thirty"));
+            SpanTermQuery term4 = new SpanTermQuery(new Term("field", "three"));
+            SpanNearQuery near2 = new SpanNearQuery(new SpanQuery[] { term3, term4 }, 0, true);
+
+            SpanNearQuery query = new SpanNearQuery(new SpanQuery[] { near1, near2 }, 0, true);
+
+            CheckHits(query, new int[] { 333, 1333 });
+
+            Assert.IsTrue(Searcher.Explain(query, 333).Value > 0.0f);
+        }
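+
+        // In doc 333 ("three hundred thirty three") the two inner near-queries line up
+        // exactly: "three hundred" spans positions 0-1 and "thirty three" spans 2-3,
+        // so the outer zero-slop ordered near-query matches; doc 1333 matches the same
+        // way with every position shifted past the leading "one thousand".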
+
+        [Test]
+        public virtual void TestSpanNearOr()
+        {
+            SpanTermQuery t1 = new SpanTermQuery(new Term("field", "six"));
+            SpanTermQuery t3 = new SpanTermQuery(new Term("field", "seven"));
+
+            SpanTermQuery t5 = new SpanTermQuery(new Term("field", "seven"));
+            SpanTermQuery t6 = new SpanTermQuery(new Term("field", "six"));
+
+            SpanOrQuery to1 = new SpanOrQuery(t1, t3);
+            SpanOrQuery to2 = new SpanOrQuery(t5, t6);
+
+            SpanNearQuery query = new SpanNearQuery(new SpanQuery[] { to1, to2 }, 10, true);
+
+            CheckHits(query, new int[] { 606, 607, 626, 627, 636, 637, 646, 647, 656, 657, 666, 667, 676, 677, 686, 687, 696, 697, 706, 707, 726, 727, 736, 737, 746, 747, 756, 757, 766, 767, 776, 777, 786, 787, 796, 797, 1606, 1607, 1626, 1627, 1636, 1637, 1646, 1647, 1656, 1657, 1666, 1667, 1676, 1677, 1686, 1687, 1696, 1697, 1706, 1707, 1726, 1727, 1736, 1737, 1746, 1747, 1756, 1757, 1766, 1767, 1776, 1777, 1786, 1787, 1796, 1797 });
+        }
+
+        [Test]
+        public virtual void TestSpanComplex1()
+        {
+            SpanTermQuery t1 = new SpanTermQuery(new Term("field", "six"));
+            SpanTermQuery t2 = new SpanTermQuery(new Term("field", "hundred"));
+            SpanNearQuery tt1 = new SpanNearQuery(new SpanQuery[] { t1, t2 }, 0, true);
+
+            SpanTermQuery t3 = new SpanTermQuery(new Term("field", "seven"));
+            SpanTermQuery t4 = new SpanTermQuery(new Term("field", "hundred"));
+            SpanNearQuery tt2 = new SpanNearQuery(new SpanQuery[] { t3, t4 }, 0, true);
+
+            SpanTermQuery t5 = new SpanTermQuery(new Term("field", "seven"));
+            SpanTermQuery t6 = new SpanTermQuery(new Term("field", "six"));
+
+            SpanOrQuery to1 = new SpanOrQuery(tt1, tt2);
+            SpanOrQuery to2 = new SpanOrQuery(t5, t6);
+
+            SpanNearQuery query = new SpanNearQuery(new SpanQuery[] { to1, to2 }, 100, true);
+
+            CheckHits(query, new int[] { 606, 607, 626, 627, 636, 637, 646, 647, 656, 657, 666, 667, 676, 677, 686, 687, 696, 697, 706, 707, 726, 727, 736, 737, 746, 747, 756, 757, 766, 767, 776, 777, 786, 787, 796, 797, 1606, 1607, 1626, 1627, 1636, 1637, 1646, 1647, 1656, 1657, 1666, 1667, 1676, 1677, 1686, 1687, 1696, 1697, 1706, 1707, 1726, 1727, 1736, 1737, 1746, 1747, 1756, 1757, 1766, 1767, 1776, 1777, 1786, 1787, 1796, 1797 });
+        }
+
+        [Test]
+        public virtual void TestSpansSkipTo()
+        {
+            SpanTermQuery t1 = new SpanTermQuery(new Term("field", "seventy"));
+            SpanTermQuery t2 = new SpanTermQuery(new Term("field", "seventy"));
+            Spans s1 = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, t1);
+            Spans s2 = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, t2);
+
+            Assert.IsTrue(s1.Next());
+            Assert.IsTrue(s2.Next());
+
+            bool hasMore = true;
+
+            do
+            {
+                hasMore = SkipToAccordingToJavaDocs(s1, s1.Doc + 1);
+                Assert.AreEqual(hasMore, s2.SkipTo(s2.Doc + 1));
+                Assert.AreEqual(s1.Doc, s2.Doc);
+            } while (hasMore);
+        }
+
+        /// <summary>
+        /// Skips to the first match beyond the current, whose document number is
+        /// greater than or equal to <i>target</i>. <p>Returns true iff there is such
+        /// a match.  <p>Behaves as if written: <pre>
+        ///   boolean skipTo(int target) {
+        ///     do {
+        ///       if (!next())
+        ///         return false;
+        ///     } while (target > doc());
+        ///     return true;
+        ///   }
+        /// </pre>
+        /// </summary>
+        private bool SkipToAccordingToJavaDocs(Spans s, int target)
+        {
+            do
+            {
+                if (!s.Next())
+                {
+                    return false;
+                }
+            } while (target > s.Doc);
+            return true;
+        }
+
+        private void CheckHits(Query query, int[] results)
+        {
+            Search.CheckHits.DoCheckHits(Random(), query, "field", Searcher, results, Similarity);
+        }
+    }
+}
\ No newline at end of file


[05/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestFieldCacheRangeFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestFieldCacheRangeFilter.cs b/src/Lucene.Net.Tests/Search/TestFieldCacheRangeFilter.cs
new file mode 100644
index 0000000..63f77d9
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestFieldCacheRangeFilter.cs
@@ -0,0 +1,613 @@
+using Lucene.Net.Documents;
+using NUnit.Framework;
+using System;
+using System.IO;
+
+namespace Lucene.Net.Search
+{
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// A basic 'positive' Unit test class for the FieldCacheRangeFilter class.
+    ///
+    /// <p>
+    /// NOTE: at the moment, this class only tests for 'positive' results;
+    /// it does not verify the results to ensure there are no 'false positives',
+    /// nor does it adequately test 'negative' results.  It also does not test
+    /// that garbage input results in an Exception.
+    /// </summary>
+    [TestFixture]
+    public class TestFieldCacheRangeFilter : BaseTestRangeFilter
+    {
+        /// <summary>
+        /// LUCENENET specific. Ensure we have an infostream attached to the default FieldCache
+        /// when running the tests. In Java, this was done in the Core.Search.TestFieldCache.TestInfoStream() 
+        /// method (which polluted the state of these tests), but we need to make the tests self-contained 
+        /// so they can be run correctly regardless of order. Not setting the InfoStream skips an execution
+        /// path within these tests, so we should do it to make sure we test all of the code.
+        /// </summary>
+        public override void SetUp()
+        {
+            base.SetUp();
+            FieldCache.DEFAULT.InfoStream = new StringWriter();
+        }
+
+        /// <summary>
+        /// LUCENENET specific. See <see cref="SetUp()"/>. Dispose our InfoStream and set it to null
+        /// to avoid polluting the state of other tests.
+        /// </summary>
+        public override void TearDown()
+        {
+            FieldCache.DEFAULT.InfoStream.Dispose();
+            FieldCache.DEFAULT.InfoStream = null;
+            base.TearDown();
+        }
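+
+        // T and F are the boolean constants (true/false) inherited from
+        // BaseTestRangeFilter, used as the includeLower/includeUpper arguments, and
+        // Pad(...) produces fixed-width, sort-friendly string ids. A minimal usage
+        // sketch of the filter under test (hypothetical values, assuming an "id"
+        // string field):
+        //
+        //   Filter f = FieldCacheRangeFilter.NewStringRange("id", Pad(5), Pad(10), true, false);
+        //   TopDocs hits = searcher.Search(new MatchAllDocsQuery(), f, 100);
+        //
+        // would match the documents whose padded id falls in [Pad(5), Pad(10)).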
+
+        [Test]
+        public virtual void TestRangeFilterId()
+        {
+            IndexReader reader = SignedIndexReader;
+            IndexSearcher search = NewSearcher(reader);
+
+            int medId = ((MaxId - MinId) / 2);
+
+            string minIP = Pad(MinId);
+            string maxIP = Pad(MaxId);
+            string medIP = Pad(medId);
+
+            int numDocs = reader.NumDocs;
+
+            Assert.AreEqual(numDocs, 1 + MaxId - MinId, "num of docs");
+
+            ScoreDoc[] result;
+            Query q = new TermQuery(new Term("body", "body"));
+
+            // test id, bounded on both ends
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, maxIP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, maxIP, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but last");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, maxIP, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but first");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, maxIP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 2, result.Length, "all but ends");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", medIP, maxIP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1 + MaxId - medId, result.Length, "med and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, medIP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1 + medId - MinId, result.Length, "up to med");
+
+            // unbounded id
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", null, null, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "min and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", null, maxIP, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "max and down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not min, but up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", null, maxIP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not max, but down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", medIP, maxIP, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(MaxId - medId, result.Length, "med and up, not max");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, medIP, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(medId - MinId, result.Length, "not min, up to med");
+
+            // very small sets
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, minIP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "min,min,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", medIP, medIP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "med,med,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", maxIP, maxIP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "max,max,F,F");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, minIP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "min,min,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", null, minIP, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "nul,min,F,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", maxIP, maxIP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,max,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", maxIP, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,nul,T,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", medIP, medIP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "med,med,T,T");
+        }
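+
+        // Essentially the same matrix of bounded, half-bounded, unbounded, and
+        // degenerate single-point ranges is repeated below for the numeric variants,
+        // plus overflow/infinity and inverse-range special cases that must match
+        // nothing.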
+
+        [Test]
+        public virtual void TestFieldCacheRangeFilterRand()
+        {
+            IndexReader reader = SignedIndexReader;
+            IndexSearcher search = NewSearcher(reader);
+
+            string minRP = Pad(SignedIndexDir.MinR);
+            string maxRP = Pad(SignedIndexDir.MaxR);
+
+            int numDocs = reader.NumDocs;
+
+            Assert.AreEqual(numDocs, 1 + MaxId - MinId, "num of docs");
+
+            ScoreDoc[] result;
+            Query q = new TermQuery(new Term("body", "body"));
+
+            // test extremes, bounded on both ends
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, maxRP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, maxRP, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but biggest");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, maxRP, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but smallest");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, maxRP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 2, result.Length, "all but extremes");
+
+            // unbounded
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "smallest and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", null, maxRP, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "biggest and down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not smallest, but up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", null, maxRP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not biggest, but down");
+
+            // very small sets
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, minRP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "min,min,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", maxRP, maxRP, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "max,max,F,F");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, minRP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "min,min,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", null, minRP, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "nul,min,F,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", maxRP, maxRP, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,max,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", maxRP, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,nul,T,T");
+        }
+
+        // byte ranges cannot be tested, because all ranges are too big for bytes; an extra, smaller range would be needed for that
+
+        [Test]
+        public virtual void TestFieldCacheRangeFilterShorts()
+        {
+            IndexReader reader = SignedIndexReader;
+            IndexSearcher search = NewSearcher(reader);
+
+            int numDocs = reader.NumDocs;
+            int medId = ((MaxId - MinId) / 2);
+            short? minIdO = Convert.ToInt16((short)MinId);
+            short? maxIdO = Convert.ToInt16((short)MaxId);
+            short? medIdO = Convert.ToInt16((short)medId);
+
+            Assert.AreEqual(numDocs, 1 + MaxId - MinId, "num of docs");
+
+            ScoreDoc[] result;
+            Query q = new TermQuery(new Term("body", "body"));
+
+#pragma warning disable 612, 618
+            // test id, bounded on both ends
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, maxIdO, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but last");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, maxIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but first");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 2, result.Length, "all but ends");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", medIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1 + MaxId - medId, result.Length, "med and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, medIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1 + medId - MinId, result.Length, "up to med");
+
+            // unbounded id
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", null, null, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "min and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", null, maxIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "max and down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not min, but up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", null, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not max, but down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", medIdO, maxIdO, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(MaxId - medId, result.Length, "med and up, not max");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, medIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(medId - MinId, result.Length, "not min, up to med");
+
+            // very small sets
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, minIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "min,min,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", medIdO, medIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "med,med,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", maxIdO, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "max,max,F,F");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", minIdO, minIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "min,min,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", null, minIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "nul,min,F,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", maxIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,max,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", maxIdO, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,nul,T,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", medIdO, medIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "med,med,T,T");
+
+            // special cases
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", Convert.ToInt16(short.MaxValue), null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "overflow special case");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", null, Convert.ToInt16(short.MinValue), F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "overflow special case");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt16Range("id", maxIdO, minIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "inverse range");
+#pragma warning restore 612, 618
+        }
+
+        [Test]
+        public virtual void TestFieldCacheRangeFilterInts()
+        {
+            IndexReader reader = SignedIndexReader;
+            IndexSearcher search = NewSearcher(reader);
+
+            int numDocs = reader.NumDocs;
+            int medId = ((MaxId - MinId) / 2);
+            int? minIdO = Convert.ToInt32(MinId);
+            int? maxIdO = Convert.ToInt32(MaxId);
+            int? medIdO = Convert.ToInt32(medId);
+
+            Assert.AreEqual(numDocs, 1 + MaxId - MinId, "num of docs");
+
+            ScoreDoc[] result;
+            Query q = new TermQuery(new Term("body", "body"));
+
+            // test id, bounded on both ends
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, maxIdO, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but last");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, maxIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but first");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 2, result.Length, "all but ends");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", medIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1 + MaxId - medId, result.Length, "med and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, medIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1 + medId - MinId, result.Length, "up to med");
+
+            // unbounded id
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", null, null, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "min and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", null, maxIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "max and down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not min, but up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", null, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not max, but down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", medIdO, maxIdO, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(MaxId - medId, result.Length, "med and up, not max");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, medIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(medId - MinId, result.Length, "not min, up to med");
+
+            // very small sets
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, minIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "min,min,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", medIdO, medIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "med,med,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", maxIdO, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "max,max,F,F");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", minIdO, minIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "min,min,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", null, minIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "nul,min,F,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", maxIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,max,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", maxIdO, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,nul,T,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", medIdO, medIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "med,med,T,T");
+
+            // special cases
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", Convert.ToInt32(int.MaxValue), null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "overflow special case");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", null, Convert.ToInt32(int.MinValue), F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "overflow special case");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt32Range("id", maxIdO, minIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "inverse range");
+        }
+
+        [Test]
+        public virtual void TestFieldCacheRangeFilterLongs()
+        {
+            IndexReader reader = SignedIndexReader;
+            IndexSearcher search = NewSearcher(reader);
+
+            int numDocs = reader.NumDocs;
+            int medId = ((MaxId - MinId) / 2);
+            long? minIdO = Convert.ToInt64(MinId);
+            long? maxIdO = Convert.ToInt64(MaxId);
+            long? medIdO = Convert.ToInt64(medId);
+
+            Assert.AreEqual(numDocs, 1 + MaxId - MinId, "num of docs");
+
+            ScoreDoc[] result;
+            Query q = new TermQuery(new Term("body", "body"));
+
+            // test id, bounded on both ends
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, maxIdO, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but last");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, maxIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "all but first");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 2, result.Length, "all but ends");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", medIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1 + MaxId - medId, result.Length, "med and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, medIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1 + medId - MinId, result.Length, "up to med");
+
+            // unbounded id
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", null, null, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "min and up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", null, maxIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "max and down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not min, but up");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", null, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs - 1, result.Length, "not max, but down");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", medIdO, maxIdO, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(MaxId - medId, result.Length, "med and up, not max");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, medIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(medId - MinId, result.Length, "not min, up to med");
+
+            // very small sets
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, minIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "min,min,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", medIdO, medIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "med,med,F,F");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", maxIdO, maxIdO, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "max,max,F,F");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", minIdO, minIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "min,min,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", null, minIdO, F, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "nul,min,F,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", maxIdO, maxIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,max,T,T");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", maxIdO, null, T, F), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "max,nul,T,T");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", medIdO, medIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(1, result.Length, "med,med,T,T");
+
+            // special cases
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", Convert.ToInt64(long.MaxValue), null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "overflow special case");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", null, Convert.ToInt64(long.MinValue), F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "overflow special case");
+            result = search.Search(q, FieldCacheRangeFilter.NewInt64Range("id", maxIdO, minIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "inverse range");
+        }
+
+        // float and double tests are a bit minimalistic, but it's complicated because of missing precision
+
+        [Test]
+        public virtual void TestFieldCacheRangeFilterFloats()
+        {
+            IndexReader reader = SignedIndexReader;
+            IndexSearcher search = NewSearcher(reader);
+
+            int numDocs = reader.NumDocs;
+            float? minIdO = Convert.ToSingle(MinId + .5f);
+            float? medIdO = Convert.ToSingle((float)minIdO + (MaxId - MinId) / 2.0f);
+
+            ScoreDoc[] result;
+            Query q = new TermQuery(new Term("body", "body"));
+
+            result = search.Search(q, FieldCacheRangeFilter.NewSingleRange("id", minIdO, medIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs / 2, result.Length, "find half");
+            int count = 0;
+            result = search.Search(q, FieldCacheRangeFilter.NewSingleRange("id", null, medIdO, F, T), numDocs).ScoreDocs;
+            count += result.Length;
+            result = search.Search(q, FieldCacheRangeFilter.NewSingleRange("id", medIdO, null, F, F), numDocs).ScoreDocs;
+            count += result.Length;
+            Assert.AreEqual(numDocs, count, "sum of two concenatted ranges");
+            result = search.Search(q, FieldCacheRangeFilter.NewSingleRange("id", null, null, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+            result = search.Search(q, FieldCacheRangeFilter.NewSingleRange("id", Convert.ToSingle(float.PositiveInfinity), null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "infinity special case");
+            result = search.Search(q, FieldCacheRangeFilter.NewSingleRange("id", null, Convert.ToSingle(float.NegativeInfinity), F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "infinity special case");
+        }
+
+        [Test]
+        public virtual void TestFieldCacheRangeFilterDoubles()
+        {
+            IndexReader reader = SignedIndexReader;
+            IndexSearcher search = NewSearcher(reader);
+
+            int numDocs = reader.NumDocs;
+            double? minIdO = Convert.ToDouble(MinId + .5);
+            double? medIdO = Convert.ToDouble((double)minIdO + (MaxId - MinId) / 2.0);
+
+            ScoreDoc[] result;
+            Query q = new TermQuery(new Term("body", "body"));
+
+            result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", minIdO, medIdO, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs / 2, result.Length, "find half");
+            int count = 0;
+            result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", null, medIdO, F, T), numDocs).ScoreDocs;
+            count += result.Length;
+            result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", medIdO, null, F, F), numDocs).ScoreDocs;
+            count += result.Length;
+            Assert.AreEqual(numDocs, count, "sum of two concenatted ranges");
+            result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", null, null, T, T), numDocs).ScoreDocs;
+            Assert.AreEqual(numDocs, result.Length, "find all");
+            result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", Convert.ToDouble(double.PositiveInfinity), null, F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "infinity special case");
+            result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", null, Convert.ToDouble(double.NegativeInfinity), F, F), numDocs).ScoreDocs;
+            Assert.AreEqual(0, result.Length, "infinity special case");
+        }
+
+        // test using a sparse index (with deleted docs).
+        [Test]
+        public virtual void TestSparseIndex()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            for (int d = -20; d <= 20; d++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("id", Convert.ToString(d), Field.Store.NO));
+                doc.Add(NewStringField("body", "body", Field.Store.NO));
+                writer.AddDocument(doc);
+            }
+
+            writer.ForceMerge(1);
+            writer.DeleteDocuments(new Term("id", "0"));
+            writer.Dispose();
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            IndexSearcher search = NewSearcher(reader);
+            Assert.IsTrue(reader.HasDeletions);
+
+            ScoreDoc[] result;
+            Query q = new TermQuery(new Term("body", "body"));
+
+#pragma warning disable 612, 618
+            result = search.Search(q, FieldCacheRangeFilter.NewByteRange("id", (sbyte?)-20, (sbyte?)20, T, T), 100).ScoreDocs;
+            Assert.AreEqual(40, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewByteRange("id", (sbyte?)0, (sbyte?)20, T, T), 100).ScoreDocs;
+            Assert.AreEqual(20, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewByteRange("id", (sbyte?)-20, (sbyte?)0, T, T), 100).ScoreDocs;
+            Assert.AreEqual(20, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewByteRange("id", (sbyte?)10, (sbyte?)20, T, T), 100).ScoreDocs;
+            Assert.AreEqual(11, result.Length, "find all");
+
+            result = search.Search(q, FieldCacheRangeFilter.NewByteRange("id", (sbyte?)-20, (sbyte?)-10, T, T), 100).ScoreDocs;
+            Assert.AreEqual(11, result.Length, "find all");
+#pragma warning restore 612, 618
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+
+        #region BaseTestRangeFilter
+        // LUCENENET NOTE: Tests in a base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestPad()
+        {
+            base.TestPad();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file
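
A minimal illustrative sketch of the API exercised above: FieldCacheRangeFilter.NewInt64Range
takes optional lower/upper bounds (null = unbounded on that end) plus the two inclusivity
flags that the T/F matrix walks through. The helper class and method names here are
hypothetical:

    using Lucene.Net.Index;
    using Lucene.Net.Search;

    public static class RangeFilterSketch
    {
        // Runs body:body restricted to id in the given range. null for lower or
        // upper leaves that end unbounded; includeLower/includeUpper correspond
        // to the T/F arguments in the assertions above.
        public static ScoreDoc[] IdRange(IndexSearcher searcher, int numDocs,
                                         long? lower, long? upper,
                                         bool includeLower, bool includeUpper)
        {
            Query q = new TermQuery(new Term("body", "body"));
            Filter f = FieldCacheRangeFilter.NewInt64Range(
                "id", lower, upper, includeLower, includeUpper);
            return searcher.Search(q, f, numDocs).ScoreDocs;
        }
    }

For example, IdRange(search, numDocs, minIdO, null, true, false) reproduces the "min and up"
case, and IdRange(search, numDocs, null, null, true, true) matches all documents.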

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestFieldCacheRewriteMethod.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestFieldCacheRewriteMethod.cs b/src/Lucene.Net.Tests/Search/TestFieldCacheRewriteMethod.cs
new file mode 100644
index 0000000..ee87c43
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestFieldCacheRewriteMethod.cs
@@ -0,0 +1,86 @@
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Attributes;
+    using NUnit.Framework;
+    using RegExp = Lucene.Net.Util.Automaton.RegExp;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Tests the FieldCacheRewriteMethod with random regular expressions
+    /// </summary>
+    [TestFixture]
+    public class TestFieldCacheRewriteMethod : TestRegexpRandom2
+    {
+        /// <summary>
+        /// Test fieldcache rewrite against filter rewrite </summary>
+        protected internal override void AssertSame(string regexp)
+        {
+            RegexpQuery fieldCache = new RegexpQuery(new Term(FieldName, regexp), RegExp.NONE);
+            fieldCache.MultiTermRewriteMethod = new FieldCacheRewriteMethod();
+
+            RegexpQuery filter = new RegexpQuery(new Term(FieldName, regexp), RegExp.NONE);
+            filter.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
+
+            TopDocs fieldCacheDocs = Searcher1.Search(fieldCache, 25);
+            TopDocs filterDocs = Searcher2.Search(filter, 25);
+
+            CheckHits.CheckEqual(fieldCache, fieldCacheDocs.ScoreDocs, filterDocs.ScoreDocs);
+        }
+
+        [Test]
+        public virtual void TestEquals()
+        {
+            RegexpQuery a1 = new RegexpQuery(new Term(FieldName, "[aA]"), RegExp.NONE);
+            RegexpQuery a2 = new RegexpQuery(new Term(FieldName, "[aA]"), RegExp.NONE);
+            RegexpQuery b = new RegexpQuery(new Term(FieldName, "[bB]"), RegExp.NONE);
+            Assert.AreEqual(a1, a2);
+            Assert.IsFalse(a1.Equals(b));
+
+            a1.MultiTermRewriteMethod = new FieldCacheRewriteMethod();
+            a2.MultiTermRewriteMethod = new FieldCacheRewriteMethod();
+            b.MultiTermRewriteMethod = new FieldCacheRewriteMethod();
+            Assert.AreEqual(a1, a2);
+            Assert.IsFalse(a1.Equals(b));
+            QueryUtils.Check(a1);
+        }
+
+
+
+        #region TestRegexpRandom2
+        // LUCENENET NOTE: Tests in a base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        /// <summary>
+        /// test a bunch of random regular expressions </summary>
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout attribute in NUnit for .NET Core.
+        [Timeout(60000)]
+#endif
+        [Test, HasTimeout]
+        public override void TestRegexps()
+        {
+            base.TestRegexps();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file
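
A minimal illustrative sketch of the comparison AssertSame performs above: the same
regular-expression query is built twice, differing only in its rewrite method, and both
variants should produce identical hits. The helper names are hypothetical:

    using Lucene.Net.Index;
    using Lucene.Net.Search;
    using RegExp = Lucene.Net.Util.Automaton.RegExp;

    public static class RewriteSketch
    {
        // Builds two logically identical regexp queries whose only difference
        // is the MultiTermQuery rewrite method used when they are executed.
        public static void Build(string field, string pattern,
                                 out RegexpQuery viaFieldCache, out RegexpQuery viaFilter)
        {
            viaFieldCache = new RegexpQuery(new Term(field, pattern), RegExp.NONE);
            viaFieldCache.MultiTermRewriteMethod = new FieldCacheRewriteMethod();

            viaFilter = new RegexpQuery(new Term(field, pattern), RegExp.NONE);
            viaFilter.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
        }
    }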

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestFieldCacheTermsFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestFieldCacheTermsFilter.cs b/src/Lucene.Net.Tests/Search/TestFieldCacheTermsFilter.cs
new file mode 100644
index 0000000..a7d231f
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestFieldCacheTermsFilter.cs
@@ -0,0 +1,80 @@
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+
+    /// <summary>
+    /// A basic unit test for FieldCacheTermsFilter
+    /// </summary>
+    /// <seealso cref= Lucene.Net.Search.FieldCacheTermsFilter </seealso>
+    [TestFixture]
+    public class TestFieldCacheTermsFilter : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestMissingTerms()
+        {
+            string fieldName = "field1";
+            Directory rd = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), rd, Similarity, TimeZone);
+            for (int i = 0; i < 100; i++)
+            {
+                Document doc = new Document();
+                int term = i * 10; // terms are multiples of 10
+                doc.Add(NewStringField(fieldName, "" + term, Field.Store.YES));
+                w.AddDocument(doc);
+            }
+            IndexReader reader = w.Reader;
+            w.Dispose();
+
+            IndexSearcher searcher = NewSearcher(reader);
+            int numDocs = reader.NumDocs;
+            ScoreDoc[] results;
+            MatchAllDocsQuery q = new MatchAllDocsQuery();
+
+            List<string> terms = new List<string>();
+            terms.Add("5");
+            results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs).ScoreDocs;
+            Assert.AreEqual(0, results.Length, "Must match nothing");
+
+            terms = new List<string>();
+            terms.Add("10");
+            results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs).ScoreDocs;
+            Assert.AreEqual(1, results.Length, "Must match 1");
+
+            terms = new List<string>();
+            terms.Add("10");
+            terms.Add("20");
+            results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs).ScoreDocs;
+            Assert.AreEqual(2, results.Length, "Must match 2");
+
+            reader.Dispose();
+            rd.Dispose();
+        }
+    }
+}
\ No newline at end of file
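
A minimal illustrative sketch of the pattern TestMissingTerms exercises: a MatchAllDocsQuery
restricted by a FieldCacheTermsFilter, where terms absent from the index (such as "5" above)
simply contribute no hits rather than causing an error. The helper names are hypothetical:

    using Lucene.Net.Search;

    public static class TermsFilterSketch
    {
        // Counts documents whose field matches any of the given terms;
        // unknown terms match nothing.
        public static int CountMatches(IndexSearcher searcher, int numDocs,
                                       string fieldName, params string[] terms)
        {
            var q = new MatchAllDocsQuery();
            var filter = new FieldCacheTermsFilter(fieldName, terms);
            return searcher.Search(q, filter, numDocs).ScoreDocs.Length;
        }
    }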

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestFieldValueFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestFieldValueFilter.cs b/src/Lucene.Net.Tests/Search/TestFieldValueFilter.cs
new file mode 100644
index 0000000..4930c24
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestFieldValueFilter.cs
@@ -0,0 +1,127 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+
+    [TestFixture]
+    public class TestFieldValueFilter : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestFieldValueFilterNoValue()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            int docs = AtLeast(10);
+            int[] docStates = BuildIndex(writer, docs);
+            int numDocsNoValue = 0;
+            for (int i = 0; i < docStates.Length; i++)
+            {
+                if (docStates[i] == 0)
+                {
+                    numDocsNoValue++;
+                }
+            }
+
+            IndexReader reader = DirectoryReader.Open(directory);
+            IndexSearcher searcher = NewSearcher(reader);
+            TopDocs search = searcher.Search(new TermQuery(new Term("all", "test")), new FieldValueFilter("some", true), docs);
+            Assert.AreEqual(numDocsNoValue, search.TotalHits);
+
+            ScoreDoc[] scoreDocs = search.ScoreDocs;
+            foreach (ScoreDoc scoreDoc in scoreDocs)
+            {
+                Assert.IsNull(reader.Document(scoreDoc.Doc).Get("some"));
+            }
+
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestFieldValueFilter_Mem()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            int docs = AtLeast(10);
+            int[] docStates = BuildIndex(writer, docs);
+            int numDocsWithValue = 0;
+            for (int i = 0; i < docStates.Length; i++)
+            {
+                if (docStates[i] == 1)
+                {
+                    numDocsWithValue++;
+                }
+            }
+            IndexReader reader = DirectoryReader.Open(directory);
+            IndexSearcher searcher = NewSearcher(reader);
+            TopDocs search = searcher.Search(new TermQuery(new Term("all", "test")), new FieldValueFilter("some"), docs);
+            Assert.AreEqual(numDocsWithValue, search.TotalHits);
+
+            ScoreDoc[] scoreDocs = search.ScoreDocs;
+            foreach (ScoreDoc scoreDoc in scoreDocs)
+            {
+                Assert.AreEqual("value", reader.Document(scoreDoc.Doc).Get("some"));
+            }
+
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        private int[] BuildIndex(RandomIndexWriter writer, int docs)
+        {
+            int[] docStates = new int[docs];
+            for (int i = 0; i < docs; i++)
+            {
+                Document doc = new Document();
+                if (Random().NextBoolean())
+                {
+                    docStates[i] = 1;
+                    doc.Add(NewTextField("some", "value", Field.Store.YES));
+                }
+                doc.Add(NewTextField("all", "test", Field.Store.NO));
+                doc.Add(NewTextField("id", "" + i, Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            writer.Commit();
+            int numDeletes = Random().Next(docs);
+            for (int i = 0; i < numDeletes; i++)
+            {
+                int docID = Random().Next(docs);
+                writer.DeleteDocuments(new Term("id", "" + docID));
+                docStates[docID] = 2;
+            }
+            writer.Dispose();
+            return docStates;
+        }
+    }
+}
\ No newline at end of file
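
The two tests above exercise the two modes of FieldValueFilter: the one-argument form keeps
only documents that have a value for the field, while passing true as the second (negate)
argument keeps only documents that lack one. A minimal illustrative sketch, reusing the
test's "all"/"some" field names (the helper class is hypothetical):

    using Lucene.Net.Index;
    using Lucene.Net.Search;

    public static class FieldValueFilterSketch
    {
        // Hits for all:test that HAVE a value in the "some" field.
        public static TopDocs WithValue(IndexSearcher searcher, int n)
        {
            return searcher.Search(new TermQuery(new Term("all", "test")),
                                   new FieldValueFilter("some"), n);
        }

        // Hits for all:test that LACK a value in the "some" field
        // (the second constructor argument negates the filter).
        public static TopDocs WithoutValue(IndexSearcher searcher, int n)
        {
            return searcher.Search(new TermQuery(new Term("all", "test")),
                                   new FieldValueFilter("some", true), n);
        }
    }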

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs b/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs
new file mode 100644
index 0000000..9847b9a
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs
@@ -0,0 +1,719 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using System;
+using System.Collections;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using System.Reflection;
+    using AtomicReader = Lucene.Net.Index.AtomicReader;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using IBits = Lucene.Net.Util.IBits;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdBitSet = Lucene.Net.Util.DocIdBitSet;
+    using DocsEnum = Lucene.Net.Index.DocsEnum;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FilterStrategy = Lucene.Net.Search.FilteredQuery.FilterStrategy;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// FilteredQuery unit tests.
+    ///
+    /// Created: Apr 21, 2004 1:21:46 PM
+    ///
+    /// @since   1.4
+    /// </summary>
+    [TestFixture]
+    public class TestFilteredQuery : LuceneTestCase
+    {
+        private IndexSearcher Searcher;
+        private IndexReader Reader;
+        private Directory Directory;
+        private Query Query;
+        private Filter Filter;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "one two three four five", Field.Store.YES));
+            doc.Add(NewTextField("sorter", "b", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(NewTextField("field", "one two three four", Field.Store.YES));
+            doc.Add(NewTextField("sorter", "d", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(NewTextField("field", "one two three y", Field.Store.YES));
+            doc.Add(NewTextField("sorter", "a", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(NewTextField("field", "one two x", Field.Store.YES));
+            doc.Add(NewTextField("sorter", "c", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            // tests here require single segment (eg try seed
+            // 8239472272678419952L), because SingleDocTestFilter(x)
+            // blindly accepts that docID in any sub-segment
+            writer.ForceMerge(1);
+
+            Reader = writer.Reader;
+            writer.Dispose();
+
+            Searcher = NewSearcher(Reader);
+
+            Query = new TermQuery(new Term("field", "three"));
+            Filter = NewStaticFilterB();
+        }
+
+        // must be static for serialization tests
+        private static Filter NewStaticFilterB()
+        {
+            return new FilterAnonymousInnerClassHelper();
+        }
+
+        private class FilterAnonymousInnerClassHelper : Filter
+        {
+            public FilterAnonymousInnerClassHelper()
+            {
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                if (acceptDocs == null)
+                {
+                    acceptDocs = new Bits.MatchAllBits(5);
+                }
+                BitArray bitset = new BitArray(5);
+                if (acceptDocs.Get(1))
+                {
+                    bitset.SafeSet(1, true);
+                }
+                if (acceptDocs.Get(3))
+                {
+                    bitset.SafeSet(3, true);
+                }
+                return new DocIdBitSet(bitset);
+            }
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Directory.Dispose();
+            base.TearDown();
+        }
+
+        [Test]
+        public virtual void TestFilteredQuery_Mem()
+        {
+            // force the filter to be executed as bits
+            TFilteredQuery(true);
+            // force the filter to be executed as iterator
+            TFilteredQuery(false);
+        }
+
+        private void TFilteredQuery(bool useRandomAccess)
+        {
+            Query filteredquery = new FilteredQuery(Query, Filter, RandomFilterStrategy(Random(), useRandomAccess));
+            ScoreDoc[] hits = Searcher.Search(filteredquery, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual(1, hits[0].Doc);
+            QueryUtils.Check(Random(), filteredquery, Searcher, Similarity);
+
+            hits = Searcher.Search(filteredquery, null, 1000, new Sort(new SortField("sorter", SortFieldType.STRING))).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual(1, hits[0].Doc);
+
+            filteredquery = new FilteredQuery(new TermQuery(new Term("field", "one")), Filter, RandomFilterStrategy(Random(), useRandomAccess));
+            hits = Searcher.Search(filteredquery, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            QueryUtils.Check(Random(), filteredquery, Searcher, Similarity);
+
+            filteredquery = new FilteredQuery(new MatchAllDocsQuery(), Filter, RandomFilterStrategy(Random(), useRandomAccess));
+            hits = Searcher.Search(filteredquery, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            QueryUtils.Check(Random(), filteredquery, Searcher, Similarity);
+
+            filteredquery = new FilteredQuery(new TermQuery(new Term("field", "x")), Filter, RandomFilterStrategy(Random(), useRandomAccess));
+            hits = Searcher.Search(filteredquery, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual(3, hits[0].Doc);
+            QueryUtils.Check(Random(), filteredquery, Searcher, Similarity);
+
+            filteredquery = new FilteredQuery(new TermQuery(new Term("field", "y")), Filter, RandomFilterStrategy(Random(), useRandomAccess));
+            hits = Searcher.Search(filteredquery, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+            QueryUtils.Check(Random(), filteredquery, Searcher, Similarity);
+
+            // test boost
+            Filter f = NewStaticFilterA();
+
+            float boost = 2.5f;
+            BooleanQuery bq1 = new BooleanQuery();
+            TermQuery tq = new TermQuery(new Term("field", "one"));
+            tq.Boost = boost;
+            bq1.Add(tq, Occur.MUST);
+            bq1.Add(new TermQuery(new Term("field", "five")), Occur.MUST);
+
+            BooleanQuery bq2 = new BooleanQuery();
+            tq = new TermQuery(new Term("field", "one"));
+            filteredquery = new FilteredQuery(tq, f, RandomFilterStrategy(Random(), useRandomAccess));
+            filteredquery.Boost = boost;
+            bq2.Add(filteredquery, Occur.MUST);
+            bq2.Add(new TermQuery(new Term("field", "five")), Occur.MUST);
+            AssertScoreEquals(bq1, bq2);
+
+            Assert.AreEqual(boost, filteredquery.Boost, 0);
+            Assert.AreEqual(1.0f, tq.Boost, 0); // the boost value of the underlying query shouldn't have changed
+        }
+
+        // must be static for serialization tests
+        private static Filter NewStaticFilterA()
+        {
+            return new FilterAnonymousInnerClassHelper2();
+        }
+
+        private class FilterAnonymousInnerClassHelper2 : Filter
+        {
+            public FilterAnonymousInnerClassHelper2()
+            {
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                Assert.IsNull(acceptDocs, "acceptDocs should be null, as we have an index without deletions");
+                BitArray bitset = new BitArray(5, true);
+                return new DocIdBitSet(bitset);
+            }
+        }
+
+        /// <summary>
+        /// Tests whether the scores of the two queries are the same.
+        /// </summary>
+        public virtual void AssertScoreEquals(Query q1, Query q2)
+        {
+            ScoreDoc[] hits1 = Searcher.Search(q1, null, 1000).ScoreDocs;
+            ScoreDoc[] hits2 = Searcher.Search(q2, null, 1000).ScoreDocs;
+
+            Assert.AreEqual(hits1.Length, hits2.Length);
+
+            for (int i = 0; i < hits1.Length; i++)
+            {
+                Assert.AreEqual(hits1[i].Score, hits2[i].Score, 0.000001f);
+            }
+        }
+
+        /// <summary>
+        /// this tests FilteredQuery's rewrite correctness
+        /// </summary>
+        [Test]
+        public virtual void TestRangeQuery()
+        {
+            // force the filter to be executed as bits
+            TRangeQuery(true);
+            TRangeQuery(false);
+        }
+
+        private void TRangeQuery(bool useRandomAccess)
+        {
+            TermRangeQuery rq = TermRangeQuery.NewStringRange("sorter", "b", "d", true, true);
+
+            Query filteredquery = new FilteredQuery(rq, Filter, RandomFilterStrategy(Random(), useRandomAccess));
+            ScoreDoc[] hits = Searcher.Search(filteredquery, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            QueryUtils.Check(Random(), filteredquery, Searcher, Similarity);
+        }
+
+        [Test]
+        public virtual void TestBooleanMUST()
+        {
+            // force the filter to be executed as bits
+            TBooleanMUST(true);
+            // force the filter to be executed as iterator
+            TBooleanMUST(false);
+        }
+
+        private void TBooleanMUST(bool useRandomAccess)
+        {
+            BooleanQuery bq = new BooleanQuery();
+            Query query = new FilteredQuery(new TermQuery(new Term("field", "one")), new SingleDocTestFilter(0), RandomFilterStrategy(Random(), useRandomAccess));
+            bq.Add(query, Occur.MUST);
+            query = new FilteredQuery(new TermQuery(new Term("field", "one")), new SingleDocTestFilter(1), RandomFilterStrategy(Random(), useRandomAccess));
+            bq.Add(query, Occur.MUST);
+            ScoreDoc[] hits = Searcher.Search(bq, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+            QueryUtils.Check(Random(), query, Searcher, Similarity);
+        }
+
+        [Test]
+        public virtual void TestBooleanSHOULD()
+        {
+            // force the filter to be executed as bits
+            TBooleanSHOULD(true);
+            // force the filter to be executed as iterator
+            TBooleanSHOULD(false);
+        }
+
+        private void TBooleanSHOULD(bool useRandomAccess)
+        {
+            BooleanQuery bq = new BooleanQuery();
+            Query query = new FilteredQuery(new TermQuery(new Term("field", "one")), new SingleDocTestFilter(0), RandomFilterStrategy(Random(), useRandomAccess));
+            bq.Add(query, Occur.SHOULD);
+            query = new FilteredQuery(new TermQuery(new Term("field", "one")), new SingleDocTestFilter(1), RandomFilterStrategy(Random(), useRandomAccess));
+            bq.Add(query, Occur.SHOULD);
+            ScoreDoc[] hits = Searcher.Search(bq, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            QueryUtils.Check(Random(), query, Searcher, Similarity);
+        }
+
+        // Make sure BooleanQuery, which does out-of-order
+        // scoring, inside FilteredQuery, works
+        [Test]
+        public virtual void TestBoolean2()
+        {
+            // force the filter to be executed as bits
+            TBoolean2(true);
+            // force the filter to be executed as iterator
+            TBoolean2(false);
+        }
+
+        private void TBoolean2(bool useRandomAccess)
+        {
+            BooleanQuery bq = new BooleanQuery();
+            Query query = new FilteredQuery(bq, new SingleDocTestFilter(0), RandomFilterStrategy(Random(), useRandomAccess));
+            bq.Add(new TermQuery(new Term("field", "one")), Occur.SHOULD);
+            bq.Add(new TermQuery(new Term("field", "two")), Occur.SHOULD);
+            ScoreDoc[] hits = Searcher.Search(query, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            QueryUtils.Check(Random(), query, Searcher, Similarity);
+        }
+
+        [Test]
+        public virtual void TestChainedFilters()
+        {
+            // force the filter to be executed as bits
+            TChainedFilters(true);
+            // force the filter to be executed as iterator
+            TChainedFilters(false);
+        }
+
+        private void TChainedFilters(bool useRandomAccess)
+        {
+            Query query = new FilteredQuery(new FilteredQuery(new MatchAllDocsQuery(), new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "three")))), RandomFilterStrategy(Random(), useRandomAccess)), new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "four")))), RandomFilterStrategy(Random(), useRandomAccess));
+            ScoreDoc[] hits = Searcher.Search(query, 10).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            QueryUtils.Check(Random(), query, Searcher, Similarity);
+
+            // one more:
+            query = new FilteredQuery(query, new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "five")))), RandomFilterStrategy(Random(), useRandomAccess));
+            hits = Searcher.Search(query, 10).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            QueryUtils.Check(Random(), query, Searcher, Similarity);
+        }
+
+        [Test]
+        public virtual void TestEqualsHashcode()
+        {
+            // some tests before, if the used queries and filters work:
+            Assert.AreEqual(new PrefixFilter(new Term("field", "o")), new PrefixFilter(new Term("field", "o")));
+            Assert.IsFalse((new PrefixFilter(new Term("field", "a"))).Equals(new PrefixFilter(new Term("field", "o"))));
+            QueryUtils.CheckHashEquals(new TermQuery(new Term("field", "one")));
+            QueryUtils.CheckUnequal(new TermQuery(new Term("field", "one")),
+                new TermQuery(new Term("field", "two")));
+            // now test FilteredQuery equals/hashcode:
+            QueryUtils.CheckHashEquals(new FilteredQuery(new TermQuery(new Term("field", "one")), new PrefixFilter(new Term("field", "o"))));
+            QueryUtils.CheckUnequal(new FilteredQuery(new TermQuery(new Term("field", "one")), new PrefixFilter(new Term("field", "o"))), new FilteredQuery(new TermQuery(new Term("field", "two")), new PrefixFilter(new Term("field", "o")))
+           );
+            QueryUtils.CheckUnequal(new FilteredQuery(new TermQuery(new Term("field", "one")), new PrefixFilter(new Term("field", "a"))), new FilteredQuery(new TermQuery(new Term("field", "one")), new PrefixFilter(new Term("field", "o")))
+           );
+        }
+
+        [Test]
+        public virtual void TestInvalidArguments()
+        {
+            try
+            {
+                new FilteredQuery(null, null);
+                Assert.Fail("Should throw IllegalArgumentException");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // pass
+            }
+            try
+            {
+                new FilteredQuery(new TermQuery(new Term("field", "one")), null);
+                Assert.Fail("Should throw IllegalArgumentException");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // pass
+            }
+            try
+            {
+                new FilteredQuery(null, new PrefixFilter(new Term("field", "o")));
+                Assert.Fail("Should throw IllegalArgumentException");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // pass
+            }
+        }
+
+        private FilterStrategy RandomFilterStrategy()
+        {
+            return RandomFilterStrategy(Random(), true);
+        }
+
+        private void AssertRewrite(FilteredQuery fq, Type clazz)
+        {
+            // assign crazy boost to FQ
+            float boost = (float)Random().NextDouble() * 100.0f;
+            fq.Boost = boost;
+
+            // assign crazy boost to inner
+            float innerBoost = (float)Random().NextDouble() * 100.0f;
+            fq.Query.Boost = innerBoost;
+
+            // check the class and boosts of rewritten query
+            Query rewritten = Searcher.Rewrite(fq);
+            Assert.IsTrue(clazz.IsInstanceOfType(rewritten), "is not instance of " + clazz.Name);
+            if (rewritten is FilteredQuery)
+            {
+                Assert.AreEqual(boost, rewritten.Boost, 1E-5f);
+                Assert.AreEqual(innerBoost, ((FilteredQuery)rewritten).Query.Boost, 1E-5f);
+                Assert.AreEqual(fq.Strategy, ((FilteredQuery)rewritten).Strategy);
+            }
+            else
+            {
+                Assert.AreEqual(boost * innerBoost, rewritten.Boost, 1E-5f);
+            }
+
+            // check that the original query was not modified
+            Assert.AreEqual(boost, fq.Boost, 1E-5f);
+            Assert.AreEqual(innerBoost, fq.Query.Boost, 1E-5f);
+        }
+
+        [Test]
+        public virtual void TestRewrite()
+        {
+            AssertRewrite(new FilteredQuery(new TermQuery(new Term("field", "one")), new PrefixFilter(new Term("field", "o")), RandomFilterStrategy()), typeof(FilteredQuery));
+            AssertRewrite(new FilteredQuery(new PrefixQuery(new Term("field", "one")), new PrefixFilter(new Term("field", "o")), RandomFilterStrategy()), typeof(FilteredQuery));
+        }
+
+        [Test]
+        public virtual void TestGetFilterStrategy()
+        {
+            FilterStrategy randomFilterStrategy = RandomFilterStrategy();
+            FilteredQuery filteredQuery = new FilteredQuery(new TermQuery(new Term("field", "one")), new PrefixFilter(new Term("field", "o")), randomFilterStrategy);
+            Assert.AreSame(randomFilterStrategy, filteredQuery.Strategy);
+        }
+
+        private static FilteredQuery.FilterStrategy RandomFilterStrategy(Random random, bool useRandomAccess)
+        {
+            if (useRandomAccess)
+            {
+                return new RandomAccessFilterStrategyAnonymousInnerClassHelper();
+            }
+            return TestUtil.RandomFilterStrategy(random);
+        }
+
+        private class RandomAccessFilterStrategyAnonymousInnerClassHelper : FilteredQuery.RandomAccessFilterStrategy
+        {
+            public RandomAccessFilterStrategyAnonymousInnerClassHelper()
+            {
+            }
+
+            protected override bool UseRandomAccess(IBits bits, int firstFilterDoc)
+            {
+                return true;
+            }
+        }
+
+        /*
+         * Tests that the QueryFirst strategy consults the filter bits only for
+         * documents that have already been matched by the query.
+         */
+
+        [Test]
+        public virtual void TestQueryFirstFilterStrategy()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            int numDocs = AtLeast(50);
+            int totalDocsWithZero = 0;
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                int num = Random().Next(5);
+                if (num == 0)
+                {
+                    totalDocsWithZero++;
+                }
+                doc.Add(NewTextField("field", "" + num, Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            IndexSearcher searcher = NewSearcher(reader);
+            Query query = new FilteredQuery(new TermQuery(new Term("field", "0")), new FilterAnonymousInnerClassHelper3(this, reader), FilteredQuery.QUERY_FIRST_FILTER_STRATEGY);
+
+            TopDocs search = searcher.Search(query, 10);
+            Assert.AreEqual(totalDocsWithZero, search.TotalHits);
+            IOUtils.Close(reader, writer, directory);
+        }
+
+        private class FilterAnonymousInnerClassHelper3 : Filter
+        {
+            private readonly TestFilteredQuery OuterInstance;
+
+            private IndexReader Reader;
+
+            public FilterAnonymousInnerClassHelper3(TestFilteredQuery outerInstance, IndexReader reader)
+            {
+                this.OuterInstance = outerInstance;
+                this.Reader = reader;
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                bool nullBitset = Random().Next(10) == 5;
+                AtomicReader reader = context.AtomicReader;
+                DocsEnum termDocsEnum = reader.TermDocsEnum(new Term("field", "0"));
+                if (termDocsEnum == null)
+                {
+                    return null; // no docs -- return null
+                }
+                BitArray bitSet = new BitArray(reader.MaxDoc);
+                int d;
+                while ((d = termDocsEnum.NextDoc()) != DocsEnum.NO_MORE_DOCS)
+                {
+                    bitSet.SafeSet(d, true);
+                }
+                return new DocIdSetAnonymousInnerClassHelper(this, nullBitset, reader, bitSet);
+            }
+
+            private class DocIdSetAnonymousInnerClassHelper : DocIdSet
+            {
+                private readonly FilterAnonymousInnerClassHelper3 OuterInstance;
+
+                private bool NullBitset;
+                private AtomicReader Reader;
+                private BitArray BitSet;
+
+                public DocIdSetAnonymousInnerClassHelper(FilterAnonymousInnerClassHelper3 outerInstance, bool nullBitset, AtomicReader reader, BitArray bitSet)
+                {
+                    this.OuterInstance = outerInstance;
+                    this.NullBitset = nullBitset;
+                    this.Reader = reader;
+                    this.BitSet = bitSet;
+                }
+
+                public override IBits Bits
+                {
+                    get
+                    {
+                        if (NullBitset)
+                        {
+                            return null;
+                        }
+                        return new BitsAnonymousInnerClassHelper(this);
+                    }
+                }
+
+                private class BitsAnonymousInnerClassHelper : IBits
+                {
+                    private readonly DocIdSetAnonymousInnerClassHelper OuterInstance;
+
+                    public BitsAnonymousInnerClassHelper(DocIdSetAnonymousInnerClassHelper outerInstance)
+                    {
+                        this.OuterInstance = outerInstance;
+                    }
+
+                    public bool Get(int index)
+                    {
+                        Assert.IsTrue(OuterInstance.BitSet.SafeGet(index), "filter was called for a non-matching doc");
+                        return OuterInstance.BitSet.SafeGet(index);
+                    }
+
+                    public int Length
+                    {
+                        get { return OuterInstance.BitSet.Length; }
+                    }
+                }
+
+                public override DocIdSetIterator GetIterator()
+                {
+                    Assert.IsTrue(NullBitset, "iterator should not be called if bitset is present");
+                    return Reader.TermDocsEnum(new Term("field", "0"));
+                }
+            }
+        }
+
+        /*
+         * Tests that the leapfrog strategy advances / calls NextDoc on
+         * the correct side first.
+         */
+
+        [Test]
+        public virtual void TestLeapFrogStrategy()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            int numDocs = AtLeast(50);
+            int totalDocsWithZero = 0;
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                int num = Random().Next(10);
+                if (num == 0)
+                {
+                    totalDocsWithZero++;
+                }
+                doc.Add(NewTextField("field", "" + num, Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+            bool queryFirst = Random().NextBoolean();
+            IndexSearcher searcher = NewSearcher(reader);
+            Query query = new FilteredQuery(new TermQuery(new Term("field", "0")), new FilterAnonymousInnerClassHelper4(this, queryFirst), queryFirst ? FilteredQuery.LEAP_FROG_QUERY_FIRST_STRATEGY : Random()
+                  .NextBoolean() ? FilteredQuery.RANDOM_ACCESS_FILTER_STRATEGY : FilteredQuery.LEAP_FROG_FILTER_FIRST_STRATEGY); // if filterFirst, we can use random here since bits are null
+
+            TopDocs search = searcher.Search(query, 10);
+            Assert.AreEqual(totalDocsWithZero, search.TotalHits);
+            IOUtils.Close(reader, writer, directory);
+        }
+
+        private class FilterAnonymousInnerClassHelper4 : Filter
+        {
+            private readonly TestFilteredQuery OuterInstance;
+
+            private bool QueryFirst;
+
+            public FilterAnonymousInnerClassHelper4(TestFilteredQuery outerInstance, bool queryFirst)
+            {
+                this.OuterInstance = outerInstance;
+                this.QueryFirst = queryFirst;
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                return new DocIdSetAnonymousInnerClassHelper2(this, context);
+            }
+
+            private class DocIdSetAnonymousInnerClassHelper2 : DocIdSet
+            {
+                private readonly FilterAnonymousInnerClassHelper4 OuterInstance;
+
+                private AtomicReaderContext Context;
+
+                public DocIdSetAnonymousInnerClassHelper2(FilterAnonymousInnerClassHelper4 outerInstance, AtomicReaderContext context)
+                {
+                    this.OuterInstance = outerInstance;
+                    this.Context = context;
+                }
+
+                public override IBits Bits
+                {
+                    get { return null; }
+                }
+
+                public override DocIdSetIterator GetIterator()
+                {
+                    DocsEnum termDocsEnum = ((AtomicReader)Context.Reader).TermDocsEnum(new Term("field", "0"));
+                    if (termDocsEnum == null)
+                    {
+                        return null;
+                    }
+                    return new DocIdSetIteratorAnonymousInnerClassHelper(this, termDocsEnum);
+                }
+
+                private class DocIdSetIteratorAnonymousInnerClassHelper : DocIdSetIterator
+                {
+                    private readonly DocIdSetAnonymousInnerClassHelper2 OuterInstance;
+
+                    private DocsEnum TermDocsEnum;
+
+                    public DocIdSetIteratorAnonymousInnerClassHelper(DocIdSetAnonymousInnerClassHelper2 outerInstance, DocsEnum termDocsEnum)
+                    {
+                        this.OuterInstance = outerInstance;
+                        this.TermDocsEnum = termDocsEnum;
+                    }
+
+                    internal bool nextCalled;
+                    internal bool advanceCalled;
+
+                    public override int NextDoc()
+                    {
+                        Assert.IsTrue(nextCalled || advanceCalled ^ !OuterInstance.OuterInstance.QueryFirst, "queryFirst: " + OuterInstance.OuterInstance.QueryFirst + " advanced: " + advanceCalled + " next: " + nextCalled);
+                        nextCalled = true;
+                        return TermDocsEnum.NextDoc();
+                    }
+
+                    public override int DocID
+                    {
+                        get { return TermDocsEnum.DocID; }
+                    }
+
+                    public override int Advance(int target)
+                    {
+                        Assert.IsTrue(advanceCalled || nextCalled ^ OuterInstance.OuterInstance.QueryFirst, "queryFirst: " + OuterInstance.OuterInstance.QueryFirst + " advanced: " + advanceCalled + " next: " + nextCalled);
+                        advanceCalled = true;
+                        return TermDocsEnum.Advance(target);
+                    }
+
+                    public override long GetCost()
+                    {
+                        return TermDocsEnum.GetCost();
+                    }
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
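
The fixture above switches between FilteredQuery's built-in filter strategies. A minimal
illustrative sketch wiring the same query/filter pair to each one (the helper name is
hypothetical): QUERY_FIRST consults the filter bits only for documents the query already
matched, the two LEAP_FROG variants interleave the two iterators and differ in which side
advances first, and RANDOM_ACCESS may use the filter's bits up front:

    using Lucene.Net.Search;

    public static class StrategySketch
    {
        // Wraps one query/filter pair with each built-in FilterStrategy.
        public static FilteredQuery[] AllStrategies(Query q, Filter f)
        {
            return new[]
            {
                new FilteredQuery(q, f, FilteredQuery.QUERY_FIRST_FILTER_STRATEGY),
                new FilteredQuery(q, f, FilteredQuery.LEAP_FROG_QUERY_FIRST_STRATEGY),
                new FilteredQuery(q, f, FilteredQuery.LEAP_FROG_FILTER_FIRST_STRATEGY),
                new FilteredQuery(q, f, FilteredQuery.RANDOM_ACCESS_FILTER_STRATEGY)
            };
        }
    }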

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestFilteredSearch.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestFilteredSearch.cs b/src/Lucene.Net.Tests/Search/TestFilteredSearch.cs
new file mode 100644
index 0000000..88dada2
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestFilteredSearch.cs
@@ -0,0 +1,112 @@
+using System;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using IBits = Lucene.Net.Util.IBits;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FixedBitSet = Lucene.Net.Util.FixedBitSet;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using OpenMode = Lucene.Net.Index.OpenMode;
+    using Term = Lucene.Net.Index.Term;
+
+    [TestFixture]
+    public class TestFilteredSearch : LuceneTestCase
+    {
+        private const string FIELD = "category";
+
+        [Test]
+        public virtual void TestFilteredSearch_Mem()
+        {
+            bool enforceSingleSegment = true;
+            Directory directory = NewDirectory();
+            int[] filterBits = new int[] { 1, 36 };
+            SimpleDocIdSetFilter filter = new SimpleDocIdSetFilter(filterBits);
+            IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            SearchFiltered(writer, directory, filter, enforceSingleSegment);
+            // run the test on more than one segment
+            enforceSingleSegment = false;
+            writer.Dispose();
+            writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy()));
+            // we index 60 docs - this will create 6 segments
+            SearchFiltered(writer, directory, filter, enforceSingleSegment);
+            writer.Dispose();
+            directory.Dispose();
+        }
+
+        public virtual void SearchFiltered(IndexWriter writer, Directory directory, Filter filter, bool fullMerge)
+        {
+            for (int i = 0; i < 60; i++) //Simple docs
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField(FIELD, Convert.ToString(i), Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            if (fullMerge)
+            {
+                writer.ForceMerge(1);
+            }
+            writer.Dispose();
+
+            BooleanQuery booleanQuery = new BooleanQuery();
+            booleanQuery.Add(new TermQuery(new Term(FIELD, "36")), Occur.SHOULD);
+
+            IndexReader reader = DirectoryReader.Open(directory);
+            IndexSearcher indexSearcher = NewSearcher(reader);
+            ScoreDoc[] hits = indexSearcher.Search(booleanQuery, filter, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length, "Number of matched documents");
+            reader.Dispose();
+        }
+
+        public sealed class SimpleDocIdSetFilter : Filter
+        {
+            internal readonly int[] Docs;
+
+            public SimpleDocIdSetFilter(int[] docs)
+            {
+                this.Docs = docs;
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                Assert.IsNull(acceptDocs, "acceptDocs should be null, as we have an index without deletions");
+                FixedBitSet set = new FixedBitSet(context.Reader.MaxDoc);
+                int docBase = context.DocBase;
+                int limit = docBase + context.Reader.MaxDoc;
+                for (int index = 0; index < Docs.Length; index++)
+                {
+                    int docId = Docs[index];
+                    if (docId >= docBase && docId < limit)
+                    {
+                        set.Set(docId - docBase);
+                    }
+                }
+                return set.Cardinality() == 0 ? null : set;
+            }
+        }
+    }
+}
\ No newline at end of file
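
The key detail in SimpleDocIdSetFilter above is the per-segment translation: GetDocIdSet is
called once per AtomicReaderContext, so index-wide doc IDs must be shifted by context.DocBase
into segment-local IDs. A stripped-down restatement of that logic (acceptDocs handling
omitted, as in the test; the class name is hypothetical):

    using Lucene.Net.Index;
    using Lucene.Net.Search;
    using Lucene.Net.Util;

    public sealed class GlobalDocIdFilter : Filter
    {
        private readonly int[] globalDocs; // index-wide doc IDs to accept

        public GlobalDocIdFilter(int[] globalDocs)
        {
            this.globalDocs = globalDocs;
        }

        public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
        {
            var set = new FixedBitSet(context.Reader.MaxDoc);
            int docBase = context.DocBase;
            int limit = docBase + context.Reader.MaxDoc;
            foreach (int docId in globalDocs)
            {
                if (docId >= docBase && docId < limit)
                {
                    set.Set(docId - docBase); // translate global -> segment-local
                }
            }
            return set.Cardinality() == 0 ? null : set; // null means no docs in this segment
        }
    }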


[38/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDeletionPolicy.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDeletionPolicy.cs b/src/Lucene.Net.Tests/Index/TestDeletionPolicy.cs
new file mode 100644
index 0000000..faaa659
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDeletionPolicy.cs
@@ -0,0 +1,803 @@
+using System;
+using System.Collections.Generic;
+using System.Threading;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using System.IO;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using Query = Lucene.Net.Search.Query;
+    using ScoreDoc = Lucene.Net.Search.ScoreDoc;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using Attributes;
+
+    /*
+      Verify that custom IndexDeletionPolicy implementations control
+      which commit points are kept and which are deleted.
+    */
+
+    [TestFixture]
+    public class TestDeletionPolicy : LuceneTestCase
+    {
+        private void VerifyCommitOrder<T>(IList<T> commits)
+            where T : IndexCommit
+        {
+            if (commits.Count == 0)
+            {
+                return;
+            }
+            IndexCommit firstCommit = commits[0];
+            long last = SegmentInfos.GenerationFromSegmentsFileName(firstCommit.SegmentsFileName);
+            Assert.AreEqual(last, firstCommit.Generation);
+            for (int i = 1; i < commits.Count; i++)
+            {
+                IndexCommit commit = commits[i];
+                long now = SegmentInfos.GenerationFromSegmentsFileName(commit.SegmentsFileName);
+                Assert.IsTrue(now > last, "SegmentInfos commits are out-of-order");
+                Assert.AreEqual(now, commit.Generation);
+                last = now;
+            }
+        }
+
+        internal class KeepAllDeletionPolicy : IndexDeletionPolicy
+        {
+            private readonly TestDeletionPolicy OuterInstance;
+
+            internal int NumOnInit;
+            internal int NumOnCommit;
+            internal Directory Dir;
+
+            internal KeepAllDeletionPolicy(TestDeletionPolicy outerInstance, Directory dir)
+            {
+                this.OuterInstance = outerInstance;
+                this.Dir = dir;
+            }
+
+            public override void OnInit<T>(IList<T> commits)
+            {
+                OuterInstance.VerifyCommitOrder(commits);
+                NumOnInit++;
+            }
+
+            public override void OnCommit<T>(IList<T> commits)
+            {
+                IndexCommit lastCommit = commits[commits.Count - 1];
+                DirectoryReader r = DirectoryReader.Open(Dir);
+                Assert.AreEqual(r.Leaves.Count, lastCommit.SegmentCount, "lastCommit.segmentCount()=" + lastCommit.SegmentCount + " vs IndexReader.segmentCount=" + r.Leaves.Count);
+                r.Dispose();
+                OuterInstance.VerifyCommitOrder(commits);
+                NumOnCommit++;
+            }
+        }
+
+        /// <summary>
+        /// Deletes all commits on init.  This is useful for adding to a
+        /// big index when you know readers are not using it.
+        /// </summary>
+        internal class KeepNoneOnInitDeletionPolicy : IndexDeletionPolicy
+        {
+            private readonly TestDeletionPolicy OuterInstance;
+
+            public KeepNoneOnInitDeletionPolicy(TestDeletionPolicy outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            internal int NumOnInit;
+            internal int NumOnCommit;
+
+            public override void OnInit<T>(IList<T> commits)
+            {
+                OuterInstance.VerifyCommitOrder(commits);
+                NumOnInit++;
+                // On init, delete all commit points:
+                foreach (IndexCommit commit in commits)
+                {
+                    commit.Delete();
+                    Assert.IsTrue(commit.IsDeleted);
+                }
+            }
+
+            public override void OnCommit<T>(IList<T> commits)
+            {
+                OuterInstance.VerifyCommitOrder(commits);
+                int size = commits.Count;
+                // Delete all but last one:
+                for (int i = 0; i < size - 1; i++)
+                {
+                    ((IndexCommit)commits[i]).Delete();
+                }
+                NumOnCommit++;
+            }
+        }
+
+        internal class KeepLastNDeletionPolicy : IndexDeletionPolicy
+        {
+            private readonly TestDeletionPolicy OuterInstance;
+
+            internal int NumOnInit;
+            internal int NumOnCommit;
+            internal int NumToKeep;
+            internal int NumDelete;
+            internal HashSet<string> Seen = new HashSet<string>();
+
+            public KeepLastNDeletionPolicy(TestDeletionPolicy outerInstance, int numToKeep)
+            {
+                this.OuterInstance = outerInstance;
+                this.NumToKeep = numToKeep;
+            }
+
+            public override void OnInit<T>(IList<T> commits)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: onInit");
+                }
+                OuterInstance.VerifyCommitOrder(commits);
+                NumOnInit++;
+                // do no deletions on init
+                DoDeletes(commits, false);
+            }
+
+            public override void OnCommit<T>(IList<T> commits)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: onCommit");
+                }
+                OuterInstance.VerifyCommitOrder(commits);
+                DoDeletes(commits, true);
+            }
+
+            internal virtual void DoDeletes<T>(IList<T> commits, bool isCommit)
+                where T : IndexCommit
+            {
+                // Assert that we really are only called for each new
+                // commit:
+                if (isCommit)
+                {
+                    string fileName = ((IndexCommit)commits[commits.Count - 1]).SegmentsFileName;
+                    if (Seen.Contains(fileName))
+                    {
+                        throw new Exception("onCommit was called twice on the same commit point: " + fileName);
+                    }
+                    Seen.Add(fileName);
+                    NumOnCommit++;
+                }
+                int size = commits.Count;
+                for (int i = 0; i < size - NumToKeep; i++)
+                {
+                    ((IndexCommit)commits[i]).Delete();
+                    NumDelete++;
+                }
+            }
+        }
+
+        internal static long GetCommitTime(IndexCommit commit)
+        {
+            return Convert.ToInt64(commit.UserData["commitTime"]);
+        }
+
+        /*
+         * Delete a commit only when it has been obsoleted by N
+         * seconds.
+         */
+
+        internal class ExpirationTimeDeletionPolicy : IndexDeletionPolicy
+        {
+            private readonly TestDeletionPolicy OuterInstance;
+
+            internal Directory Dir;
+            internal double ExpirationTimeSeconds;
+            internal int NumDelete;
+
+            public ExpirationTimeDeletionPolicy(TestDeletionPolicy outerInstance, Directory dir, double seconds)
+            {
+                this.OuterInstance = outerInstance;
+                this.Dir = dir;
+                this.ExpirationTimeSeconds = seconds;
+            }
+
+            public override void OnInit<T>(IList<T> commits)
+            {
+                if (commits.Count == 0)
+                {
+                    return;
+                }
+                OuterInstance.VerifyCommitOrder(commits);
+                OnCommit(commits);
+            }
+
+            public override void OnCommit<T>(IList<T> commits)
+            {
+                OuterInstance.VerifyCommitOrder(commits);
+
+                IndexCommit lastCommit = commits[commits.Count - 1];
+
+                // Any commit older than expireTime should be deleted:
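+                // (commit times are Environment.TickCount values in milliseconds, hence the /1000.0)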
+                double expireTime = GetCommitTime(lastCommit) / 1000.0 - ExpirationTimeSeconds;
+
+                foreach (IndexCommit commit in commits)
+                {
+                    double modTime = GetCommitTime(commit) / 1000.0;
+                    if (commit != lastCommit && modTime < expireTime)
+                    {
+                        commit.Delete();
+                        NumDelete += 1;
+                    }
+                }
+            }
+        }
+
+        /*
+         * Test "by time expiration" deletion policy:
+         */
+
+        [Test]
+        public virtual void TestExpirationTimeDeletionPolicy()
+        {
+            const double SECONDS = 2.0;
+
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(new ExpirationTimeDeletionPolicy(this, dir, SECONDS));
+            MergePolicy mp = conf.MergePolicy;
+            mp.NoCFSRatio = 1.0;
+            IndexWriter writer = new IndexWriter(dir, conf);
+            ExpirationTimeDeletionPolicy policy = (ExpirationTimeDeletionPolicy)writer.Config.IndexDeletionPolicy;
+            IDictionary<string, string> commitData = new Dictionary<string, string>();
+            commitData["commitTime"] = Convert.ToString(Environment.TickCount);
+            writer.CommitData = commitData;
+            writer.Commit();
+            writer.Dispose();
+
+            long lastDeleteTime = 0;
+            int targetNumDelete = TestUtil.NextInt(Random(), 1, 5);
+            while (policy.NumDelete < targetNumDelete)
+            {
+                // Record last time when writer performed deletes of
+                // past commits
+                lastDeleteTime = Environment.TickCount;
+                conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetIndexDeletionPolicy(policy);
+                mp = conf.MergePolicy;
+                mp.NoCFSRatio = 1.0;
+                writer = new IndexWriter(dir, conf);
+                policy = (ExpirationTimeDeletionPolicy)writer.Config.IndexDeletionPolicy;
+                for (int j = 0; j < 17; j++)
+                {
+                    AddDoc(writer);
+                }
+                commitData = new Dictionary<string, string>();
+                commitData["commitTime"] = Convert.ToString(Environment.TickCount);
+                writer.CommitData = commitData;
+                writer.Commit();
+                writer.Dispose();
+
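+                // Pause 1/5 of the expiration window so earlier commits age past it: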
+                Thread.Sleep((int)(1000.0 * (SECONDS / 5.0)));
+            }
+
+            // Then simplistic check: just verify that the
+            // segments_N's that still exist are in fact within SECONDS
+            // seconds of the last one's mod time, and, that I can
+            // open a reader on each:
+            long gen = SegmentInfos.GetLastCommitGeneration(dir);
+
+            string fileName = IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen);
+            dir.DeleteFile(IndexFileNames.SEGMENTS_GEN);
+
+            bool oneSecondResolution = true;
+
+            while (gen > 0)
+            {
+                try
+                {
+                    IndexReader reader = DirectoryReader.Open(dir);
+                    reader.Dispose();
+                    fileName = IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen);
+
+                    // if we are on a filesystem that seems to have only
+                    // 1 second resolution, allow +1 second in commit
+                    // age tolerance:
+                    SegmentInfos sis = new SegmentInfos();
+                    sis.Read(dir, fileName);
+                    long modTime = Convert.ToInt64(sis.UserData["commitTime"]);
+                    oneSecondResolution &= (modTime % 1000) == 0;
+                    long leeway = (long)((SECONDS + (oneSecondResolution ? 1.0 : 0.0)) * 1000);
+
+                    Assert.IsTrue(lastDeleteTime - modTime <= leeway, "commit point was older than " + SECONDS + " seconds (" + (lastDeleteTime - modTime) + " msec) but did not get deleted ");
+                }
+#pragma warning disable 168
+                catch (IOException e)
+#pragma warning restore 168
+                {
+                    // OK
+                    break;
+                }
+
+                dir.DeleteFile(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
+                gen--;
+            }
+
+            dir.Dispose();
+        }
+
+        /*
+         * Test a silly deletion policy that keeps all commits around.
+         */
+
+        [Test]
+        public virtual void TestKeepAllDeletionPolicy()
+        {
+            for (int pass = 0; pass < 2; pass++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: cycle pass=" + pass);
+                }
+
+                bool useCompoundFile = (pass % 2) != 0;
+
+                Directory dir = NewDirectory();
+
+                IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(new KeepAllDeletionPolicy(this, dir)).SetMaxBufferedDocs(10).SetMergeScheduler(new SerialMergeScheduler());
+                MergePolicy mp = conf.MergePolicy;
+                mp.NoCFSRatio = useCompoundFile ? 1.0 : 0.0;
+                IndexWriter writer = new IndexWriter(dir, conf);
+                KeepAllDeletionPolicy policy = (KeepAllDeletionPolicy)writer.Config.IndexDeletionPolicy;
+                for (int i = 0; i < 107; i++)
+                {
+                    AddDoc(writer);
+                }
+                writer.Dispose();
+
+                bool needsMerging;
+                {
+                    DirectoryReader r = DirectoryReader.Open(dir);
+                    needsMerging = r.Leaves.Count != 1;
+                    r.Dispose();
+                }
+                if (needsMerging)
+                {
+                    conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetIndexDeletionPolicy(policy);
+                    mp = conf.MergePolicy;
+                    mp.NoCFSRatio = useCompoundFile ? 1.0 : 0.0;
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: open writer for forceMerge");
+                    }
+                    writer = new IndexWriter(dir, conf);
+                    policy = (KeepAllDeletionPolicy)writer.Config.IndexDeletionPolicy;
+                    writer.ForceMerge(1);
+                    writer.Dispose();
+                }
+
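+                // OnInit runs once per writer open: the initial open, plus the forceMerge open when needed.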
+                Assert.AreEqual(needsMerging ? 2 : 1, policy.NumOnInit);
+
+                // There should be one commit per writer close above
+                // (the second close only happens when merging was needed):
+                Assert.AreEqual(1 + (needsMerging ? 1 : 0), policy.NumOnCommit);
+
+                // Test listCommits
+                ICollection<IndexCommit> commits = DirectoryReader.ListCommits(dir);
+                // one commit per writer close above
+                Assert.AreEqual(1 + (needsMerging ? 1 : 0), commits.Count);
+
+                // Make sure we can open a reader on each commit:
+                foreach (IndexCommit commit in commits)
+                {
+                    IndexReader r = DirectoryReader.Open(commit);
+                    r.Dispose();
+                }
+
+                // Simplistic check: just verify all segments_N's still
+                // exist, and, I can open a reader on each:
+                dir.DeleteFile(IndexFileNames.SEGMENTS_GEN);
+                long gen = SegmentInfos.GetLastCommitGeneration(dir);
+                while (gen > 0)
+                {
+                    IndexReader reader = DirectoryReader.Open(dir);
+                    reader.Dispose();
+                    dir.DeleteFile(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
+                    gen--;
+
+                    if (gen > 0)
+                    {
+                        // Removing a commit point should have orphaned at
+                        // least one index file.  Open & close a writer and
+                        // assert that it actually removed something:
+                        int preCount = dir.ListAll().Length;
+                        writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetIndexDeletionPolicy(policy));
+                        writer.Dispose();
+                        int postCount = dir.ListAll().Length;
+                        Assert.IsTrue(postCount < preCount);
+                    }
+                }
+
+                dir.Dispose();
+            }
+        }
+
+        /* Uses KeepAllDeletionPolicy to keep all commits around,
+         * then, opens a new IndexWriter on a previous commit
+         * point. */
+
+        [Test]
+        public virtual void TestOpenPriorSnapshot()
+        {
+            Directory dir = NewDirectory();
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(new KeepAllDeletionPolicy(this, dir)).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(10)));
+            KeepAllDeletionPolicy policy = (KeepAllDeletionPolicy)writer.Config.IndexDeletionPolicy;
+            for (int i = 0; i < 10; i++)
+            {
+                AddDoc(writer);
+                if ((1 + i) % 2 == 0)
+                {
+                    writer.Commit();
+                }
+            }
+            writer.Dispose();
+
+            ICollection<IndexCommit> commits = DirectoryReader.ListCommits(dir);
+            Assert.AreEqual(5, commits.Count);
+            IndexCommit lastCommit = null;
+            foreach (IndexCommit commit in commits)
+            {
+                if (lastCommit == null || commit.Generation > lastCommit.Generation)
+                {
+                    lastCommit = commit;
+                }
+            }
+            Assert.IsTrue(lastCommit != null);
+
+            // Now add 1 doc and merge
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(policy));
+            AddDoc(writer);
+            Assert.AreEqual(11, writer.NumDocs);
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            Assert.AreEqual(6, DirectoryReader.ListCommits(dir).Count);
+
+            // Now open writer on the commit just before merge:
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(policy).SetIndexCommit(lastCommit));
+            Assert.AreEqual(10, writer.NumDocs);
+
+            // Rolling back discards the uncommitted session opened on the prior commit:
+            writer.Rollback();
+
+            DirectoryReader r = DirectoryReader.Open(dir);
+            // Still merged, still 11 docs
+            Assert.AreEqual(1, r.Leaves.Count);
+            Assert.AreEqual(11, r.NumDocs);
+            r.Dispose();
+
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(policy).SetIndexCommit(lastCommit));
+            Assert.AreEqual(10, writer.NumDocs);
+            // Commits the rollback:
+            writer.Dispose();
+
+            // Now 7 because we made another commit
+            Assert.AreEqual(7, DirectoryReader.ListCommits(dir).Count);
+
+            r = DirectoryReader.Open(dir);
+            // Not fully merged because we rolled it back, and now only
+            // 10 docs
+            Assert.IsTrue(r.Leaves.Count > 1);
+            Assert.AreEqual(10, r.NumDocs);
+            r.Dispose();
+
+            // Re-merge
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(policy));
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            r = DirectoryReader.Open(dir);
+            Assert.AreEqual(1, r.Leaves.Count);
+            Assert.AreEqual(10, r.NumDocs);
+            r.Dispose();
+
+            // Now open writer on the commit just before merging,
+            // but this time keeping only the last commit:
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexCommit(lastCommit));
+            Assert.AreEqual(10, writer.NumDocs);
+
+            // Reader still sees fully merged index, because writer
+            // opened on the prior commit has not yet committed:
+            r = DirectoryReader.Open(dir);
+            Assert.AreEqual(1, r.Leaves.Count);
+            Assert.AreEqual(10, r.NumDocs);
+            r.Dispose();
+
+            writer.Dispose();
+
+            // Now reader sees not-fully-merged index:
+            r = DirectoryReader.Open(dir);
+            Assert.IsTrue(r.Leaves.Count > 1);
+            Assert.AreEqual(10, r.NumDocs);
+            r.Dispose();
+
+            dir.Dispose();
+        }
+
+        /* Test keeping NO commit points.  This is a viable and
+         * useful case, e.g. where you want to build a big index
+         * and you know there are no readers.
+         */
+
+        [Test]
+        public virtual void TestKeepNoneOnInitDeletionPolicy()
+        {
+            for (int pass = 0; pass < 2; pass++)
+            {
+                bool useCompoundFile = (pass % 2) != 0;
+
+                Directory dir = NewDirectory();
+
+                IndexWriterConfig conf = (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetIndexDeletionPolicy(new KeepNoneOnInitDeletionPolicy(this)).SetMaxBufferedDocs(10);
+                MergePolicy mp = conf.MergePolicy;
+                mp.NoCFSRatio = useCompoundFile ? 1.0 : 0.0;
+                IndexWriter writer = new IndexWriter(dir, conf);
+                KeepNoneOnInitDeletionPolicy policy = (KeepNoneOnInitDeletionPolicy)writer.Config.IndexDeletionPolicy;
+                for (int i = 0; i < 107; i++)
+                {
+                    AddDoc(writer);
+                }
+                writer.Dispose();
+
+                conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetIndexDeletionPolicy(policy);
+                mp = conf.MergePolicy;
+                mp.NoCFSRatio = 1.0;
+                writer = new IndexWriter(dir, conf);
+                policy = (KeepNoneOnInitDeletionPolicy)writer.Config.IndexDeletionPolicy;
+                writer.ForceMerge(1);
+                writer.Dispose();
+
+                Assert.AreEqual(2, policy.NumOnInit);
+                // If we are not auto committing then there should
+                // be exactly 2 commits (one per close above):
+                Assert.AreEqual(2, policy.NumOnCommit);
+
+                // Simplistic check: just verify the index is in fact
+                // readable:
+                IndexReader reader = DirectoryReader.Open(dir);
+                reader.Dispose();
+
+                dir.Dispose();
+            }
+        }
+
+        /*
+         * Test a deletion policy that keeps last N commits.
+         */
+
+        [Test]
+        public virtual void TestKeepLastNDeletionPolicy()
+        {
+            const int N = 5;
+
+            for (int pass = 0; pass < 2; pass++)
+            {
+                bool useCompoundFile = (pass % 2) != 0;
+
+                Directory dir = NewDirectory();
+
+                KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(this, N);
+                for (int j = 0; j < N + 1; j++)
+                {
+                    IndexWriterConfig conf = (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetIndexDeletionPolicy(policy).SetMaxBufferedDocs(10);
+                    MergePolicy mp = conf.MergePolicy;
+                    mp.NoCFSRatio = useCompoundFile ? 1.0 : 0.0;
+                    IndexWriter writer = new IndexWriter(dir, conf);
+                    policy = (KeepLastNDeletionPolicy)writer.Config.IndexDeletionPolicy;
+                    for (int i = 0; i < 17; i++)
+                    {
+                        AddDoc(writer);
+                    }
+                    writer.ForceMerge(1);
+                    writer.Dispose();
+                }
+
+                Assert.IsTrue(policy.NumDelete > 0);
+                Assert.AreEqual(N + 1, policy.NumOnInit);
+                Assert.AreEqual(N + 1, policy.NumOnCommit);
+
+                // Simplistic check: just verify only the past N segments_N's still
+                // exist, and, I can open a reader on each:
+                dir.DeleteFile(IndexFileNames.SEGMENTS_GEN);
+                long gen = SegmentInfos.GetLastCommitGeneration(dir);
+                for (int i = 0; i < N + 1; i++)
+                {
+                    try
+                    {
+                        IndexReader reader = DirectoryReader.Open(dir);
+                        reader.Dispose();
+                        if (i == N)
+                        {
+                            Assert.Fail("should have failed on commits prior to last " + N);
+                        }
+                    }
+                    catch (IOException)
+                    {
+                        if (i != N)
+                        {
+                            throw; // rethrow without resetting the stack trace
+                        }
+                    }
+                    if (i < N)
+                    {
+                        dir.DeleteFile(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
+                    }
+                    gen--;
+                }
+
+                dir.Dispose();
+            }
+        }
+
+        /*
+         * Test a deletion policy that keeps last N commits
+         * around, through creates.
+         */
+
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout on NUnit for .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public virtual void TestKeepLastNDeletionPolicyWithCreates()
+        {
+            const int N = 10;
+
+            for (int pass = 0; pass < 2; pass++)
+            {
+                bool useCompoundFile = (pass % 2) != 0;
+
+                Directory dir = NewDirectory();
+                IndexWriterConfig conf = (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetIndexDeletionPolicy(new KeepLastNDeletionPolicy(this, N)).SetMaxBufferedDocs(10);
+                MergePolicy mp = conf.MergePolicy;
+                mp.NoCFSRatio = useCompoundFile ? 1.0 : 0.0;
+                IndexWriter writer = new IndexWriter(dir, conf);
+                KeepLastNDeletionPolicy policy = (KeepLastNDeletionPolicy)writer.Config.IndexDeletionPolicy;
+                writer.Dispose();
+                Term searchTerm = new Term("content", "aaa");
+                Query query = new TermQuery(searchTerm);
+
+                for (int i = 0; i < N + 1; i++)
+                {
+                    conf = (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetIndexDeletionPolicy(policy).SetMaxBufferedDocs(10);
+                    mp = conf.MergePolicy;
+                    mp.NoCFSRatio = useCompoundFile ? 1.0 : 0.0;
+                    writer = new IndexWriter(dir, conf);
+                    policy = (KeepLastNDeletionPolicy)writer.Config.IndexDeletionPolicy;
+                    for (int j = 0; j < 17; j++)
+                    {
+                        AddDocWithID(writer, i * (N + 1) + j);
+                    }
+                    // this is a commit
+                    writer.Dispose();
+                    conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetIndexDeletionPolicy(policy).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+                    writer = new IndexWriter(dir, conf);
+                    policy = (KeepLastNDeletionPolicy)writer.Config.IndexDeletionPolicy;
+                    writer.DeleteDocuments(new Term("id", "" + (i * (N + 1) + 3)));
+                    // this is a commit
+                    writer.Dispose();
+                    IndexReader reader = DirectoryReader.Open(dir);
+                    IndexSearcher searcher = NewSearcher(reader);
+                    ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
+                    Assert.AreEqual(16, hits.Length);
+                    reader.Dispose();
+
+                    writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetIndexDeletionPolicy(policy));
+                    policy = (KeepLastNDeletionPolicy)writer.Config.IndexDeletionPolicy;
+                    // this will not commit: there are no changes
+                    // pending because we opened for "create":
+                    writer.Dispose();
+                }
+
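+                // Three writer opens (and closes) per iteration, plus the initial CREATE writer: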
+                Assert.AreEqual(3 * (N + 1) + 1, policy.NumOnInit);
+                Assert.AreEqual(3 * (N + 1) + 1, policy.NumOnCommit);
+
+                IndexReader rwReader = DirectoryReader.Open(dir);
+                IndexSearcher searcher_ = NewSearcher(rwReader);
+                ScoreDoc[] hits_ = searcher_.Search(query, null, 1000).ScoreDocs;
+                Assert.AreEqual(0, hits_.Length);
+
+                // Simplistic check: just verify only the past N segments_N's still
+                // exist, and, I can open a reader on each:
+                long gen = SegmentInfos.GetLastCommitGeneration(dir);
+
+                dir.DeleteFile(IndexFileNames.SEGMENTS_GEN);
+                int expectedCount = 0;
+
+                rwReader.Dispose();
+
+                for (int i = 0; i < N + 1; i++)
+                {
+                    try
+                    {
+                        IndexReader reader = DirectoryReader.Open(dir);
+
+                        // Work backwards in commits on what the expected
+                        // count should be.
+                        searcher_ = NewSearcher(reader);
+                        hits_ = searcher_.Search(query, null, 1000).ScoreDocs;
+                        Assert.AreEqual(expectedCount, hits_.Length);
+                        if (expectedCount == 0)
+                        {
+                            expectedCount = 16;
+                        }
+                        else if (expectedCount == 16)
+                        {
+                            expectedCount = 17;
+                        }
+                        else if (expectedCount == 17)
+                        {
+                            expectedCount = 0;
+                        }
+                        reader.Dispose();
+                        if (i == N)
+                        {
+                            Assert.Fail("should have failed on commits before last " + N);
+                        }
+                    }
+                    catch (IOException)
+                    {
+                        if (i != N)
+                        {
+                            throw; // rethrow without resetting the stack trace
+                        }
+                    }
+                    if (i < N)
+                    {
+                        dir.DeleteFile(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
+                    }
+                    gen--;
+                }
+
+                dir.Dispose();
+            }
+        }
+
+        private void AddDocWithID(IndexWriter writer, int id)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+            doc.Add(NewStringField("id", "" + id, Field.Store.NO));
+            writer.AddDocument(doc);
+        }
+
+        private void AddDoc(IndexWriter writer)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+            writer.AddDocument(doc);
+        }
+    }
+}
\ No newline at end of file
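
Every policy in this file follows the same contract: IndexWriter hands the
full, oldest-first list of commit points to OnInit() when it opens and to
OnCommit() after each commit, and the policy calls Delete() on the points it
no longer wants. A minimal sketch under that contract (it mirrors the shipped
KeepOnlyLastCommitDeletionPolicy; the class name here is illustrative):

    using System.Collections.Generic;
    using Lucene.Net.Index;

    public sealed class KeepOnlyLastCommitSketch : IndexDeletionPolicy
    {
        public override void OnInit<T>(IList<T> commits)
        {
            // Treat startup like a fresh commit:
            OnCommit(commits);
        }

        public override void OnCommit<T>(IList<T> commits)
        {
            // Commits arrive oldest-first; drop everything but the newest:
            for (int i = 0; i < commits.Count - 1; i++)
            {
                commits[i].Delete();
            }
        }
    }

Wiring it in looks just like the tests above:
IndexWriterConfig.SetIndexDeletionPolicy(new KeepOnlyLastCommitSketch()).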


[67/72] [abbrv] lucenenet git commit: BUG: Changed exception type to ArgumentException for DefaultCodecFactory, DefaultDocValuesFormatFactory, and DefaultPostingsFormatFactory to make the TestAddIndexMissingCodec() test pass.

Posted by ni...@apache.org.
BUG: Changed exception type to ArgumentException for DefaultCodecFactory, DefaultDocValuesFormatFactory, and DefaultPostingsFormatFactory to make the TestAddIndexMissingCodec() test pass.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/971b4387
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/971b4387
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/971b4387

Branch: refs/heads/api-work
Commit: 971b4387f9591afdfab8e34fe6fede61348e6101
Parents: 84ad7a3
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 26 03:46:57 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:18:01 2017 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Support/Codecs/DefaultCodecFactory.cs          | 2 +-
 .../Support/Codecs/DefaultDocValuesFormatFactory.cs                | 2 +-
 src/Lucene.Net.Core/Support/Codecs/DefaultPostingsFormatFactory.cs | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/971b4387/src/Lucene.Net.Core/Support/Codecs/DefaultCodecFactory.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Support/Codecs/DefaultCodecFactory.cs b/src/Lucene.Net.Core/Support/Codecs/DefaultCodecFactory.cs
index db18400..c1cbd6e 100644
--- a/src/Lucene.Net.Core/Support/Codecs/DefaultCodecFactory.cs
+++ b/src/Lucene.Net.Core/Support/Codecs/DefaultCodecFactory.cs
@@ -118,7 +118,7 @@ namespace Lucene.Net.Codecs
             m_codecNameToTypeMap.TryGetValue(name, out codecType);
             if (codecType == null)
             {
-                throw new InvalidOperationException(string.Format("Codec '{0}' cannot be loaded. If the codec is not " +
+                throw new ArgumentException(string.Format("Codec '{0}' cannot be loaded. If the codec is not " +
                     "in a Lucene.Net assembly, you must subclass DefaultCodecFactory and call ScanForCodecs() with the " + 
                     "target assembly from the subclass constructor.", name));
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/971b4387/src/Lucene.Net.Core/Support/Codecs/DefaultDocValuesFormatFactory.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Support/Codecs/DefaultDocValuesFormatFactory.cs b/src/Lucene.Net.Core/Support/Codecs/DefaultDocValuesFormatFactory.cs
index 0835b56..772026c 100644
--- a/src/Lucene.Net.Core/Support/Codecs/DefaultDocValuesFormatFactory.cs
+++ b/src/Lucene.Net.Core/Support/Codecs/DefaultDocValuesFormatFactory.cs
@@ -118,7 +118,7 @@ namespace Lucene.Net.Codecs
             m_docValuesFormatNameToTypeMap.TryGetValue(name, out codecType);
             if (codecType == null)
             {
-                throw new InvalidOperationException(string.Format("DocValuesFormat '{0}' cannot be loaded. If the format is not " +
+                throw new ArgumentException(string.Format("DocValuesFormat '{0}' cannot be loaded. If the format is not " +
                     "in a Lucene.Net assembly, you must subclass DefaultDocValuesFormatFactory and call ScanForDocValuesFormats() with the " +
                     "target assembly from the subclass constructor.", name));
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/971b4387/src/Lucene.Net.Core/Support/Codecs/DefaultPostingsFormatFactory.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Support/Codecs/DefaultPostingsFormatFactory.cs b/src/Lucene.Net.Core/Support/Codecs/DefaultPostingsFormatFactory.cs
index 89dd47c..d9e0511 100644
--- a/src/Lucene.Net.Core/Support/Codecs/DefaultPostingsFormatFactory.cs
+++ b/src/Lucene.Net.Core/Support/Codecs/DefaultPostingsFormatFactory.cs
@@ -118,7 +118,7 @@ namespace Lucene.Net.Codecs
             m_postingsFormatNameToTypeMap.TryGetValue(name, out codecType);
             if (codecType == null)
             {
-                throw new InvalidOperationException(string.Format("PostingsFormat '{0}' cannot be loaded. If the format is not " +
+                throw new ArgumentException(string.Format("PostingsFormat '{0}' cannot be loaded. If the format is not " +
                     "in a Lucene.Net assembly, you must subclass DefaultPostingsFormatFactory and call ScanForPostingsFormats() with the " +
                     "target assembly from the subclass constructor.", name));
             }
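
For callers, the visible effect of this change is the exception type thrown
when a name cannot be resolved. A minimal sketch, assuming lookups go through
the public Codec.ForName(), which delegates to these factories (the
TryGetCodec helper and the codec name are made up for illustration):

    using System;
    using Lucene.Net.Codecs;

    public static class CodecLookupSketch
    {
        public static Codec TryGetCodec(string name)
        {
            try
            {
                return Codec.ForName(name);
            }
            catch (ArgumentException e) // was InvalidOperationException before this commit
            {
                Console.Error.WriteLine("Unknown codec '" + name + "': " + e.Message);
                return null;
            }
        }
    }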


[09/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestBooleanMinShouldMatch.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestBooleanMinShouldMatch.cs b/src/Lucene.Net.Tests/Search/TestBooleanMinShouldMatch.cs
new file mode 100644
index 0000000..29471a0
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestBooleanMinShouldMatch.cs
@@ -0,0 +1,527 @@
+using System;
+using System.Globalization;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Similarity = Lucene.Net.Search.Similarities.Similarity;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Test that BooleanQuery.MinimumNumberShouldMatch works.
+    /// </summary>
+    [TestFixture]
+    public class TestBooleanMinShouldMatch : LuceneTestCase
+    {
+        private static Directory Index;
+        private static IndexReader r;
+        private static IndexSearcher s;
+
+        /// <summary>
+        /// LUCENENET specific: this setup method is non-static because
+        /// NewStringField is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
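+            // Each string lists the tokens present in that doc's "data" field; null entries index no "data" field.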
+            string[] data = new string[] { "A 1 2 3 4 5 6", "Z       4 5 6", null, "B   2   4 5 6", "Y     3   5 6", null, "C     3     6", "X       4 5 6" };
+
+            Index = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), Index, Similarity, TimeZone);
+
+            for (int i = 0; i < data.Length; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("id", Convert.ToString(i), Field.Store.YES)); //Field.Keyword("id",String.valueOf(i)));
+                doc.Add(NewStringField("all", "all", Field.Store.YES)); //Field.Keyword("all","all"));
+                if (null != data[i])
+                {
+                    doc.Add(NewTextField("data", data[i], Field.Store.YES)); //Field.Text("data",data[i]));
+                }
+                w.AddDocument(doc);
+            }
+
+            r = w.Reader;
+            s = NewSearcher(r);
+            w.Dispose();
+            //System.out.println("Set up " + getName());
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            s = null;
+            r.Dispose();
+            r = null;
+            Index.Dispose();
+            Index = null;
+        }
+
+        public virtual void VerifyNrHits(Query q, int expected)
+        {
+            // bs1
+            ScoreDoc[] h = s.Search(q, null, 1000).ScoreDocs;
+            if (expected != h.Length)
+            {
+                PrintHits(TestName, h, s);
+            }
+            Assert.AreEqual(expected, h.Length, "result count");
+            //System.out.println("TEST: now check");
+            // bs2
+            TopScoreDocCollector collector = TopScoreDocCollector.Create(1000, true);
+            s.Search(q, collector);
+            ScoreDoc[] h2 = collector.GetTopDocs().ScoreDocs;
+            if (expected != h2.Length)
+            {
+                PrintHits(TestName, h2, s);
+            }
+            Assert.AreEqual(expected, h2.Length, "result count (bs2)");
+
+            QueryUtils.Check(Random(), q, s, Similarity);
+        }
+
+        [Test]
+        public virtual void TestAllOptional()
+        {
+            BooleanQuery q = new BooleanQuery();
+            for (int i = 1; i <= 4; i++)
+            {
+                q.Add(new TermQuery(new Term("data", "" + i)), Occur.SHOULD); //false, false);
+            }
+            q.MinimumNumberShouldMatch = 2; // match at least two of 4
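+            // only "A 1 2 3 4 5 6" and "B   2   4 5 6" contain two or more of 1..4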
+            VerifyNrHits(q, 2);
+        }
+
+        [Test]
+        public virtual void TestOneReqAndSomeOptional()
+        {
+            /* one required, some optional */
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("all", "all")), Occur.MUST); //true,  false);
+            q.Add(new TermQuery(new Term("data", "5")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "4")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "3")), Occur.SHOULD); //false, false);
+
+            q.MinimumNumberShouldMatch = 2; // 2 of 3 optional
+
+            VerifyNrHits(q, 5);
+        }
+
+        [Test]
+        public virtual void TestSomeReqAndSomeOptional()
+        {
+            /* two required, some optional */
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("all", "all")), Occur.MUST); //true,  false);
+            q.Add(new TermQuery(new Term("data", "6")), Occur.MUST); //true,  false);
+            q.Add(new TermQuery(new Term("data", "5")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "4")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "3")), Occur.SHOULD); //false, false);
+
+            q.MinimumNumberShouldMatch = 2; // 2 of 3 optional
+
+            VerifyNrHits(q, 5);
+        }
+
+        [Test]
+        public virtual void TestOneProhibAndSomeOptional()
+        {
+            /* one prohibited, some optional */
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("data", "1")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "2")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "3")), Occur.MUST_NOT); //false, true );
+            q.Add(new TermQuery(new Term("data", "4")), Occur.SHOULD); //false, false);
+
+            q.MinimumNumberShouldMatch = 2; // 2 of 3 optional
+
+            VerifyNrHits(q, 1);
+        }
+
+        [Test]
+        public virtual void TestSomeProhibAndSomeOptional()
+        {
+            /* two prohibited, some optional */
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("data", "1")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "2")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "3")), Occur.MUST_NOT); //false, true );
+            q.Add(new TermQuery(new Term("data", "4")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "C")), Occur.MUST_NOT); //false, true );
+
+            q.MinimumNumberShouldMatch = 2; // 2 of 3 optional
+
+            VerifyNrHits(q, 1);
+        }
+
+        [Test]
+        public virtual void TestOneReqOneProhibAndSomeOptional()
+        {
+            /* one required, one prohibited, some optional */
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("data", "6")), Occur.MUST); // true,  false);
+            q.Add(new TermQuery(new Term("data", "5")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "4")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "3")), Occur.MUST_NOT); //false, true );
+            q.Add(new TermQuery(new Term("data", "2")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "1")), Occur.SHOULD); //false, false);
+
+            q.MinimumNumberShouldMatch = 3; // 3 of 4 optional
+
+            VerifyNrHits(q, 1);
+        }
+
+        [Test]
+        public virtual void TestSomeReqOneProhibAndSomeOptional()
+        {
+            /* two required, one prohibited, some optional */
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("all", "all")), Occur.MUST); //true,  false);
+            q.Add(new TermQuery(new Term("data", "6")), Occur.MUST); //true,  false);
+            q.Add(new TermQuery(new Term("data", "5")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "4")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "3")), Occur.MUST_NOT); //false, true );
+            q.Add(new TermQuery(new Term("data", "2")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "1")), Occur.SHOULD); //false, false);
+
+            q.MinimumNumberShouldMatch = 3; // 3 of 4 optional
+
+            VerifyNrHits(q, 1);
+        }
+
+        [Test]
+        public virtual void TestOneReqSomeProhibAndSomeOptional()
+        {
+            /* one required, two prohibited, some optional */
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("data", "6")), Occur.MUST); //true,  false);
+            q.Add(new TermQuery(new Term("data", "5")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "4")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "3")), Occur.MUST_NOT); //false, true );
+            q.Add(new TermQuery(new Term("data", "2")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "1")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "C")), Occur.MUST_NOT); //false, true );
+
+            q.MinimumNumberShouldMatch = 3; // 3 of 4 optional
+
+            VerifyNrHits(q, 1);
+        }
+
+        [Test]
+        public virtual void TestSomeReqSomeProhibAndSomeOptional()
+        {
+            /* two required, two prohibited, some optional */
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("all", "all")), Occur.MUST); //true,  false);
+            q.Add(new TermQuery(new Term("data", "6")), Occur.MUST); //true,  false);
+            q.Add(new TermQuery(new Term("data", "5")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "4")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "3")), Occur.MUST_NOT); //false, true );
+            q.Add(new TermQuery(new Term("data", "2")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "1")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "C")), Occur.MUST_NOT); //false, true );
+
+            q.MinimumNumberShouldMatch = 3; // 3 of 4 optional
+
+            VerifyNrHits(q, 1);
+        }
+
+        [Test]
+        public virtual void TestMinHigherThenNumOptional()
+        {
+            /* two required, two prohibited, some optional */
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("all", "all")), Occur.MUST); //true,  false);
+            q.Add(new TermQuery(new Term("data", "6")), Occur.MUST); //true,  false);
+            q.Add(new TermQuery(new Term("data", "5")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "4")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "3")), Occur.MUST_NOT); //false, true );
+            q.Add(new TermQuery(new Term("data", "2")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "1")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "C")), Occur.MUST_NOT); //false, true );
+
+            q.MinimumNumberShouldMatch = 90; // 90 of 4 optional ?!?!?!
+
+            VerifyNrHits(q, 0);
+        }
+
+        [Test]
+        public virtual void TestMinEqualToNumOptional()
+        {
+            /* two required, two optional */
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("all", "all")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "6")), Occur.MUST); //true,  false);
+            q.Add(new TermQuery(new Term("data", "3")), Occur.MUST); //true,  false);
+            q.Add(new TermQuery(new Term("data", "2")), Occur.SHOULD); //false, false);
+
+            q.MinimumNumberShouldMatch = 2; // 2 of 2 optional
+
+            VerifyNrHits(q, 1);
+        }
+
+        [Test]
+        public virtual void TestOneOptionalEqualToMin()
+        {
+            /* two required, one optional */
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("all", "all")), Occur.MUST); //true,  false);
+            q.Add(new TermQuery(new Term("data", "3")), Occur.SHOULD); //false, false);
+            q.Add(new TermQuery(new Term("data", "2")), Occur.MUST); //true,  false);
+
+            q.MinimumNumberShouldMatch = 1; // 1 of 1 optional
+
+            VerifyNrHits(q, 1);
+        }
+
+        [Test]
+        public virtual void TestNoOptionalButMin()
+        {
+            /* two required, no optional */
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("all", "all")), Occur.MUST); //true,  false);
+            q.Add(new TermQuery(new Term("data", "2")), Occur.MUST); //true,  false);
+
+            q.MinimumNumberShouldMatch = 1; // 1 of 0 optional
+
+            VerifyNrHits(q, 0);
+        }
+
+        [Test]
+        public virtual void TestNoOptionalButMin2()
+        {
+            /* one required, no optional */
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("all", "all")), Occur.MUST); //true,  false);
+
+            q.MinimumNumberShouldMatch = 1; // 1 of 0 optional
+
+            VerifyNrHits(q, 0);
+        }
+
+        [Test]
+        public virtual void TestRandomQueries()
+        {
+            const string field = "data";
+            string[] vals = new string[] { "1", "2", "3", "4", "5", "6", "A", "Z", "B", "Y", "Z", "X", "foo" };
+            int maxLev = 4;
+
+            // callback object to set a random setMinimumNumberShouldMatch
+            TestBoolean2.Callback minNrCB = new CallbackAnonymousInnerClassHelper(this, field, vals);
+
+            // increase number of iterations for more complete testing
+            int num = AtLeast(20);
+            for (int i = 0; i < num; i++)
+            {
+                int lev = Random().Next(maxLev);
+                int seed = Random().Next();
+                BooleanQuery q1 = TestBoolean2.RandBoolQuery(new Random(seed), true, lev, field, vals, null);
+                // BooleanQuery q2 = TestBoolean2.randBoolQuery(new Random(seed), lev, field, vals, minNrCB);
+                BooleanQuery q2 = TestBoolean2.RandBoolQuery(new Random(seed), true, lev, field, vals, null);
+                // only set minimumNumberShouldMatch on the top level query since setting
+                // at a lower level can change the score.
+                minNrCB.PostCreate(q2);
+
+                // Can't use Hits because normalized scores will mess things
+                // up.  The non-sorting version of search() that returns TopDocs
+                // will not normalize scores.
+                TopDocs top1 = s.Search(q1, null, 100);
+                TopDocs top2 = s.Search(q2, null, 100);
+                if (i < 100)
+                {
+                    QueryUtils.Check(Random(), q1, s, Similarity);
+                    QueryUtils.Check(Random(), q2, s, Similarity);
+                }
+                AssertSubsetOfSameScores(q2, top1, top2);
+            }
+            // System.out.println("Total hits:"+tot);
+        }
+
+        private class CallbackAnonymousInnerClassHelper : TestBoolean2.Callback
+        {
+            private readonly TestBooleanMinShouldMatch OuterInstance;
+
+            private string Field;
+            private string[] Vals;
+
+            public CallbackAnonymousInnerClassHelper(TestBooleanMinShouldMatch outerInstance, string field, string[] vals)
+            {
+                this.OuterInstance = outerInstance;
+                this.Field = field;
+                this.Vals = vals;
+            }
+
+            public virtual void PostCreate(BooleanQuery q)
+            {
+                BooleanClause[] c = q.GetClauses();
+                int opt = 0;
+                for (int i = 0; i < c.Length; i++)
+                {
+                    if (c[i].Occur == Occur.SHOULD)
+                    {
+                        opt++;
+                    }
+                }
+                q.MinimumNumberShouldMatch = Random().Next(opt + 2);
+                if (Random().NextBoolean())
+                {
+                    // also add a random negation
+                    Term randomTerm = new Term(Field, Vals[Random().Next(Vals.Length)]);
+                    q.Add(new TermQuery(randomTerm), Occur.MUST_NOT);
+                }
+            }
+        }
+
+        private void AssertSubsetOfSameScores(Query q, TopDocs top1, TopDocs top2)
+        {
+            // The constrained query's results
+            // should be a subset of the unconstrained query's results.
+            if (top2.TotalHits > top1.TotalHits)
+            {
+                Assert.Fail("Constrained results not a subset:\n" + CheckHits.TopdocsString(top1, 0, 0) + CheckHits.TopdocsString(top2, 0, 0) + "for query:" + q.ToString());
+            }
+
+            for (int hit = 0; hit < top2.TotalHits; hit++)
+            {
+                int id = top2.ScoreDocs[hit].Doc;
+                float score = top2.ScoreDocs[hit].Score;
+                bool found = false;
+                // find this doc in other hits
+                for (int other = 0; other < top1.TotalHits; other++)
+                {
+                    if (top1.ScoreDocs[other].Doc == id)
+                    {
+                        found = true;
+                        float otherScore = top1.ScoreDocs[other].Score;
+                        // check if scores match
+                        Assert.AreEqual(score, otherScore, CheckHits.ExplainToleranceDelta(score, otherScore), "Doc " + id + " scores don't match\n" + CheckHits.TopdocsString(top1, 0, 0) + CheckHits.TopdocsString(top2, 0, 0) + "for query:" + q.ToString());
+                    }
+                }
+
+                // check if subset
+                if (!found)
+                {
+                    Assert.Fail("Doc " + id + " not found\n" + CheckHits.TopdocsString(top1, 0, 0) + CheckHits.TopdocsString(top2, 0, 0) + "for query:" + q.ToString());
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestRewriteCoord1()
+        {
+            Similarity oldSimilarity = s.Similarity;
+            try
+            {
+                s.Similarity = new DefaultSimilarityAnonymousInnerClassHelper(this);
+                BooleanQuery q1 = new BooleanQuery();
+                q1.Add(new TermQuery(new Term("data", "1")), Occur.SHOULD);
+                BooleanQuery q2 = new BooleanQuery();
+                q2.Add(new TermQuery(new Term("data", "1")), Occur.SHOULD);
+                q2.MinimumNumberShouldMatch = 1;
+                TopDocs top1 = s.Search(q1, null, 100);
+                TopDocs top2 = s.Search(q2, null, 100);
+                AssertSubsetOfSameScores(q2, top1, top2);
+            }
+            finally
+            {
+                s.Similarity = oldSimilarity;
+            }
+        }
+
+        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        {
+            private readonly TestBooleanMinShouldMatch OuterInstance;
+
+            public DefaultSimilarityAnonymousInnerClassHelper(TestBooleanMinShouldMatch outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
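+            // Deliberately differs from DefaultSimilarity's coord (overlap / maxOverlap):
+            // if the rewrite dropped the coord factor, scores would diverge and
+            // AssertSubsetOfSameScores would fail.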
+            public override float Coord(int overlap, int maxOverlap)
+            {
+                return overlap / ((float)maxOverlap + 1);
+            }
+        }
+
+        [Test]
+        public virtual void TestRewriteNegate()
+        {
+            Similarity oldSimilarity = s.Similarity;
+            try
+            {
+                s.Similarity = new DefaultSimilarityAnonymousInnerClassHelper2(this);
+                BooleanQuery q1 = new BooleanQuery();
+                q1.Add(new TermQuery(new Term("data", "1")), Occur.SHOULD);
+                BooleanQuery q2 = new BooleanQuery();
+                q2.Add(new TermQuery(new Term("data", "1")), Occur.SHOULD);
+                q2.Add(new TermQuery(new Term("data", "Z")), Occur.MUST_NOT);
+                TopDocs top1 = s.Search(q1, null, 100);
+                TopDocs top2 = s.Search(q2, null, 100);
+                AssertSubsetOfSameScores(q2, top1, top2);
+            }
+            finally
+            {
+                s.Similarity = oldSimilarity;
+            }
+        }
+
+        private class DefaultSimilarityAnonymousInnerClassHelper2 : DefaultSimilarity
+        {
+            private readonly TestBooleanMinShouldMatch OuterInstance;
+
+            public DefaultSimilarityAnonymousInnerClassHelper2(TestBooleanMinShouldMatch outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override float Coord(int overlap, int maxOverlap)
+            {
+                return overlap / ((float)maxOverlap + 1);
+            }
+        }
+
+        protected internal virtual void PrintHits(string test, ScoreDoc[] h, IndexSearcher searcher)
+        {
+            Console.Error.WriteLine("------- " + test + " -------");
+
+            NumberFormatInfo f = new NumberFormatInfo();
+            f.NumberDecimalSeparator = ".";
+
+            //DecimalFormat f = new DecimalFormat("0.000000", DecimalFormatSymbols.getInstance(Locale.ROOT));
+
+            for (int i = 0; i < h.Length; i++)
+            {
+                Document d = searcher.Doc(h[i].Doc);
+                decimal score = (decimal)h[i].Score;
+                Console.Error.WriteLine("#" + i + ": " + score.ToString(f) + " - " + d.Get("id") + " - " + d.Get("data"));
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestBooleanOr.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestBooleanOr.cs b/src/Lucene.Net.Tests/Search/TestBooleanOr.cs
new file mode 100644
index 0000000..ba4f218
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestBooleanOr.cs
@@ -0,0 +1,253 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using FixedBitSet = Lucene.Net.Util.FixedBitSet;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestBooleanOr : LuceneTestCase
+    {
+        private static string FIELD_T = "T";
+        private static string FIELD_C = "C";
+
+        private TermQuery T1 = new TermQuery(new Term(FIELD_T, "files"));
+        private TermQuery T2 = new TermQuery(new Term(FIELD_T, "deleting"));
+        private TermQuery C1 = new TermQuery(new Term(FIELD_C, "production"));
+        private TermQuery C2 = new TermQuery(new Term(FIELD_C, "optimize"));
+
+        private IndexSearcher Searcher = null;
+        private Directory Dir;
+        private IndexReader Reader;
+
+        private int Search(Query q)
+        {
+            QueryUtils.Check(Random(), q, Searcher, Similarity);
+            return Searcher.Search(q, null, 1000).TotalHits;
+        }
+
+        [Test]
+        public virtual void TestElements()
+        {
+            Assert.AreEqual(1, Search(T1));
+            Assert.AreEqual(1, Search(T2));
+            Assert.AreEqual(1, Search(C1));
+            Assert.AreEqual(1, Search(C2));
+        }
+
+        /// <summary>
+        /// <code>T:files T:deleting C:production C:optimize </code>
+        /// matches the single indexed document.
+        /// </summary>
+        [Test]
+        public virtual void TestFlat()
+        {
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new BooleanClause(T1, Occur.SHOULD));
+            q.Add(new BooleanClause(T2, Occur.SHOULD));
+            q.Add(new BooleanClause(C1, Occur.SHOULD));
+            q.Add(new BooleanClause(C2, Occur.SHOULD));
+            Assert.AreEqual(1, Search(q));
+        }
+
+        /// <summary>
+        /// <code>(T:files T:deleting) (+C:production +C:optimize)</code>
+        /// matches the single indexed document.
+        /// </summary>
+        [Test]
+        public virtual void TestParenthesisMust()
+        {
+            BooleanQuery q3 = new BooleanQuery();
+            q3.Add(new BooleanClause(T1, Occur.SHOULD));
+            q3.Add(new BooleanClause(T2, Occur.SHOULD));
+            BooleanQuery q4 = new BooleanQuery();
+            q4.Add(new BooleanClause(C1, Occur.MUST));
+            q4.Add(new BooleanClause(C2, Occur.MUST));
+            BooleanQuery q2 = new BooleanQuery();
+            q2.Add(q3, Occur.SHOULD);
+            q2.Add(q4, Occur.SHOULD);
+            Assert.AreEqual(1, Search(q2));
+        }
+
+        /// <summary>
+        /// <code>(T:files T:deleting) +(C:production C:optimize)</code>
+        /// was once reported as returning no hits; asserts that it matches.
+        /// </summary>
+        [Test]
+        public virtual void TestParenthesisMust2()
+        {
+            BooleanQuery q3 = new BooleanQuery();
+            q3.Add(new BooleanClause(T1, Occur.SHOULD));
+            q3.Add(new BooleanClause(T2, Occur.SHOULD));
+            BooleanQuery q4 = new BooleanQuery();
+            q4.Add(new BooleanClause(C1, Occur.SHOULD));
+            q4.Add(new BooleanClause(C2, Occur.SHOULD));
+            BooleanQuery q2 = new BooleanQuery();
+            q2.Add(q3, Occur.SHOULD);
+            q2.Add(q4, Occur.MUST);
+            Assert.AreEqual(1, Search(q2));
+        }
+
+        /// <summary>
+        /// <code>(T:files T:deleting) (C:production C:optimize)</code>
+        /// was once reported as returning no hits; asserts that it matches.
+        /// </summary>
+        [Test]
+        public virtual void TestParenthesisShould()
+        {
+            BooleanQuery q3 = new BooleanQuery();
+            q3.Add(new BooleanClause(T1, Occur.SHOULD));
+            q3.Add(new BooleanClause(T2, Occur.SHOULD));
+            BooleanQuery q4 = new BooleanQuery();
+            q4.Add(new BooleanClause(C1, Occur.SHOULD));
+            q4.Add(new BooleanClause(C2, Occur.SHOULD));
+            BooleanQuery q2 = new BooleanQuery();
+            q2.Add(q3, Occur.SHOULD);
+            q2.Add(q4, Occur.SHOULD);
+            Assert.AreEqual(1, Search(q2));
+        }
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+
+            //
+            Dir = NewDirectory();
+
+            //
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Dir, Similarity, TimeZone);
+
+            //
+            Document d = new Document();
+            d.Add(NewField(FIELD_T, "Optimize not deleting all files", TextField.TYPE_STORED));
+            d.Add(NewField(FIELD_C, "Deleted When I run an optimize in our production environment.", TextField.TYPE_STORED));
+
+            //
+            writer.AddDocument(d);
+
+            Reader = writer.Reader;
+            //
+            Searcher = NewSearcher(Reader);
+            writer.Dispose();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        [Test]
+        public virtual void TestBooleanScorerMax()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter riw = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            int docCount = AtLeast(10000);
+
+            for (int i = 0; i < docCount; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewField("field", "a", TextField.TYPE_NOT_STORED));
+                riw.AddDocument(doc);
+            }
+
+            riw.ForceMerge(1);
+            IndexReader r = riw.Reader;
+            riw.Dispose();
+
+            IndexSearcher s = NewSearcher(r);
+            BooleanQuery bq = new BooleanQuery();
+            bq.Add(new TermQuery(new Term("field", "a")), Occur.SHOULD);
+            bq.Add(new TermQuery(new Term("field", "a")), Occur.SHOULD);
+
+            Weight w = s.CreateNormalizedWeight(bq);
+
+            Assert.AreEqual(1, s.IndexReader.Leaves.Count);
+            BulkScorer scorer = w.GetBulkScorer(s.IndexReader.Leaves[0], false, null);
+
+            FixedBitSet hits = new FixedBitSet(docCount);
+            AtomicInt32 end = new AtomicInt32();
+            ICollector c = new CollectorAnonymousInnerClassHelper(this, scorer, hits, end);
+
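+            // Drive the BulkScorer in random-sized chunks; each Score(c, max) call may only
+            // collect docs below max, and after the last chunk every doc must be collected.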
+            while (end.Get() < docCount)
+            {
+                int inc = TestUtil.NextInt(Random(), 1, 1000);
+                end.AddAndGet(inc);
+                scorer.Score(c, end.Get());
+            }
+
+            Assert.AreEqual(docCount, hits.Cardinality());
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        private class CollectorAnonymousInnerClassHelper : ICollector
+        {
+            private readonly TestBooleanOr OuterInstance;
+
+            private BulkScorer scorer;
+            private FixedBitSet Hits;
+            private AtomicInt32 End;
+
+            public CollectorAnonymousInnerClassHelper(TestBooleanOr outerInstance, BulkScorer scorer, FixedBitSet hits, AtomicInt32 end)
+            {
+                this.OuterInstance = outerInstance;
+                this.scorer = scorer;
+                this.Hits = hits;
+                this.End = end;
+            }
+
+            public virtual void SetNextReader(AtomicReaderContext context)
+            {
+            }
+
+            public virtual void Collect(int doc)
+            {
+                Assert.IsTrue(doc < End.Get(), "collected doc=" + doc + " beyond max=" + End);
+                Hits.Set(doc);
+            }
+
+            public virtual void SetScorer(Scorer scorer)
+            {
+            }
+
+            public virtual bool AcceptsDocsOutOfOrder
+            {
+                get { return true; }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestBooleanQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestBooleanQuery.cs b/src/Lucene.Net.Tests/Search/TestBooleanQuery.cs
new file mode 100644
index 0000000..4e8d377
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestBooleanQuery.cs
@@ -0,0 +1,412 @@
+using System;
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Index;
+    using NUnit.Framework;
+    using Support;
+    using System.Threading.Tasks;
+    using Util;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using SpanQuery = Lucene.Net.Search.Spans.SpanQuery;
+    using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
+    using Term = Lucene.Net.Index.Term;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestBooleanQuery : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestEquality()
+        {
+            BooleanQuery bq1 = new BooleanQuery();
+            bq1.Add(new TermQuery(new Term("field", "value1")), Occur.SHOULD);
+            bq1.Add(new TermQuery(new Term("field", "value2")), Occur.SHOULD);
+            BooleanQuery nested1 = new BooleanQuery();
+            nested1.Add(new TermQuery(new Term("field", "nestedvalue1")), Occur.SHOULD);
+            nested1.Add(new TermQuery(new Term("field", "nestedvalue2")), Occur.SHOULD);
+            bq1.Add(nested1, Occur.SHOULD);
+
+            BooleanQuery bq2 = new BooleanQuery();
+            bq2.Add(new TermQuery(new Term("field", "value1")), Occur.SHOULD);
+            bq2.Add(new TermQuery(new Term("field", "value2")), Occur.SHOULD);
+            BooleanQuery nested2 = new BooleanQuery();
+            nested2.Add(new TermQuery(new Term("field", "nestedvalue1")), Occur.SHOULD);
+            nested2.Add(new TermQuery(new Term("field", "nestedvalue2")), Occur.SHOULD);
+            bq2.Add(nested2, Occur.SHOULD);
+
+            Assert.IsTrue(bq1.Equals(bq2));
+            //Assert.AreEqual(bq1, bq2);
+        }
+
+        [Test]
+        public virtual void TestException()
+        {
+            try
+            {
+                BooleanQuery.MaxClauseCount = 0;
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // okay
+            }
+        }
+
+        // LUCENE-1630
+        [Test]
+        public virtual void TestNullOrSubScorer()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "a b c d", Field.Store.NO));
+            w.AddDocument(doc);
+
+            IndexReader r = w.Reader;
+            IndexSearcher s = NewSearcher(r);
+            // this test relies upon coord being the default implementation,
+            // otherwise scores are different!
+            s.Similarity = new DefaultSimilarity();
+
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("field", "a")), Occur.SHOULD);
+
+            // LUCENE-2617: make sure that a term not in the index still contributes to the score via coord factor
+            float score = s.Search(q, 10).MaxScore;
+            Query subQuery = new TermQuery(new Term("field", "not_in_index"));
+            subQuery.Boost = 0;
+            q.Add(subQuery, Occur.SHOULD);
+            float score2 = s.Search(q, 10).MaxScore;
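+            // With DefaultSimilarity, coord(1 match, 2 clauses) = 1/2, so the score should halve: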
+            Assert.AreEqual(score * .5F, score2, 1e-6);
+
+            // LUCENE-2617: make sure that a clause not in the index still contributes to the score via coord factor
+            BooleanQuery qq = (BooleanQuery)q.Clone();
+            PhraseQuery phrase = new PhraseQuery();
+            phrase.Add(new Term("field", "not_in_index"));
+            phrase.Add(new Term("field", "another_not_in_index"));
+            phrase.Boost = 0;
+            qq.Add(phrase, Occur.SHOULD);
+            score2 = s.Search(qq, 10).MaxScore;
+            Assert.AreEqual(score * (1 / 3F), score2, 1e-6);
+
+            // now test BooleanScorer2
+            subQuery = new TermQuery(new Term("field", "b"));
+            subQuery.Boost = 0;
+            q.Add(subQuery, Occur.MUST);
+            score2 = s.Search(q, 10).MaxScore;
+            Assert.AreEqual(score * (2 / 3F), score2, 1e-6);
+
+            // PhraseQuery w/ no terms added returns a null scorer
+            PhraseQuery pq = new PhraseQuery();
+            q.Add(pq, Occur.SHOULD);
+            Assert.AreEqual(1, s.Search(q, 10).TotalHits);
+
+            // A required clause which returns null scorer should return null scorer to
+            // IndexSearcher.
+            q = new BooleanQuery();
+            pq = new PhraseQuery();
+            q.Add(new TermQuery(new Term("field", "a")), Occur.SHOULD);
+            q.Add(pq, Occur.MUST);
+            Assert.AreEqual(0, s.Search(q, 10).TotalHits);
+
+            DisjunctionMaxQuery dmq = new DisjunctionMaxQuery(1.0f);
+            dmq.Add(new TermQuery(new Term("field", "a")));
+            dmq.Add(pq);
+            Assert.AreEqual(1, s.Search(dmq, 10).TotalHits);
+
+            r.Dispose();
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDeMorgan()
+        {
+            Directory dir1 = NewDirectory();
+            RandomIndexWriter iw1 = new RandomIndexWriter(Random(), dir1, Similarity, TimeZone);
+            Document doc1 = new Document();
+            doc1.Add(NewTextField("field", "foo bar", Field.Store.NO));
+            iw1.AddDocument(doc1);
+            IndexReader reader1 = iw1.Reader;
+            iw1.Dispose();
+
+            Directory dir2 = NewDirectory();
+            RandomIndexWriter iw2 = new RandomIndexWriter(Random(), dir2, Similarity, TimeZone);
+            Document doc2 = new Document();
+            doc2.Add(NewTextField("field", "foo baz", Field.Store.NO));
+            iw2.AddDocument(doc2);
+            IndexReader reader2 = iw2.Reader;
+            iw2.Dispose();
+
+            BooleanQuery query = new BooleanQuery(); // Query: +foo -ba*
+            query.Add(new TermQuery(new Term("field", "foo")), Occur.MUST);
+            WildcardQuery wildcardQuery = new WildcardQuery(new Term("field", "ba*"));
+            wildcardQuery.MultiTermRewriteMethod = (MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+            query.Add(wildcardQuery, Occur.MUST_NOT);
+
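+            // Each segment contains a term matching ba*, so the MUST_NOT clause should
+            // exclude every document no matter how the wildcard is rewritten per segment.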
+            MultiReader multireader = new MultiReader(reader1, reader2);
+            IndexSearcher searcher = NewSearcher(multireader);
+            Assert.AreEqual(0, searcher.Search(query, 10).TotalHits);
+
+
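+            // Note: this Task appears to be an unused remnant of porting the Java
+            // ExecutorService variant; it is never started.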
+            Task foo = new Task(TestDeMorgan);
+
+            TaskScheduler es = TaskScheduler.Default;
+            searcher = new IndexSearcher(multireader, es);
+            if (VERBOSE)
+            {
+                Console.WriteLine("rewritten form: " + searcher.Rewrite(query));
+            }
+            Assert.AreEqual(0, searcher.Search(query, 10).TotalHits);
+
+            multireader.Dispose();
+            reader1.Dispose();
+            reader2.Dispose();
+            dir1.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestBS2DisjunctionNextVsAdvance()
+        {
+            Directory d = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
+            int numDocs = AtLeast(300);
+            for (int docUpto = 0; docUpto < numDocs; docUpto++)
+            {
+                string contents = "a";
+                if (Random().Next(20) <= 16)
+                {
+                    contents += " b";
+                }
+                if (Random().Next(20) <= 8)
+                {
+                    contents += " c";
+                }
+                if (Random().Next(20) <= 4)
+                {
+                    contents += " d";
+                }
+                if (Random().Next(20) <= 2)
+                {
+                    contents += " e";
+                }
+                if (Random().Next(20) <= 1)
+                {
+                    contents += " f";
+                }
+                Document doc = new Document();
+                doc.Add(new TextField("field", contents, Field.Store.NO));
+                w.AddDocument(doc);
+            }
+            w.ForceMerge(1);
+            IndexReader r = w.Reader;
+            IndexSearcher s = NewSearcher(r);
+            w.Dispose();
+
+            for (int iter = 0; iter < 10 * RANDOM_MULTIPLIER; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("iter=" + iter);
+                }
+                IList<string> terms = new List<string>(Arrays.AsList("a", "b", "c", "d", "e", "f"));
+                int numTerms = TestUtil.NextInt(Random(), 1, terms.Count);
+                while (terms.Count > numTerms)
+                {
+                    terms.RemoveAt(Random().Next(terms.Count));
+                }
+
+                if (VERBOSE)
+                {
+                    Console.WriteLine("  terms=" + terms);
+                }
+
+                BooleanQuery q = new BooleanQuery();
+                foreach (string term in terms)
+                {
+                    q.Add(new BooleanClause(new TermQuery(new Term("field", term)), Occur.SHOULD));
+                }
+
+                Weight weight = s.CreateNormalizedWeight(q);
+
+                Scorer scorer = weight.GetScorer(s.m_leafContexts[0], null);
+
+                // First pass: just use .NextDoc() to gather all hits
+                IList<ScoreDoc> hits = new List<ScoreDoc>();
+                while (scorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+                {
+                    hits.Add(new ScoreDoc(scorer.DocID, scorer.GetScore()));
+                }
+
+                if (VERBOSE)
+                {
+                    Console.WriteLine("  " + hits.Count + " hits");
+                }
+
+                // Now, randomly next/advance through the list and
+                // verify exact match:
+                for (int iter2 = 0; iter2 < 10; iter2++)
+                {
+                    weight = s.CreateNormalizedWeight(q);
+                    scorer = weight.GetScorer(s.m_leafContexts[0], null);
+
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("  iter2=" + iter2);
+                    }
+
+                    int upto = -1;
+                    while (upto < hits.Count)
+                    {
+                        int nextUpto;
+                        int nextDoc;
+                        int left = hits.Count - upto;
+                        if (left == 1 || Random().NextBoolean())
+                        {
+                            // next
+                            nextUpto = 1 + upto;
+                            nextDoc = scorer.NextDoc();
+                        }
+                        else
+                        {
+                            // advance
+                            int inc = TestUtil.NextInt(Random(), 1, left - 1);
+                            nextUpto = inc + upto;
+                            nextDoc = scorer.Advance(hits[nextUpto].Doc);
+                        }
+
+                        if (nextUpto == hits.Count)
+                        {
+                            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, nextDoc);
+                        }
+                        else
+                        {
+                            ScoreDoc hit = hits[nextUpto];
+                            Assert.AreEqual(hit.Doc, nextDoc);
+                            // Test for precise float equality:
+                            Assert.IsTrue(hit.Score == scorer.GetScore(), "doc " + hit.Doc + " has wrong score: expected=" + hit.Score + " actual=" + scorer.GetScore());
+                        }
+                        upto = nextUpto;
+                    }
+                }
+            }
+
+            r.Dispose();
+            d.Dispose();
+        }
+
+        // LUCENE-4477 / LUCENE-4401:
+        [Test]
+        public virtual void TestBooleanSpanQuery()
+        {
+            bool failed = false;
+            int hits = 0;
+            Directory directory = NewDirectory();
+            Analyzer indexerAnalyzer = new MockAnalyzer(Random());
+
+            IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, indexerAnalyzer);
+            IndexWriter writer = new IndexWriter(directory, config);
+            string FIELD = "content";
+            Document d = new Document();
+            d.Add(new TextField(FIELD, "clockwork orange", Field.Store.YES));
+            writer.AddDocument(d);
+            writer.Dispose();
+
+            IndexReader indexReader = DirectoryReader.Open(directory);
+            IndexSearcher searcher = NewSearcher(indexReader);
+
+            BooleanQuery query = new BooleanQuery();
+            SpanQuery sq1 = new SpanTermQuery(new Term(FIELD, "clockwork"));
+            SpanQuery sq2 = new SpanTermQuery(new Term(FIELD, "clckwork"));
+            query.Add(sq1, Occur.SHOULD);
+            query.Add(sq2, Occur.SHOULD);
+            TopScoreDocCollector collector = TopScoreDocCollector.Create(1000, true);
+            searcher.Search(query, collector);
+            hits = collector.GetTopDocs().ScoreDocs.Length;
+            foreach (ScoreDoc scoreDoc in collector.GetTopDocs().ScoreDocs)
+            {
+                Console.WriteLine(scoreDoc.Doc);
+            }
+            indexReader.Dispose();
+            Assert.IsFalse(failed, "Bug in boolean query composed of span queries");
+            Assert.AreEqual(1, hits, "Bug in boolean query composed of span queries");
+            directory.Dispose();
+        }
+
+        // LUCENE-5487
+        [Test]
+        public virtual void TestInOrderWithMinShouldMatch()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "some text here", Field.Store.NO));
+            w.AddDocument(doc);
+            IndexReader r = w.Reader;
+            w.Dispose();
+            IndexSearcher s = new IndexSearcherAnonymousInnerClassHelper(this, r);
+            BooleanQuery bq = new BooleanQuery();
+            bq.Add(new TermQuery(new Term("field", "some")), Occur.SHOULD);
+            bq.Add(new TermQuery(new Term("field", "text")), Occur.SHOULD);
+            bq.Add(new TermQuery(new Term("field", "here")), Occur.SHOULD);
+            bq.MinimumNumberShouldMatch = 2;
+            s.Search(bq, 10);
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        private class IndexSearcherAnonymousInnerClassHelper : IndexSearcher
+        {
+            private readonly TestBooleanQuery OuterInstance;
+
+            public IndexSearcherAnonymousInnerClassHelper(TestBooleanQuery outerInstance, IndexReader r)
+                : base(r)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected override void Search(IList<AtomicReaderContext> leaves, Weight weight, ICollector collector)
+            {
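+                // LUCENE-5487: with minNrShouldMatch set, scoring must happen in order,
+                // so no out-of-order collector should ever reach this point.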
+                Assert.AreEqual(-1, collector.GetType().Name.IndexOf("OutOfOrder"));
+                base.Search(leaves, weight, collector);
+            }
+        }
+    }
+}
+ 
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestBooleanQueryVisitSubscorers.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestBooleanQueryVisitSubscorers.cs b/src/Lucene.Net.Tests/Search/TestBooleanQueryVisitSubscorers.cs
new file mode 100644
index 0000000..6a83426
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestBooleanQueryVisitSubscorers.cs
@@ -0,0 +1,206 @@
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using ChildScorer = Lucene.Net.Search.Scorer.ChildScorer;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Store = Field.Store;
+    using Term = Lucene.Net.Index.Term;
+    using TextField = TextField;
+
+    // TODO: refactor to a base class, that collects freqs from the scorer tree
+    // and test all queries with it
+    [TestFixture]
+    public class TestBooleanQueryVisitSubscorers : LuceneTestCase
+    {
+        internal Analyzer Analyzer;
+        internal IndexReader Reader;
+        internal IndexSearcher Searcher;
+        internal Directory Dir;
+
+        internal const string F1 = "title";
+        internal const string F2 = "body";
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Analyzer = new MockAnalyzer(Random());
+            Dir = NewDirectory();
+            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, Analyzer);
+            config.SetMergePolicy(NewLogMergePolicy()); // we will use docids to validate
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Dir, config);
+            writer.AddDocument(Doc("lucene", "lucene is a very popular search engine library"));
+            writer.AddDocument(Doc("solr", "solr is a very popular search server and is using lucene"));
+            writer.AddDocument(Doc("nutch", "nutch is an internet search engine with web crawler and is using lucene and hadoop"));
+            Reader = writer.Reader;
+            writer.Dispose();
+            Searcher = NewSearcher(Reader);
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        [Test]
+        public virtual void TestDisjunctions()
+        {
+            BooleanQuery bq = new BooleanQuery();
+            bq.Add(new TermQuery(new Term(F1, "lucene")), Occur.SHOULD);
+            bq.Add(new TermQuery(new Term(F2, "lucene")), Occur.SHOULD);
+            bq.Add(new TermQuery(new Term(F2, "search")), Occur.SHOULD);
+            IDictionary<int, int> tfs = GetDocCounts(Searcher, bq);
+            Assert.AreEqual(3, tfs.Count); // 3 documents
+            Assert.AreEqual(3, (int)tfs[0]); // f1:lucene + f2:lucene + f2:search
+            Assert.AreEqual(2, (int)tfs[1]); // f2:search + f2:lucene
+            Assert.AreEqual(2, (int)tfs[2]); // f2:search + f2:lucene
+        }
+
+        [Test]
+        public virtual void TestNestedDisjunctions()
+        {
+            BooleanQuery bq = new BooleanQuery();
+            bq.Add(new TermQuery(new Term(F1, "lucene")), Occur.SHOULD);
+            BooleanQuery bq2 = new BooleanQuery();
+            bq2.Add(new TermQuery(new Term(F2, "lucene")), Occur.SHOULD);
+            bq2.Add(new TermQuery(new Term(F2, "search")), Occur.SHOULD);
+            bq.Add(bq2, Occur.SHOULD);
+            IDictionary<int, int> tfs = GetDocCounts(Searcher, bq);
+            Assert.AreEqual(3, tfs.Count); // 3 documents
+            Assert.AreEqual(3, (int)tfs[0]); // f1:lucene + f2:lucene + f2:search
+            Assert.AreEqual(2, (int)tfs[1]); // f2:search + f2:lucene
+            Assert.AreEqual(2, (int)tfs[2]); // f2:search + f2:lucene
+        }
+
+        [Test]
+        public virtual void TestConjunctions()
+        {
+            BooleanQuery bq = new BooleanQuery();
+            bq.Add(new TermQuery(new Term(F2, "lucene")), Occur.MUST);
+            bq.Add(new TermQuery(new Term(F2, "is")), Occur.MUST);
+            IDictionary<int, int> tfs = GetDocCounts(Searcher, bq);
+            Assert.AreEqual(3, tfs.Count); // 3 documents
+            Assert.AreEqual(2, (int)tfs[0]); // f2:lucene + f2:is
+            Assert.AreEqual(3, (int)tfs[1]); // f2:is + f2:is + f2:lucene
+            Assert.AreEqual(3, (int)tfs[2]); // f2:is + f2:is + f2:lucene
+        }
+
+        internal static Document Doc(string v1, string v2)
+        {
+            Document doc = new Document();
+            doc.Add(new TextField(F1, v1, Store.YES));
+            doc.Add(new TextField(F2, v2, Store.YES));
+            return doc;
+        }
+
+        internal static IDictionary<int, int> GetDocCounts(IndexSearcher searcher, Query query)
+        {
+            MyCollector collector = new MyCollector();
+            searcher.Search(query, collector);
+            return collector.DocCounts;
+        }
+
+        internal class MyCollector : ICollector
+        {
+            internal TopDocsCollector<ScoreDoc> Collector;
+            internal int DocBase;
+
+            public readonly IDictionary<int, int> DocCounts = new Dictionary<int, int>();
+            internal readonly HashSet<Scorer> TqsSet = new HashSet<Scorer>();
+
+            internal MyCollector()
+            {
+                Collector = TopScoreDocCollector.Create(10, true);
+            }
+
+            public virtual bool AcceptsDocsOutOfOrder
+            {
+                get { return false; }
+            }
+
+            public virtual void Collect(int doc)
+            {
+                int freq = 0;
+                foreach (Scorer scorer in TqsSet)
+                {
+                    if (doc == scorer.DocID)
+                    {
+                        freq += scorer.Freq;
+                    }
+                }
+                DocCounts[doc + DocBase] = freq;
+                Collector.Collect(doc);
+            }
+
+            public virtual void SetNextReader(AtomicReaderContext context)
+            {
+                this.DocBase = context.DocBase;
+                Collector.SetNextReader(context);
+            }
+
+            public virtual void SetScorer(Scorer scorer)
+            {
+                Collector.SetScorer(scorer);
+                TqsSet.Clear();
+                FillLeaves(scorer, TqsSet);
+            }
+
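+            // Recursively walks the scorer tree via GetChildren(), gathering the leaf
+            // TermQuery scorers so Collect() can sum their per-document frequencies.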
+            internal virtual void FillLeaves(Scorer scorer, ISet<Scorer> set)
+            {
+                if (scorer.Weight.Query is TermQuery)
+                {
+                    set.Add(scorer);
+                }
+                else
+                {
+                    foreach (ChildScorer child in scorer.GetChildren())
+                    {
+                        FillLeaves(child.Child, set);
+                    }
+                }
+            }
+
+            public virtual TopDocs GetTopDocs()
+            {
+                return Collector.GetTopDocs();
+            }
+
+            public virtual int Freq(int doc)
+            {
+                return DocCounts[doc];
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs b/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs
new file mode 100644
index 0000000..8131ae3
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestBooleanScorer.cs
@@ -0,0 +1,330 @@
+using System;
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.Diagnostics;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using IBits = Lucene.Net.Util.IBits;
+    using BooleanWeight = Lucene.Net.Search.BooleanQuery.BooleanWeight;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestBooleanScorer : LuceneTestCase
+    {
+        private const string FIELD = "category";
+
+        [Test]
+        public virtual void TestMethod()
+        {
+            Directory directory = NewDirectory();
+
+            string[] values = new string[] { "1", "2", "3", "4" };
+
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);
+            for (int i = 0; i < values.Length; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField(FIELD, values[i], Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            IndexReader ir = writer.Reader;
+            writer.Dispose();
+
+            BooleanQuery booleanQuery1 = new BooleanQuery();
+            booleanQuery1.Add(new TermQuery(new Term(FIELD, "1")), Occur.SHOULD);
+            booleanQuery1.Add(new TermQuery(new Term(FIELD, "2")), Occur.SHOULD);
+
+            BooleanQuery query = new BooleanQuery();
+            query.Add(booleanQuery1, Occur.MUST);
+            query.Add(new TermQuery(new Term(FIELD, "9")), Occur.MUST_NOT);
+
+            IndexSearcher indexSearcher = NewSearcher(ir);
+            ScoreDoc[] hits = indexSearcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length, "Number of matched documents");
+            ir.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestEmptyBucketWithMoreDocs()
+        {
+            // this test checks the logic of nextDoc() when all sub scorers have docs
+            // beyond the first bucket (for example). Currently, the code relies on the
+            // 'more' variable to work properly, and this test ensures that if the logic
+            // changes, we have a test to back it up.
+
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);
+            writer.Commit();
+            IndexReader ir = writer.Reader;
+            writer.Dispose();
+            IndexSearcher searcher = NewSearcher(ir);
+            BooleanWeight weight = (BooleanWeight)(new BooleanQuery()).CreateWeight(searcher);
+
+            BulkScorer[] scorers = new BulkScorer[] {
+            new BulkScorerAnonymousInnerClassHelper()
+        };
+
+            BooleanScorer bs = new BooleanScorer(weight, false, 1, Arrays.AsList(scorers), new List<BulkScorer>(), scorers.Length);
+
+            IList<int> hits = new List<int>();
+            bs.Score(new CollectorAnonymousInnerClassHelper(this, hits));
+
+            Assert.AreEqual(1, hits.Count, "should have only 1 hit");
+            Assert.AreEqual(3000, (int)hits[0], "hit should have been docID=3000");
+            ir.Dispose();
+            directory.Dispose();
+        }
+
+        private class BulkScorerAnonymousInnerClassHelper : BulkScorer
+        {
+            private int doc = -1;
+
+            public override bool Score(ICollector c, int maxDoc)
+            {
+                Debug.Assert(doc == -1);
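+                // 3000 lies beyond BooleanScorer's first scoring window (historically
+                // 2048 docs), so collecting it exercises the 'more' logic described above.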
+                doc = 3000;
+                FakeScorer fs = new FakeScorer();
+                fs.doc = doc;
+                fs.score = 1.0f;
+                c.SetScorer(fs);
+                c.Collect(3000);
+                return false;
+            }
+        }
+
+        private class CollectorAnonymousInnerClassHelper : ICollector
+        {
+            private readonly TestBooleanScorer OuterInstance;
+
+            private IList<int> Hits;
+
+            public CollectorAnonymousInnerClassHelper(TestBooleanScorer outerInstance, IList<int> hits)
+            {
+                this.OuterInstance = outerInstance;
+                this.Hits = hits;
+            }
+
+            internal int docBase;
+
+            public virtual void SetScorer(Scorer scorer)
+            {
+            }
+
+            public virtual void Collect(int doc)
+            {
+                Hits.Add(docBase + doc);
+            }
+
+            public virtual void SetNextReader(AtomicReaderContext context)
+            {
+                docBase = context.DocBase;
+            }
+
+            public virtual bool AcceptsDocsOutOfOrder
+            {
+                get { return true; }
+            }
+        }
+
+        [Test]
+        public virtual void TestMoreThan32ProhibitedClauses()
+        {
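+            // BooleanScorer historically tracked prohibited clauses in a per-bucket
+            // 32-bit mask; 33 MUST_NOT clauses exercise the path beyond that limit.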
+            Directory d = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(new TextField("field", "0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33", Field.Store.NO));
+            w.AddDocument(doc);
+            doc = new Document();
+            doc.Add(new TextField("field", "33", Field.Store.NO));
+            w.AddDocument(doc);
+            IndexReader r = w.Reader;
+            w.Dispose();
+            // we don't wrap with AssertingIndexSearcher in order to have the original scorer in setScorer.
+            IndexSearcher s = NewSearcher(r, true, false);
+
+            BooleanQuery q = new BooleanQuery();
+            for (int term = 0; term < 33; term++)
+            {
+                q.Add(new BooleanClause(new TermQuery(new Term("field", "" + term)), Occur.MUST_NOT));
+            }
+            q.Add(new BooleanClause(new TermQuery(new Term("field", "33")), Occur.SHOULD));
+
+            int[] count = new int[1];
+            s.Search(q, new CollectorAnonymousInnerClassHelper2(this, doc, count));
+
+            Assert.AreEqual(1, count[0]);
+
+            r.Dispose();
+            d.Dispose();
+        }
+
+        private class CollectorAnonymousInnerClassHelper2 : ICollector
+        {
+            private readonly TestBooleanScorer OuterInstance;
+
+            private Document Doc;
+            private int[] Count;
+
+            public CollectorAnonymousInnerClassHelper2(TestBooleanScorer outerInstance, Document doc, int[] count)
+            {
+                this.OuterInstance = outerInstance;
+                this.Doc = doc;
+                this.Count = count;
+            }
+
+            public virtual void SetScorer(Scorer scorer)
+            {
+                // Make sure we got BooleanScorer: it passes a FakeScorer, not itself, to SetScorer.
+                Type clazz = scorer.GetType();
+                Assert.AreEqual(typeof(FakeScorer).Name, clazz.Name, "Scorer is implemented by wrong class");
+            }
+
+            public virtual void Collect(int doc)
+            {
+                Count[0]++;
+            }
+
+            public virtual void SetNextReader(AtomicReaderContext context)
+            {
+            }
+
+            public virtual bool AcceptsDocsOutOfOrder
+            {
+                get { return true; }
+            }
+        }
+
+        /// <summary>
+        /// Throws UOE if Weight.scorer is called </summary>
+        private class CrazyMustUseBulkScorerQuery : Query
+        {
+            public override string ToString(string field)
+            {
+                return "MustUseBulkScorerQuery";
+            }
+
+            public override Weight CreateWeight(IndexSearcher searcher)
+            {
+                return new WeightAnonymousInnerClassHelper(this);
+            }
+
+            private class WeightAnonymousInnerClassHelper : Weight
+            {
+                private readonly CrazyMustUseBulkScorerQuery OuterInstance;
+
+                public WeightAnonymousInnerClassHelper(CrazyMustUseBulkScorerQuery outerInstance)
+                {
+                    this.OuterInstance = outerInstance;
+                }
+
+                public override Explanation Explain(AtomicReaderContext context, int doc)
+                {
+                    throw new System.NotSupportedException();
+                }
+
+                public override Query Query
+                {
+                    get
+                    {
+                        return OuterInstance;
+                    }
+                }
+
+                public override float GetValueForNormalization()
+                {
+                    return 1.0f;
+                }
+
+                public override void Normalize(float norm, float topLevelBoost)
+                {
+                }
+
+                public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs)
+                {
+                    throw new System.NotSupportedException();
+                }
+
+                public override BulkScorer GetBulkScorer(AtomicReaderContext context, bool scoreDocsInOrder, IBits acceptDocs)
+                {
+                    return new BulkScorerAnonymousInnerClassHelper(this);
+                }
+
+                private class BulkScorerAnonymousInnerClassHelper : BulkScorer
+                {
+                    private readonly WeightAnonymousInnerClassHelper OuterInstance;
+
+                    public BulkScorerAnonymousInnerClassHelper(WeightAnonymousInnerClassHelper outerInstance)
+                    {
+                        this.OuterInstance = outerInstance;
+                    }
+
+                    public override bool Score(ICollector collector, int max)
+                    {
+                        collector.SetScorer(new FakeScorer());
+                        collector.Collect(0);
+                        return false;
+                    }
+                }
+            }
+        }
+
+        /// <summary>
+        /// Make sure BooleanScorer can embed another
+        ///  BooleanScorer.
+        /// </summary>
+        [Test]
+        public virtual void TestEmbeddedBooleanScorer()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "doctors are people who prescribe medicines of which they know little, to cure diseases of which they know less, in human beings of whom they know nothing", Field.Store.NO));
+            w.AddDocument(doc);
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            IndexSearcher s = NewSearcher(r);
+            BooleanQuery q1 = new BooleanQuery();
+            q1.Add(new TermQuery(new Term("field", "little")), Occur.SHOULD);
+            q1.Add(new TermQuery(new Term("field", "diseases")), Occur.SHOULD);
+
+            BooleanQuery q2 = new BooleanQuery();
+            q2.Add(q1, Occur.SHOULD);
+            q2.Add(new CrazyMustUseBulkScorerQuery(), Occur.SHOULD);
+
+            Assert.AreEqual(1, s.Search(q2, 10).TotalHits);
+            r.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestCachingCollector.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestCachingCollector.cs b/src/Lucene.Net.Tests/Search/TestCachingCollector.cs
new file mode 100644
index 0000000..a0ed92a
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestCachingCollector.cs
@@ -0,0 +1,252 @@
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using System;
+
+    /*
+             * Licensed to the Apache Software Foundation (ASF) under one or more
+             * contributor license agreements.  See the NOTICE file distributed with
+             * this work for additional information regarding copyright ownership.
+             * The ASF licenses this file to You under the Apache License, Version 2.0
+             * (the "License"); you may not use this file except in compliance with
+             * the License.  You may obtain a copy of the License at
+             *
+             *     http://www.apache.org/licenses/LICENSE-2.0
+             *
+             * Unless required by applicable law or agreed to in writing, software
+             * distributed under the License is distributed on an "AS IS" BASIS,
+             * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+             * See the License for the specific language governing permissions and
+             * limitations under the License.
+             */
+
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    [TestFixture]
+    public class TestCachingCollector : LuceneTestCase
+    {
+        private const double ONE_BYTE = 1.0 / (1024 * 1024); // 1 byte, expressed in MB (the unit used for the cache's RAM limit)
+
+        private class MockScorer : Scorer
+        {
+            internal MockScorer()
+                : base((Weight)null)
+            {
+            }
+
+            public override float GetScore()
+            {
+                return 0;
+            }
+
+            public override int Freq
+            {
+                get { return 0; }
+            }
+
+            public override int DocID
+            {
+                get { return 0; }
+            }
+
+            public override int NextDoc()
+            {
+                return 0;
+            }
+
+            public override int Advance(int target)
+            {
+                return 0;
+            }
+
+            public override long GetCost()
+            {
+                return 1;
+            }
+        }
+
+        private class NoOpCollector : ICollector
+        {
+            internal readonly bool AcceptDocsOutOfOrder;
+
+            public NoOpCollector(bool acceptDocsOutOfOrder)
+            {
+                this.AcceptDocsOutOfOrder = acceptDocsOutOfOrder;
+            }
+
+            public virtual void SetScorer(Scorer scorer)
+            {
+            }
+
+            public virtual void Collect(int doc)
+            {
+            }
+
+            public virtual void SetNextReader(AtomicReaderContext context)
+            {
+            }
+
+            public virtual bool AcceptsDocsOutOfOrder
+            {
+                get { return AcceptDocsOutOfOrder; }
+            }
+        }
+
+        [Test]
+        public virtual void TestBasic()
+        {
+            foreach (bool cacheScores in new bool[] { false, true })
+            {
+                CachingCollector cc = CachingCollector.Create(new NoOpCollector(false), cacheScores, 1.0);
+                cc.SetScorer(new MockScorer());
+
+                // collect 1000 docs
+                for (int i = 0; i < 1000; i++)
+                {
+                    cc.Collect(i);
+                }
+
+                // now replay them
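+                // the collector below asserts each doc arrives as prevDocID + 1, i.e. strictly in order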
+                cc.Replay(new CollectorAnonymousInnerClassHelper(this));
+            }
+        }
+
+        private class CollectorAnonymousInnerClassHelper : ICollector
+        {
+            private readonly TestCachingCollector OuterInstance;
+
+            public CollectorAnonymousInnerClassHelper(TestCachingCollector outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+                prevDocID = -1;
+            }
+
+            internal int prevDocID;
+
+            public virtual void SetScorer(Scorer scorer)
+            {
+            }
+
+            public virtual void SetNextReader(AtomicReaderContext context)
+            {
+            }
+
+            public virtual void Collect(int doc)
+            {
+                Assert.AreEqual(prevDocID + 1, doc);
+                prevDocID = doc;
+            }
+
+            public virtual bool AcceptsDocsOutOfOrder
+            {
+                get { return false; }
+            }
+        }
+
+        [Test]
+        public virtual void TestIllegalStateOnReplay()
+        {
+            CachingCollector cc = CachingCollector.Create(new NoOpCollector(false), true, 50 * ONE_BYTE);
+            cc.SetScorer(new MockScorer());
+
+            // collect 130 docs; this should be enough to trigger a cache abort.
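+            // with cacheScores=true each doc costs ~8 bytes (doc ID + score), so 130 docs far exceed the 50-byte budget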
+            for (int i = 0; i < 130; i++)
+            {
+                cc.Collect(i);
+            }
+
+            Assert.IsFalse(cc.IsCached, "CachingCollector should not be cached due to low memory limit");
+
+            try
+            {
+                cc.Replay(new NoOpCollector(false));
+                Assert.Fail("replay should fail if CachingCollector is not cached");
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException e)
+#pragma warning restore 168
+            {
+                // expected
+            }
+        }
+
+        [Test]
+        public virtual void TestIllegalCollectorOnReplay()
+        {
+            // tests that the Collector passed to Replay() has an out-of-order mode that
+            // is compatible with the Collector passed to the ctor
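+            //
+            // the rule being exercised: an in-order cache can be replayed to any collector,
+            // but an out-of-order cache can only be replayed to a collector that accepts
+            // docs out of order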
+
+            // 'src' Collector does not support out-of-order
+            CachingCollector cc = CachingCollector.Create(new NoOpCollector(false), true, 50 * ONE_BYTE);
+            cc.SetScorer(new MockScorer());
+            for (int i = 0; i < 10; i++)
+            {
+                cc.Collect(i);
+            }
+            cc.Replay(new NoOpCollector(true)); // this call should not fail
+            cc.Replay(new NoOpCollector(false)); // this call should not fail
+
+            // 'src' Collector supports out-of-order
+            cc = CachingCollector.Create(new NoOpCollector(true), true, 50 * ONE_BYTE);
+            cc.SetScorer(new MockScorer());
+            for (int i = 0; i < 10; i++)
+            {
+                cc.Collect(i);
+            }
+            cc.Replay(new NoOpCollector(true)); // this call should not fail
+            try
+            {
+                cc.Replay(new NoOpCollector(false)); // this call should fail
+                Assert.Fail("should have failed if an in-order Collector was given to replay(), " + "while CachingCollector was initialized with out-of-order collection");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // ok
+            }
+        }
+
+        [Test]
+        public virtual void TestCachedArraysAllocation()
+        {
+            // tests the cached-array allocation -- if 'nextLength' were too high,
+            // caching would terminate even though a smaller length would suffice.
+
+            // size the RAM limit for exactly numDocs (150 + random(10000)) documents
+            int numDocs = Random().Next(10000) + 150;
+            foreach (bool cacheScores in new bool[] { false, true })
+            {
+                int bytesPerDoc = cacheScores ? 8 : 4;
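+                // 4 bytes for each cached doc ID, plus 4 more for the float score when caching scores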
+                CachingCollector cc = CachingCollector.Create(new NoOpCollector(false), cacheScores, bytesPerDoc * ONE_BYTE * numDocs);
+                cc.SetScorer(new MockScorer());
+                for (int i = 0; i < numDocs; i++)
+                {
+                    cc.Collect(i);
+                }
+                Assert.IsTrue(cc.IsCached);
+
+                // collecting one document beyond numDocs should terminate caching
+                cc.Collect(numDocs);
+                Assert.IsFalse(cc.IsCached);
+            }
+        }
+
+        [Test]
+        public virtual void TestNoWrappedCollector()
+        {
+            foreach (bool cacheScores in new bool[] { false, true })
+            {
+                // create w/ null wrapped collector, and test that the methods work
+                CachingCollector cc = CachingCollector.Create(true, cacheScores, 50 * ONE_BYTE);
+                cc.SetNextReader(null);
+                cc.SetScorer(new MockScorer());
+                cc.Collect(0);
+
+                Assert.IsTrue(cc.IsCached);
+                cc.Replay(new NoOpCollector(true));
+            }
+        }
+    }
+}
\ No newline at end of file


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestLogMergePolicy.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestLogMergePolicy.cs b/src/Lucene.Net.Tests/Index/TestLogMergePolicy.cs
new file mode 100644
index 0000000..921b719
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestLogMergePolicy.cs
@@ -0,0 +1,42 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    public class TestLogMergePolicy : BaseMergePolicyTestCase
+    {
+        protected internal override MergePolicy MergePolicy()
+        {
+            return NewLogMergePolicy(Random());
+        }
+
+        #region BaseMergePolicyTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestForceMergeNotNeeded()
+        {
+            base.TestForceMergeNotNeeded();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestLongPostings.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestLongPostings.cs b/src/Lucene.Net.Tests/Index/TestLongPostings.cs
new file mode 100644
index 0000000..0f06912
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestLongPostings.cs
@@ -0,0 +1,570 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using System;
+using System.Diagnostics;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using System.IO;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using FixedBitSet = Lucene.Net.Util.FixedBitSet;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+    using TokenStream = Lucene.Net.Analysis.TokenStream;
+
+    [SuppressCodecs("SimpleText", "Memory", "Direct")]
+    [TestFixture]
+    public class TestLongPostings : LuceneTestCase
+    {
+        // Produces a realistic unicode random string that
+        // survives MockAnalyzer unchanged:
+        private string GetRandomTerm(string other)
+        {
+            Analyzer a = new MockAnalyzer(Random());
+            while (true)
+            {
+                string s = TestUtil.RandomRealisticUnicodeString(Random());
+                if (other != null && s.Equals(other))
+                {
+                    continue;
+                }
+                IOException priorException = null;
+                TokenStream ts = a.TokenStream("foo", new StringReader(s));
+                try
+                {
+                    ITermToBytesRefAttribute termAtt = ts.GetAttribute<ITermToBytesRefAttribute>();
+                    BytesRef termBytes = termAtt.BytesRef;
+                    ts.Reset();
+
+                    int count = 0;
+                    bool changed = false;
+
+                    while (ts.IncrementToken())
+                    {
+                        termAtt.FillBytesRef();
+                        if (count == 0 && !termBytes.Utf8ToString().Equals(s))
+                        {
+                            // The value was changed during analysis.  Keep iterating so the
+                            // tokenStream is exhausted.
+                            changed = true;
+                        }
+                        count++;
+                    }
+
+                    ts.End();
+                    // Did we iterate just once and the value was unchanged?
+                    if (!changed && count == 1)
+                    {
+                        return s;
+                    }
+                }
+                catch (IOException e)
+                {
+                    priorException = e;
+                }
+                finally
+                {
+                    IOUtils.CloseWhileHandlingException(priorException, ts);
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestLongPostings_Mem()
+        {
+            // Don't use TestUtil.getTempDir so that we own the
+            // randomness (i.e., the same seed will point to the same dir):
+            Directory dir = NewFSDirectory(CreateTempDir("longpostings" + "." + Random().NextLong()));
+
+            int NUM_DOCS = AtLeast(2000);
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: NUM_DOCS=" + NUM_DOCS);
+            }
+
+            string s1 = GetRandomTerm(null);
+            string s2 = GetRandomTerm(s1);
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: s1=" + s1 + " s2=" + s2);
+                /*
+                for(int idx=0;idx<s1.Length();idx++) {
+                  System.out.println("  s1 ch=0x" + Integer.toHexString(s1.charAt(idx)));
+                }
+                for(int idx=0;idx<s2.Length();idx++) {
+                  System.out.println("  s2 ch=0x" + Integer.toHexString(s2.charAt(idx)));
+                }
+                */
+            }
+
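+            // randomly assign each doc either s1 or s2; the isS1 bitset is the oracle used to verify postings later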
+            FixedBitSet isS1 = new FixedBitSet(NUM_DOCS);
+            for (int idx = 0; idx < NUM_DOCS; idx++)
+            {
+                if (Random().NextBoolean())
+                {
+                    isS1.Set(idx);
+                }
+            }
+
+            IndexReader r;
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMergePolicy(NewLogMergePolicy());
+            iwc.SetRAMBufferSizeMB(16.0 + 16.0 * Random().NextDouble());
+            iwc.SetMaxBufferedDocs(-1);
+            RandomIndexWriter riw = new RandomIndexWriter(Random(), dir, iwc);
+
+            for (int idx = 0; idx < NUM_DOCS; idx++)
+            {
+                Document doc = new Document();
+                string s = isS1.Get(idx) ? s1 : s2;
+                Field f = NewTextField("field", s, Field.Store.NO);
+                int count = TestUtil.NextInt(Random(), 1, 4);
+                for (int ct = 0; ct < count; ct++)
+                {
+                    doc.Add(f);
+                }
+                riw.AddDocument(doc);
+            }
+
+            r = riw.Reader;
+            riw.Dispose();
+
+            /*
+            if (VERBOSE) {
+              System.out.println("TEST: terms");
+              TermEnum termEnum = r.Terms();
+              while(termEnum.Next()) {
+                System.out.println("  term=" + termEnum.Term() + " len=" + termEnum.Term().Text().Length());
+                Assert.IsTrue(termEnum.DocFreq() > 0);
+                System.out.println("    s1?=" + (termEnum.Term().Text().equals(s1)) + " s1len=" + s1.Length());
+                System.out.println("    s2?=" + (termEnum.Term().Text().equals(s2)) + " s2len=" + s2.Length());
+                final String s = termEnum.Term().Text();
+                for(int idx=0;idx<s.Length();idx++) {
+                  System.out.println("      ch=0x" + Integer.toHexString(s.charAt(idx)));
+                }
+              }
+            }
+            */
+
+            Assert.AreEqual(NUM_DOCS, r.NumDocs);
+            Assert.IsTrue(r.DocFreq(new Term("field", s1)) > 0);
+            Assert.IsTrue(r.DocFreq(new Term("field", s2)) > 0);
+
+            int num = AtLeast(1000);
+            for (int iter = 0; iter < num; iter++)
+            {
+                string term;
+                bool doS1;
+                if (Random().NextBoolean())
+                {
+                    term = s1;
+                    doS1 = true;
+                }
+                else
+                {
+                    term = s2;
+                    doS1 = false;
+                }
+
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: iter=" + iter + " doS1=" + doS1);
+                }
+
+                DocsAndPositionsEnum postings = MultiFields.GetTermPositionsEnum(r, null, "field", new BytesRef(term));
+
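+                // walk the postings, randomly interleaving NextDoc() (1/3 of the time) and Advance(),
+                // checking every landing doc against the isS1 oracle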
+                int docID = -1;
+                while (docID < DocIdSetIterator.NO_MORE_DOCS)
+                {
+                    int what = Random().Next(3);
+                    if (what == 0)
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: docID=" + docID + "; do next()");
+                        }
+                        // nextDoc
+                        int expected = docID + 1;
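+                        // scan the isS1 oracle forward for the next doc that actually contains this term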
+                        while (true)
+                        {
+                            if (expected == NUM_DOCS)
+                            {
+                                expected = int.MaxValue;
+                                break;
+                            }
+                            else if (isS1.Get(expected) == doS1)
+                            {
+                                break;
+                            }
+                            else
+                            {
+                                expected++;
+                            }
+                        }
+                        docID = postings.NextDoc();
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("  got docID=" + docID);
+                        }
+                        Assert.AreEqual(expected, docID);
+                        if (docID == DocIdSetIterator.NO_MORE_DOCS)
+                        {
+                            break;
+                        }
+
+                        if (Random().Next(6) == 3)
+                        {
+                            int freq = postings.Freq;
+                            Assert.IsTrue(freq >= 1 && freq <= 4);
+                            for (int pos = 0; pos < freq; pos++)
+                            {
+                                Assert.AreEqual(pos, postings.NextPosition());
+                                if (Random().NextBoolean())
+                                {
+                                    var dummy = postings.GetPayload();
+                                    if (Random().NextBoolean())
+                                    {
+                                        dummy = postings.GetPayload(); // get it again
+                                    }
+                                }
+                            }
+                        }
+                    }
+                    else
+                    {
+                        // advance
+                        int targetDocID;
+                        if (docID == -1)
+                        {
+                            targetDocID = Random().Next(NUM_DOCS + 1);
+                        }
+                        else
+                        {
+                            targetDocID = docID + TestUtil.NextInt(Random(), 1, NUM_DOCS - docID);
+                        }
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: docID=" + docID + "; do advance(" + targetDocID + ")");
+                        }
+                        int expected = targetDocID;
+                        while (true)
+                        {
+                            if (expected == NUM_DOCS)
+                            {
+                                expected = int.MaxValue;
+                                break;
+                            }
+                            else if (isS1.Get(expected) == doS1)
+                            {
+                                break;
+                            }
+                            else
+                            {
+                                expected++;
+                            }
+                        }
+
+                        docID = postings.Advance(targetDocID);
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("  got docID=" + docID);
+                        }
+                        Assert.AreEqual(expected, docID);
+                        if (docID == DocIdSetIterator.NO_MORE_DOCS)
+                        {
+                            break;
+                        }
+
+                        if (Random().Next(6) == 3)
+                        {
+                            int freq = postings.Freq;
+                            Assert.IsTrue(freq >= 1 && freq <= 4);
+                            for (int pos = 0; pos < freq; pos++)
+                            {
+                                Assert.AreEqual(pos, postings.NextPosition());
+                                if (Random().NextBoolean())
+                                {
+                                    var dummy = postings.GetPayload();
+                                    if (Random().NextBoolean())
+                                    {
+                                        dummy = postings.GetPayload(); // get it again
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        // a weaker form of TestLongPostings_Mem, one that doesn't check positions
+        [Test]
+        public virtual void TestLongPostingsNoPositions()
+        {
+            DoTestLongPostingsNoPositions(IndexOptions.DOCS_ONLY);
+            DoTestLongPostingsNoPositions(IndexOptions.DOCS_AND_FREQS);
+        }
+
+        public virtual void DoTestLongPostingsNoPositions(IndexOptions options)
+        {
+            // Don't use TestUtil.getTempDir so that we own the
+            // randomness (i.e., the same seed will point to the same dir):
+            Directory dir = NewFSDirectory(CreateTempDir("longpostings" + "." + Random().NextLong()));
+
+            int NUM_DOCS = AtLeast(2000);
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: NUM_DOCS=" + NUM_DOCS);
+            }
+
+            string s1 = GetRandomTerm(null);
+            string s2 = GetRandomTerm(s1);
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: s1=" + s1 + " s2=" + s2);
+                /*
+                for(int idx=0;idx<s1.Length();idx++) {
+                  System.out.println("  s1 ch=0x" + Integer.toHexString(s1.charAt(idx)));
+                }
+                for(int idx=0;idx<s2.Length();idx++) {
+                  System.out.println("  s2 ch=0x" + Integer.toHexString(s2.charAt(idx)));
+                }
+                */
+            }
+
+            FixedBitSet isS1 = new FixedBitSet(NUM_DOCS);
+            for (int idx = 0; idx < NUM_DOCS; idx++)
+            {
+                if (Random().NextBoolean())
+                {
+                    isS1.Set(idx);
+                }
+            }
+
+            IndexReader r;
+            if (true)
+            {
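+                // always taken; the unreachable else branch below (opening an existing index)
+                // appears to be retained, with its pragma, to mirror the original Java test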
+                IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMergePolicy(NewLogMergePolicy());
+                iwc.SetRAMBufferSizeMB(16.0 + 16.0 * Random().NextDouble());
+                iwc.SetMaxBufferedDocs(-1);
+                RandomIndexWriter riw = new RandomIndexWriter(Random(), dir, iwc);
+
+                FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+                ft.IndexOptions = options;
+                for (int idx = 0; idx < NUM_DOCS; idx++)
+                {
+                    Document doc = new Document();
+                    string s = isS1.Get(idx) ? s1 : s2;
+                    Field f = NewField("field", s, ft);
+                    int count = TestUtil.NextInt(Random(), 1, 4);
+                    for (int ct = 0; ct < count; ct++)
+                    {
+                        doc.Add(f);
+                    }
+                    riw.AddDocument(doc);
+                }
+
+                r = riw.Reader;
+                riw.Dispose();
+            }
+            else
+            {
+#pragma warning disable 162
+                r = DirectoryReader.Open(dir);
+#pragma warning restore 162
+            }
+
+            /*
+            if (VERBOSE) {
+              System.out.println("TEST: terms");
+              TermEnum termEnum = r.Terms();
+              while(termEnum.Next()) {
+                System.out.println("  term=" + termEnum.Term() + " len=" + termEnum.Term().Text().Length());
+                Assert.IsTrue(termEnum.DocFreq() > 0);
+                System.out.println("    s1?=" + (termEnum.Term().Text().equals(s1)) + " s1len=" + s1.Length());
+                System.out.println("    s2?=" + (termEnum.Term().Text().equals(s2)) + " s2len=" + s2.Length());
+                final String s = termEnum.Term().Text();
+                for(int idx=0;idx<s.Length();idx++) {
+                  System.out.println("      ch=0x" + Integer.toHexString(s.charAt(idx)));
+                }
+              }
+            }
+            */
+
+            Assert.AreEqual(NUM_DOCS, r.NumDocs);
+            Assert.IsTrue(r.DocFreq(new Term("field", s1)) > 0);
+            Assert.IsTrue(r.DocFreq(new Term("field", s2)) > 0);
+
+            int num = AtLeast(1000);
+            for (int iter = 0; iter < num; iter++)
+            {
+                string term;
+                bool doS1;
+                if (Random().NextBoolean())
+                {
+                    term = s1;
+                    doS1 = true;
+                }
+                else
+                {
+                    term = s2;
+                    doS1 = false;
+                }
+
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: iter=" + iter + " doS1=" + doS1 + " term=" + term);
+                }
+
+                DocsEnum docs;
+                DocsEnum postings;
+
+                if (options == IndexOptions.DOCS_ONLY)
+                {
+                    docs = TestUtil.Docs(Random(), r, "field", new BytesRef(term), null, null, DocsEnum.FLAG_NONE);
+                    postings = null;
+                }
+                else
+                {
+                    docs = postings = TestUtil.Docs(Random(), r, "field", new BytesRef(term), null, null, DocsEnum.FLAG_FREQS);
+                    Debug.Assert(postings != null);
+                }
+                Debug.Assert(docs != null);
+
+                int docID = -1;
+                while (docID < DocIdSetIterator.NO_MORE_DOCS)
+                {
+                    int what = Random().Next(3);
+                    if (what == 0)
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: docID=" + docID + "; do next()");
+                        }
+                        // nextDoc
+                        int expected = docID + 1;
+                        while (true)
+                        {
+                            if (expected == NUM_DOCS)
+                            {
+                                expected = int.MaxValue;
+                                break;
+                            }
+                            else if (isS1.Get(expected) == doS1)
+                            {
+                                break;
+                            }
+                            else
+                            {
+                                expected++;
+                            }
+                        }
+                        docID = docs.NextDoc();
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("  got docID=" + docID);
+                        }
+                        Assert.AreEqual(expected, docID);
+                        if (docID == DocIdSetIterator.NO_MORE_DOCS)
+                        {
+                            break;
+                        }
+
+                        if (Random().Next(6) == 3 && postings != null)
+                        {
+                            int freq = postings.Freq;
+                            Assert.IsTrue(freq >= 1 && freq <= 4);
+                        }
+                    }
+                    else
+                    {
+                        // advance
+                        int targetDocID;
+                        if (docID == -1)
+                        {
+                            targetDocID = Random().Next(NUM_DOCS + 1);
+                        }
+                        else
+                        {
+                            targetDocID = docID + TestUtil.NextInt(Random(), 1, NUM_DOCS - docID);
+                        }
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: docID=" + docID + "; do advance(" + targetDocID + ")");
+                        }
+                        int expected = targetDocID;
+                        while (true)
+                        {
+                            if (expected == NUM_DOCS)
+                            {
+                                expected = int.MaxValue;
+                                break;
+                            }
+                            else if (isS1.Get(expected) == doS1)
+                            {
+                                break;
+                            }
+                            else
+                            {
+                                expected++;
+                            }
+                        }
+
+                        docID = docs.Advance(targetDocID);
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("  got docID=" + docID);
+                        }
+                        Assert.AreEqual(expected, docID);
+                        if (docID == DocIdSetIterator.NO_MORE_DOCS)
+                        {
+                            break;
+                        }
+
+                        if (Random().Next(6) == 3 && postings != null)
+                        {
+                            int freq = postings.Freq;
+                            Assert.IsTrue(freq >= 1 && freq <= 4, "got invalid freq=" + freq);
+                        }
+                    }
+                }
+            }
+            r.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs b/src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs
new file mode 100644
index 0000000..cbbdc72
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs
@@ -0,0 +1,171 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TFIDFSimilarity = Lucene.Net.Search.Similarities.TFIDFSimilarity;
+
+    /// <summary>
+    /// Tests the maxTermFrequency statistic in FieldInvertState
+    /// </summary>
+    [TestFixture]
+    public class TestMaxTermFrequency : LuceneTestCase
+    {
+        internal Directory Dir;
+        internal IndexReader Reader;
+        /* expected maxTermFrequency values for our documents */
+        internal List<int?> Expected = new List<int?>();
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Dir = NewDirectory();
+            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true)).SetMergePolicy(NewLogMergePolicy());
+            config.SetSimilarity(new TestSimilarity(this));
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Dir, config);
+            Document doc = new Document();
+            Field foo = NewTextField("foo", "", Field.Store.NO);
+            doc.Add(foo);
+            for (int i = 0; i < 100; i++)
+            {
+                foo.SetStringValue(AddValue());
+                writer.AddDocument(doc);
+            }
+            Reader = writer.Reader;
+            writer.Dispose();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        [Test]
+        public virtual void Test()
+        {
+            NumericDocValues fooNorms = MultiDocValues.GetNormValues(Reader, "foo");
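+            // TestSimilarity encoded maxTermFrequency as a single signed byte norm, so mask with 0xff to compare the raw value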
+            for (int i = 0; i < Reader.MaxDoc; i++)
+            {
+                Assert.AreEqual((int)Expected[i], fooNorms.Get(i) & 0xff);
+            }
+        }
+
+        /// <summary>
+        /// Makes a bunch of single-char tokens (the max freq will at most be 255).
+        /// shuffles them around, and returns the whole list with Arrays.toString().
+        /// this works fine because we use lettertokenizer.
+        /// puts the max-frequency term into expected, to be checked against the norm.
+        /// </summary>
+        private string AddValue()
+        {
+            IList<string> terms = new List<string>();
+            int maxCeiling = TestUtil.NextInt(Random(), 0, 255);
+            int max = 0;
+            for (char ch = 'a'; ch <= 'z'; ch++)
+            {
+                int num = TestUtil.NextInt(Random(), 0, maxCeiling);
+                for (int i = 0; i < num; i++)
+                {
+                    terms.Add(char.ToString(ch));
+                }
+                max = Math.Max(max, num);
+            }
+            Expected.Add(max);
+
+            Collections.Shuffle(terms);
+            return Arrays.ToString(terms.ToArray());
+        }
+
+        /// <summary>
+        /// Simple similarity that encodes maxTermFrequency directly as a byte
+        /// </summary>
+        internal class TestSimilarity : TFIDFSimilarity
+        {
+            private readonly TestMaxTermFrequency OuterInstance;
+
+            public TestSimilarity(TestMaxTermFrequency outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override float LengthNorm(FieldInvertState state)
+            {
+                return state.MaxTermFrequency;
+            }
+
+            public override long EncodeNormValue(float f)
+            {
+                return (sbyte)f;
+            }
+
+            public override float DecodeNormValue(long norm)
+            {
+                return norm;
+            }
+
+            public override float Coord(int overlap, int maxOverlap)
+            {
+                return 0;
+            }
+
+            public override float QueryNorm(float sumOfSquaredWeights)
+            {
+                return 0;
+            }
+
+            public override float Tf(float freq)
+            {
+                return 0;
+            }
+
+            public override float Idf(long docFreq, long numDocs)
+            {
+                return 0;
+            }
+
+            public override float SloppyFreq(int distance)
+            {
+                return 0;
+            }
+
+            public override float ScorePayload(int doc, int start, int end, BytesRef payload)
+            {
+                return 0;
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestMixedCodecs.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestMixedCodecs.cs b/src/Lucene.Net.Tests/Index/TestMixedCodecs.cs
new file mode 100644
index 0000000..56ba215
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestMixedCodecs.cs
@@ -0,0 +1,107 @@
+using System;
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [SuppressCodecs("Lucene3x")]
+    [TestFixture]
+    public class TestMixedCodecs : LuceneTestCase
+    {
+        [Test]
+        public virtual void Test()
+        {
+            int NUM_DOCS = AtLeast(1000);
+
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = null;
+
+            int docsLeftInThisSegment = 0;
+
+            int docUpto = 0;
+            while (docUpto < NUM_DOCS)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: " + docUpto + " of " + NUM_DOCS);
+                }
+                if (docsLeftInThisSegment == 0)
+                {
+                    IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+                    if (Random().NextBoolean())
+                    {
+                        // Make sure we aggressively mix in a second
+                        // codec (Lucene46 here) so that segments are
+                        // written with different formats...
+                        iwc.SetCodec(Codec.ForName("Lucene46"));
+                    }
+                    if (w != null)
+                    {
+                        w.Dispose();
+                    }
+                    w = new RandomIndexWriter(Random(), dir, iwc);
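+                    // give this writer 10-100 docs before rolling over to a fresh writer (and possibly codec)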
+                    docsLeftInThisSegment = TestUtil.NextInt(Random(), 10, 100);
+                }
+                Document doc = new Document();
+                doc.Add(NewStringField("id", Convert.ToString(docUpto), Field.Store.YES));
+                w.AddDocument(doc);
+                docUpto++;
+                docsLeftInThisSegment--;
+            }
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: now delete...");
+            }
+
+            // Randomly delete half the docs:
+            HashSet<int?> deleted = new HashSet<int?>();
+            while (deleted.Count < NUM_DOCS / 2)
+            {
+                int? toDelete = Random().Next(NUM_DOCS);
+                if (!deleted.Contains(toDelete))
+                {
+                    deleted.Add(toDelete);
+                    w.DeleteDocuments(new Term("id", Convert.ToString(toDelete)));
+                    if (Random().Next(17) == 6)
+                    {
+                        IndexReader r = w.Reader;
+                        Assert.AreEqual(NUM_DOCS - deleted.Count, r.NumDocs);
+                        r.Dispose();
+                    }
+                }
+            }
+
+            w.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestMixedDocValuesUpdates.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestMixedDocValuesUpdates.cs b/src/Lucene.Net.Tests/Index/TestMixedDocValuesUpdates.cs
new file mode 100644
index 0000000..4a3741d
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestMixedDocValuesUpdates.cs
@@ -0,0 +1,576 @@
+using System;
+using System.Threading;
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.IO;
+    using BinaryDocValuesField = BinaryDocValuesField;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using NumericDocValuesField = NumericDocValuesField;
+    using Store = Field.Store;
+    using StringField = StringField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using Attributes;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [SuppressCodecs("Appending", "Lucene3x", "Lucene40", "Lucene41", "Lucene42", "Lucene45")]
+    [TestFixture]
+    public class TestMixedDocValuesUpdates : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestManyReopensAndFields()
+        {
+            Directory dir = NewDirectory();
+            Random random = Random();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
+            LogMergePolicy lmp = NewLogMergePolicy();
+            lmp.MergeFactor = 3; // merge often
+            conf.SetMergePolicy(lmp);
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            bool isNRT = random.NextBoolean();
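+            // exercise both the NRT reader path and the commit-then-reopen path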
+            DirectoryReader reader;
+            if (isNRT)
+            {
+                reader = DirectoryReader.Open(writer, true);
+            }
+            else
+            {
+                writer.Commit();
+                reader = DirectoryReader.Open(dir);
+            }
+
+            int numFields = random.Next(4) + 3; // 3-7
+            int numNDVFields = random.Next(numFields / 2) + 1; // 1-3
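+            // fields f0..f(numNDVFields-1) carry numeric doc values; the rest carry binary doc values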
+            long[] fieldValues = new long[numFields];
+            bool[] fieldHasValue = new bool[numFields];
+            Arrays.Fill(fieldHasValue, true);
+            for (int i = 0; i < fieldValues.Length; i++)
+            {
+                fieldValues[i] = 1;
+            }
+
+            int numRounds = AtLeast(15);
+            int docID = 0;
+            for (int i = 0; i < numRounds; i++)
+            {
+                int numDocs = AtLeast(5);
+                //      System.out.println("[" + Thread.currentThread().getName() + "]: round=" + i + ", numDocs=" + numDocs);
+                for (int j = 0; j < numDocs; j++)
+                {
+                    Document doc = new Document();
+                    doc.Add(new StringField("id", "doc-" + docID, Store.NO));
+                    doc.Add(new StringField("key", "all", Store.NO)); // update key
+                    // add all fields with their current value
+                    for (int f = 0; f < fieldValues.Length; f++)
+                    {
+                        if (f < numNDVFields)
+                        {
+                            doc.Add(new NumericDocValuesField("f" + f, fieldValues[f]));
+                        }
+                        else
+                        {
+                            doc.Add(new BinaryDocValuesField("f" + f, TestBinaryDocValuesUpdates.ToBytes(fieldValues[f])));
+                        }
+                    }
+                    writer.AddDocument(doc);
+                    ++docID;
+                }
+
+                // if a field's value was unset before, unset it in all the newly added documents too
+                for (int field = 0; field < fieldHasValue.Length; field++)
+                {
+                    if (!fieldHasValue[field])
+                    {
+                        if (field < numNDVFields)
+                        {
+                            writer.UpdateNumericDocValue(new Term("key", "all"), "f" + field, null);
+                        }
+                        else
+                        {
+                            writer.UpdateBinaryDocValue(new Term("key", "all"), "f" + field, null);
+                        }
+                    }
+                }
+
+                int fieldIdx = random.Next(fieldValues.Length);
+                string updateField = "f" + fieldIdx;
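+                // flip a coin: either unset this field via a null update, or increment its value everywhere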
+                if (random.NextBoolean())
+                {
+                    //        System.out.println("[" + Thread.currentThread().getName() + "]: unset field '" + updateField + "'");
+                    fieldHasValue[fieldIdx] = false;
+                    if (fieldIdx < numNDVFields)
+                    {
+                        writer.UpdateNumericDocValue(new Term("key", "all"), updateField, null);
+                    }
+                    else
+                    {
+                        writer.UpdateBinaryDocValue(new Term("key", "all"), updateField, null);
+                    }
+                }
+                else
+                {
+                    fieldHasValue[fieldIdx] = true;
+                    if (fieldIdx < numNDVFields)
+                    {
+                        writer.UpdateNumericDocValue(new Term("key", "all"), updateField, ++fieldValues[fieldIdx]);
+                    }
+                    else
+                    {
+                        writer.UpdateBinaryDocValue(new Term("key", "all"), updateField, TestBinaryDocValuesUpdates.ToBytes(++fieldValues[fieldIdx]));
+                    }
+                    //        System.out.println("[" + Thread.currentThread().getName() + "]: updated field '" + updateField + "' to value " + fieldValues[fieldIdx]);
+                }
+
+                if (random.NextDouble() < 0.2)
+                {
+                    int deleteDoc = random.Next(docID); // might also delete an already deleted document, ok!
+                    writer.DeleteDocuments(new Term("id", "doc-" + deleteDoc));
+                    //        System.out.println("[" + Thread.currentThread().getName() + "]: deleted document: doc-" + deleteDoc);
+                }
+
+                // verify reader
+                if (!isNRT)
+                {
+                    writer.Commit();
+                }
+
+                //      System.out.println("[" + Thread.currentThread().getName() + "]: reopen reader: " + reader);
+                DirectoryReader newReader = DirectoryReader.OpenIfChanged(reader);
+                Assert.IsNotNull(newReader);
+                reader.Dispose();
+                reader = newReader;
+                //      System.out.println("[" + Thread.currentThread().getName() + "]: reopened reader: " + reader);
+                Assert.IsTrue(reader.NumDocs > 0); // we delete at most one document per round
+                BytesRef scratch = new BytesRef();
+                foreach (AtomicReaderContext context in reader.Leaves)
+                {
+                    AtomicReader r = context.AtomicReader;
+                    //        System.out.println(((SegmentReader) r).getSegmentName());
+                    IBits liveDocs = r.LiveDocs;
+                    for (int field = 0; field < fieldValues.Length; field++)
+                    {
+                        string f = "f" + field;
+                        BinaryDocValues bdv = r.GetBinaryDocValues(f);
+                        NumericDocValues ndv = r.GetNumericDocValues(f);
+                        IBits docsWithField = r.GetDocsWithField(f);
+                        if (field < numNDVFields)
+                        {
+                            Assert.IsNotNull(ndv);
+                            Assert.IsNull(bdv);
+                        }
+                        else
+                        {
+                            Assert.IsNull(ndv);
+                            Assert.IsNotNull(bdv);
+                        }
+                        int maxDoc = r.MaxDoc;
+                        for (int doc = 0; doc < maxDoc; doc++)
+                        {
+                            if (liveDocs == null || liveDocs.Get(doc))
+                            {
+                                //              System.out.println("doc=" + (doc + context.DocBase) + " f='" + f + "' vslue=" + getValue(bdv, doc, scratch));
+                                if (fieldHasValue[field])
+                                {
+                                    Assert.IsTrue(docsWithField.Get(doc));
+                                    if (field < numNDVFields)
+                                    {
+                                        Assert.AreEqual(fieldValues[field], ndv.Get(doc), "invalid value for doc=" + doc + ", field=" + f + ", reader=" + r);
+                                    }
+                                    else
+                                    {
+                                        Assert.AreEqual(fieldValues[field], TestBinaryDocValuesUpdates.GetValue(bdv, doc, scratch), "invalid value for doc=" + doc + ", field=" + f + ", reader=" + r);
+                                    }
+                                }
+                                else
+                                {
+                                    Assert.IsFalse(docsWithField.Get(doc));
+                                }
+                            }
+                        }
+                    }
+                }
+                //      System.out.println();
+            }
+
+            IOUtils.Close(writer, reader, dir);
+        }
+
+        [Test]
+        public virtual void TestStressMultiThreading()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            // create index
+            int numThreads = TestUtil.NextInt(Random(), 3, 6);
+            int numDocs = AtLeast(2000);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", "doc" + i, Store.NO));
+                double group = Random().NextDouble();
+                string g;
+                if (group < 0.1)
+                {
+                    g = "g0";
+                }
+                else if (group < 0.5)
+                {
+                    g = "g1";
+                }
+                else if (group < 0.8)
+                {
+                    g = "g2";
+                }
+                else
+                {
+                    g = "g3";
+                }
+                doc.Add(new StringField("updKey", g, Store.NO));
+                for (int j = 0; j < numThreads; j++)
+                {
+                    long value = Random().Next();
+                    doc.Add(new BinaryDocValuesField("f" + j, TestBinaryDocValuesUpdates.ToBytes(value)));
+                    doc.Add(new NumericDocValuesField("cf" + j, value * 2)); // control, always updated to f * 2
+                }
+                writer.AddDocument(doc);
+            }
+
+            CountdownEvent done = new CountdownEvent(numThreads);
+            AtomicInt32 numUpdates = new AtomicInt32(AtLeast(100));
+
+            // same thread updates a field as well as reopens
+            ThreadClass[] threads = new ThreadClass[numThreads];
+            for (int i = 0; i < threads.Length; i++)
+            {
+                string f = "f" + i;
+                string cf = "cf" + i;
+                threads[i] = new ThreadAnonymousInnerClassHelper(this, "UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf);
+            }
+
+            foreach (ThreadClass t in threads)
+            {
+                t.Start();
+            }
+            done.Wait();
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            BytesRef scratch = new BytesRef();
+            foreach (AtomicReaderContext context in reader.Leaves)
+            {
+                AtomicReader r = context.AtomicReader;
+                for (int i = 0; i < numThreads; i++)
+                {
+                    BinaryDocValues bdv = r.GetBinaryDocValues("f" + i);
+                    NumericDocValues control = r.GetNumericDocValues("cf" + i);
+                    IBits docsWithBdv = r.GetDocsWithField("f" + i);
+                    IBits docsWithControl = r.GetDocsWithField("cf" + i);
+                    IBits liveDocs = r.LiveDocs;
+                    for (int j = 0; j < r.MaxDoc; j++)
+                    {
+                        if (liveDocs == null || liveDocs.Get(j))
+                        {
+                            Assert.AreEqual(docsWithBdv.Get(j), docsWithControl.Get(j));
+                            if (docsWithBdv.Get(j))
+                            {
+                                long ctrlValue = control.Get(j);
+                                long bdvValue = TestBinaryDocValuesUpdates.GetValue(bdv, j, scratch) * 2;
+                                //              if (ctrlValue != bdvValue) {
+                                //                System.out.println("seg=" + r + ", f=f" + i + ", doc=" + j + ", group=" + r.Document(j).Get("updKey") + ", ctrlValue=" + ctrlValue + ", bdvBytes=" + scratch);
+                                //              }
+                                Assert.AreEqual(ctrlValue, bdvValue);
+                            }
+                        }
+                    }
+                }
+            }
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
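+        // A minimal illustrative sketch (not called by the tests) of the invariant
+        // the updater threads maintain and the loop above verifies: each binary
+        // field "f<i>" and its numeric control field "cf<i>" move in lock step,
+        // so control == decoded binary value * 2. Assumes only the ToBytes/GetValue
+        // helpers already used above.
+        private static void AssertInLockStep(BinaryDocValues bdv, NumericDocValues control, int docId, BytesRef scratch)
+        {
+            Assert.AreEqual(control.Get(docId), TestBinaryDocValuesUpdates.GetValue(bdv, docId, scratch) * 2);
+        }
+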
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestMixedDocValuesUpdates OuterInstance;
+
+            private IndexWriter Writer;
+            private int NumDocs;
+            private CountdownEvent Done;
+            private AtomicInt32 NumUpdates;
+            private string f;
+            private string Cf;
+
+            public ThreadAnonymousInnerClassHelper(TestMixedDocValuesUpdates outerInstance, string str, IndexWriter writer, int numDocs, CountdownEvent done, AtomicInt32 numUpdates, string f, string cf)
+                : base(str)
+            {
+                this.OuterInstance = outerInstance;
+                this.Writer = writer;
+                this.NumDocs = numDocs;
+                this.Done = done;
+                this.NumUpdates = numUpdates;
+                this.f = f;
+                this.Cf = cf;
+            }
+
+            public override void Run()
+            {
+                DirectoryReader reader = null;
+                bool success = false;
+                try
+                {
+                    Random random = Random();
+                    while (NumUpdates.GetAndDecrement() > 0)
+                    {
+                        double group = random.NextDouble();
+                        Term t;
+                        if (group < 0.1)
+                        {
+                            t = new Term("updKey", "g0");
+                        }
+                        else if (group < 0.5)
+                        {
+                            t = new Term("updKey", "g1");
+                        }
+                        else if (group < 0.8)
+                        {
+                            t = new Term("updKey", "g2");
+                        }
+                        else
+                        {
+                            t = new Term("updKey", "g3");
+                        }
+                        //              System.out.println("[" + Thread.currentThread().getName() + "] numUpdates=" + numUpdates + " updateTerm=" + t);
+                        if (random.NextBoolean()) // sometimes unset a value
+                        {
+                            //                System.err.println("[" + Thread.currentThread().getName() + "] t=" + t + ", f=" + f + ", updValue=UNSET");
+                            Writer.UpdateBinaryDocValue(t, f, null);
+                            Writer.UpdateNumericDocValue(t, Cf, null);
+                        }
+                        else
+                        {
+                            long updValue = random.Next();
+                            //                System.err.println("[" + Thread.currentThread().getName() + "] t=" + t + ", f=" + f + ", updValue=" + updValue);
+                            Writer.UpdateBinaryDocValue(t, f, TestBinaryDocValuesUpdates.ToBytes(updValue));
+                            Writer.UpdateNumericDocValue(t, Cf, updValue * 2);
+                        }
+
+                        if (random.NextDouble() < 0.2)
+                        {
+                            // delete a random document
+                            int doc = random.Next(NumDocs);
+                            //                System.out.println("[" + Thread.currentThread().getName() + "] deleteDoc=doc" + doc);
+                            Writer.DeleteDocuments(new Term("id", "doc" + doc));
+                        }
+
+                        if (random.NextDouble() < 0.05) // commit every 20 updates on average
+                        {
+                            //                  System.out.println("[" + Thread.currentThread().getName() + "] commit");
+                            Writer.Commit();
+                        }
+
+                        if (random.NextDouble() < 0.1) // reopen NRT reader (apply updates), on average once every 10 updates
+                        {
+                            if (reader == null)
+                            {
+                                //                  System.out.println("[" + Thread.currentThread().getName() + "] open NRT");
+                                reader = DirectoryReader.Open(Writer, true);
+                            }
+                            else
+                            {
+                                //                  System.out.println("[" + Thread.currentThread().getName() + "] reopen NRT");
+                                DirectoryReader r2 = DirectoryReader.OpenIfChanged(reader, Writer, true);
+                                if (r2 != null)
+                                {
+                                    reader.Dispose();
+                                    reader = r2;
+                                }
+                            }
+                        }
+                    }
+                    //            System.out.println("[" + Thread.currentThread().getName() + "] DONE");
+                    success = true;
+                }
+                catch (IOException e)
+                {
+                    throw new Exception(e.Message, e);
+                }
+                finally
+                {
+                    if (reader != null)
+                    {
+                        try
+                        {
+                            reader.Dispose();
+                        }
+                        catch (IOException e)
+                        {
+                            if (success) // rethrow only if nothing else failed; otherwise suppress this exception
+                            {
+                                throw new Exception(e.Message, e);
+                            }
+                        }
+                    }
+                    Done.Signal();
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestUpdateDifferentDocsInDifferentGens()
+        {
+            // update a random document (possibly the same one repeatedly) across multiple generations
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMaxBufferedDocs(4);
+            IndexWriter writer = new IndexWriter(dir, conf);
+            int numDocs = AtLeast(10);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", "doc" + i, Store.NO));
+                long value = Random().Next();
+                doc.Add(new BinaryDocValuesField("f", TestBinaryDocValuesUpdates.ToBytes(value)));
+                doc.Add(new NumericDocValuesField("cf", value * 2));
+                writer.AddDocument(doc);
+            }
+
+            int numGens = AtLeast(5);
+            BytesRef scratch = new BytesRef();
+            for (int i = 0; i < numGens; i++)
+            {
+                int doc = Random().Next(numDocs);
+                Term t = new Term("id", "doc" + doc);
+                long value = Random().NextLong();
+                writer.UpdateBinaryDocValue(t, "f", TestBinaryDocValuesUpdates.ToBytes(value));
+                writer.UpdateNumericDocValue(t, "cf", value * 2);
+                DirectoryReader reader = DirectoryReader.Open(writer, true);
+                foreach (AtomicReaderContext context in reader.Leaves)
+                {
+                    AtomicReader r = context.AtomicReader;
+                    BinaryDocValues fbdv = r.GetBinaryDocValues("f");
+                    NumericDocValues cfndv = r.GetNumericDocValues("cf");
+                    for (int j = 0; j < r.MaxDoc; j++)
+                    {
+                        Assert.AreEqual(cfndv.Get(j), TestBinaryDocValuesUpdates.GetValue(fbdv, j, scratch) * 2);
+                    }
+                }
+                reader.Dispose();
+            }
+            writer.Dispose();
+            dir.Dispose();
+        }
+
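+        // Background for the test above: flushed doc-values updates are applied
+        // as a new doc-values "generation" per affected segment, so reopening
+        // after every update forces each generation to be written out and read back.
+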
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout attribute in NUnit for .NET Core.
+        [Timeout(80000)]
+#endif
+        [Test, HasTimeout]
+        public virtual void TestTonsOfUpdates()
+        {
+            // LUCENE-5248: make sure that when there are many updates, we don't use too much RAM
+            Directory dir = NewDirectory();
+            Random random = Random();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
+            conf.SetRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB);
+            conf.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH); // don't flush by doc
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            // test data: lots of documents (tens of thousands) and many update terms (up to 100)
+            int numDocs = AtLeast(20000);
+            int numBinaryFields = AtLeast(5);
+            int numTerms = TestUtil.NextInt(random, 10, 100); // terms should affect many docs
+            HashSet<string> updateTerms = new HashSet<string>();
+            while (updateTerms.Count < numTerms)
+            {
+                updateTerms.Add(TestUtil.RandomSimpleString(random));
+            }
+
+            //    System.out.println("numDocs=" + numDocs + " numBinaryFields=" + numBinaryFields + " numTerms=" + numTerms);
+
+            // build a large index with many BDV fields and update terms
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                int numUpdateTerms = TestUtil.NextInt(random, 1, numTerms / 10);
+                for (int j = 0; j < numUpdateTerms; j++)
+                {
+                    doc.Add(new StringField("upd", RandomInts.RandomFrom(random, updateTerms), Store.NO));
+                }
+                for (int j = 0; j < numBinaryFields; j++)
+                {
+                    long val = random.Next();
+                    doc.Add(new BinaryDocValuesField("f" + j, TestBinaryDocValuesUpdates.ToBytes(val)));
+                    doc.Add(new NumericDocValuesField("cf" + j, val * 2));
+                }
+                writer.AddDocument(doc);
+            }
+
+            writer.Commit(); // commit so there's something to apply to
+
+            // set to flush every 2048 bytes (approximately every 12 updates), so we get
+            // many flushes during binary updates
+            writer.Config.SetRAMBufferSizeMB(2048.0 / 1024 / 1024);
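+            // (2048.0 / 1024 / 1024 MB is exactly 2048 bytes, i.e. a deliberately
+            // tiny buffer, so buffered updates are applied almost immediately.)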
+            int numUpdates = AtLeast(100);
+            //    System.out.println("numUpdates=" + numUpdates);
+            for (int i = 0; i < numUpdates; i++)
+            {
+                int field = random.Next(numBinaryFields);
+                Term updateTerm = new Term("upd", RandomInts.RandomFrom(random, updateTerms));
+                long value = random.Next();
+                writer.UpdateBinaryDocValue(updateTerm, "f" + field, TestBinaryDocValuesUpdates.ToBytes(value));
+                writer.UpdateNumericDocValue(updateTerm, "cf" + field, value * 2);
+            }
+
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            BytesRef scratch = new BytesRef();
+            foreach (AtomicReaderContext context in reader.Leaves)
+            {
+                for (int i = 0; i < numBinaryFields; i++)
+                {
+                    AtomicReader r = context.AtomicReader;
+                    BinaryDocValues f = r.GetBinaryDocValues("f" + i);
+                    NumericDocValues cf = r.GetNumericDocValues("cf" + i);
+                    for (int j = 0; j < r.MaxDoc; j++)
+                    {
+                        Assert.AreEqual(cf.Get(j), TestBinaryDocValuesUpdates.GetValue(f, j, scratch) * 2, "reader=" + r + ", field=f" + i + ", doc=" + j);
+                    }
+                }
+            }
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestMultiDocValues.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestMultiDocValues.cs b/src/Lucene.Net.Tests/Index/TestMultiDocValues.cs
new file mode 100644
index 0000000..a4a4b84
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestMultiDocValues.cs
@@ -0,0 +1,439 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Randomized.Generators;
+using System.Collections.Generic;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using BinaryDocValuesField = BinaryDocValuesField;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using NumericDocValuesField = NumericDocValuesField;
+    using SortedDocValuesField = SortedDocValuesField;
+    using SortedSetDocValuesField = SortedSetDocValuesField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// Tests MultiDocValues versus ordinary segment merging </summary>
+    [SuppressCodecs("Lucene3x")]
+    [TestFixture]
+    public class TestMultiDocValues : LuceneTestCase
+    {
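+        // Every test in this fixture follows the same shape: build an index that
+        // spans several segments, open one reader over the segmented form and one
+        // over a ForceMerge(1)'d copy of it, then assert that the on-the-fly
+        // MultiDocValues view matches the codec's physically merged values.
+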
+        [Test]
+        public virtual void TestNumerics()
+        {
+            Directory dir = NewDirectory();
+            Document doc = new Document();
+            Field field = new NumericDocValuesField("numbers", 0);
+            doc.Add(field);
+
+            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);
+
+            int numDocs = AtLeast(500);
+            for (int i = 0; i < numDocs; i++)
+            {
+                field.SetInt64Value(Random().NextLong());
+                iw.AddDocument(doc);
+                if (Random().Next(17) == 0)
+                {
+                    iw.Commit();
+                }
+            }
+            DirectoryReader ir = iw.Reader;
+            iw.ForceMerge(1);
+            DirectoryReader ir2 = iw.Reader;
+            AtomicReader merged = GetOnlySegmentReader(ir2);
+            iw.Dispose();
+
+            NumericDocValues multi = MultiDocValues.GetNumericValues(ir, "numbers");
+            NumericDocValues single = merged.GetNumericDocValues("numbers");
+            for (int i = 0; i < numDocs; i++)
+            {
+                Assert.AreEqual(single.Get(i), multi.Get(i));
+            }
+            ir.Dispose();
+            ir2.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestBinary()
+        {
+            Directory dir = NewDirectory();
+            Document doc = new Document();
+            BytesRef @ref = new BytesRef();
+            Field field = new BinaryDocValuesField("bytes", @ref);
+            doc.Add(field);
+
+            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);
+
+            int numDocs = AtLeast(500);
+            for (int i = 0; i < numDocs; i++)
+            {
+                @ref.CopyChars(TestUtil.RandomUnicodeString(Random()));
+                iw.AddDocument(doc);
+                if (Random().Next(17) == 0)
+                {
+                    iw.Commit();
+                }
+            }
+            DirectoryReader ir = iw.Reader;
+            iw.ForceMerge(1);
+            DirectoryReader ir2 = iw.Reader;
+            AtomicReader merged = GetOnlySegmentReader(ir2);
+            iw.Dispose();
+
+            BinaryDocValues multi = MultiDocValues.GetBinaryValues(ir, "bytes");
+            BinaryDocValues single = merged.GetBinaryDocValues("bytes");
+            BytesRef actual = new BytesRef();
+            BytesRef expected = new BytesRef();
+            for (int i = 0; i < numDocs; i++)
+            {
+                single.Get(i, expected);
+                multi.Get(i, actual);
+                Assert.AreEqual(expected, actual);
+            }
+            ir.Dispose();
+            ir2.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSorted()
+        {
+            Directory dir = NewDirectory();
+            Document doc = new Document();
+            BytesRef @ref = new BytesRef();
+            Field field = new SortedDocValuesField("bytes", @ref);
+            doc.Add(field);
+
+            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);
+
+            int numDocs = AtLeast(500);
+            for (int i = 0; i < numDocs; i++)
+            {
+                @ref.CopyChars(TestUtil.RandomUnicodeString(Random()));
+                if (DefaultCodecSupportsDocsWithField() && Random().Next(7) == 0)
+                {
+                    iw.AddDocument(new Document());
+                }
+                iw.AddDocument(doc);
+                if (Random().Next(17) == 0)
+                {
+                    iw.Commit();
+                }
+            }
+            DirectoryReader ir = iw.Reader;
+            iw.ForceMerge(1);
+            DirectoryReader ir2 = iw.Reader;
+            AtomicReader merged = GetOnlySegmentReader(ir2);
+            iw.Dispose();
+
+            SortedDocValues multi = MultiDocValues.GetSortedValues(ir, "bytes");
+            SortedDocValues single = merged.GetSortedDocValues("bytes");
+            Assert.AreEqual(single.ValueCount, multi.ValueCount);
+            BytesRef actual = new BytesRef();
+            BytesRef expected = new BytesRef();
+            for (int i = 0; i < numDocs; i++)
+            {
+                // check ord
+                Assert.AreEqual(single.GetOrd(i), multi.GetOrd(i));
+                // check value
+                single.Get(i, expected);
+                multi.Get(i, actual);
+                Assert.AreEqual(expected, actual);
+            }
+            ir.Dispose();
+            ir2.Dispose();
+            dir.Dispose();
+        }
+
+        // tries to make more dups than TestSorted
+        [Test]
+        public virtual void TestSortedWithLotsOfDups()
+        {
+            Directory dir = NewDirectory();
+            Document doc = new Document();
+            BytesRef @ref = new BytesRef();
+            Field field = new SortedDocValuesField("bytes", @ref);
+            doc.Add(field);
+
+            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);
+
+            int numDocs = AtLeast(500);
+            for (int i = 0; i < numDocs; i++)
+            {
+                @ref.CopyChars(TestUtil.RandomSimpleString(Random(), 2));
+                iw.AddDocument(doc);
+                if (Random().Next(17) == 0)
+                {
+                    iw.Commit();
+                }
+            }
+            DirectoryReader ir = iw.Reader;
+            iw.ForceMerge(1);
+            DirectoryReader ir2 = iw.Reader;
+            AtomicReader merged = GetOnlySegmentReader(ir2);
+            iw.Dispose();
+
+            SortedDocValues multi = MultiDocValues.GetSortedValues(ir, "bytes");
+            SortedDocValues single = merged.GetSortedDocValues("bytes");
+            Assert.AreEqual(single.ValueCount, multi.ValueCount);
+            BytesRef actual = new BytesRef();
+            BytesRef expected = new BytesRef();
+            for (int i = 0; i < numDocs; i++)
+            {
+                // check ord
+                Assert.AreEqual(single.GetOrd(i), multi.GetOrd(i));
+                // check ord value
+                single.Get(i, expected);
+                multi.Get(i, actual);
+                Assert.AreEqual(expected, actual);
+            }
+            ir.Dispose();
+            ir2.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSortedSet()
+        {
+            AssumeTrue("codec does not support SORTED_SET", DefaultCodecSupportsSortedSet());
+            Directory dir = NewDirectory();
+
+            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);
+
+            int numDocs = AtLeast(500);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                int numValues = Random().Next(5);
+                for (int j = 0; j < numValues; j++)
+                {
+                    doc.Add(new SortedSetDocValuesField("bytes", new BytesRef(TestUtil.RandomUnicodeString(Random()))));
+                }
+                iw.AddDocument(doc);
+                if (Random().Next(17) == 0)
+                {
+                    iw.Commit();
+                }
+            }
+            DirectoryReader ir = iw.Reader;
+            iw.ForceMerge(1);
+            DirectoryReader ir2 = iw.Reader;
+            AtomicReader merged = GetOnlySegmentReader(ir2);
+            iw.Dispose();
+
+            SortedSetDocValues multi = MultiDocValues.GetSortedSetValues(ir, "bytes");
+            SortedSetDocValues single = merged.GetSortedSetDocValues("bytes");
+            if (multi == null)
+            {
+                Assert.IsNull(single);
+            }
+            else
+            {
+                Assert.AreEqual(single.ValueCount, multi.ValueCount);
+                BytesRef actual = new BytesRef();
+                BytesRef expected = new BytesRef();
+                // check values
+                for (long i = 0; i < single.ValueCount; i++)
+                {
+                    single.LookupOrd(i, expected);
+                    multi.LookupOrd(i, actual);
+                    Assert.AreEqual(expected, actual);
+                }
+                // check ord list
+                for (int i = 0; i < numDocs; i++)
+                {
+                    single.SetDocument(i);
+                    List<long> expectedList = new List<long>();
+                    long ord;
+                    while ((ord = single.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
+                    {
+                        expectedList.Add(ord);
+                    }
+
+                    multi.SetDocument(i);
+                    int upto = 0;
+                    while ((ord = multi.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
+                    {
+                        Assert.AreEqual(expectedList[upto], ord);
+                        upto++;
+                    }
+                    Assert.AreEqual(expectedList.Count, upto);
+                }
+            }
+
+            ir.Dispose();
+            ir2.Dispose();
+            dir.Dispose();
+        }
+
+        // tries to make more dups than TestSortedSet
+        [Test]
+        public virtual void TestSortedSetWithDups()
+        {
+            AssumeTrue("codec does not support SORTED_SET", DefaultCodecSupportsSortedSet());
+            Directory dir = NewDirectory();
+
+            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);
+
+            int numDocs = AtLeast(500);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                int numValues = Random().Next(5);
+                for (int j = 0; j < numValues; j++)
+                {
+                    doc.Add(new SortedSetDocValuesField("bytes", new BytesRef(TestUtil.RandomSimpleString(Random(), 2))));
+                }
+                iw.AddDocument(doc);
+                if (Random().Next(17) == 0)
+                {
+                    iw.Commit();
+                }
+            }
+            DirectoryReader ir = iw.Reader;
+            iw.ForceMerge(1);
+            DirectoryReader ir2 = iw.Reader;
+            AtomicReader merged = GetOnlySegmentReader(ir2);
+            iw.Dispose();
+
+            SortedSetDocValues multi = MultiDocValues.GetSortedSetValues(ir, "bytes");
+            SortedSetDocValues single = merged.GetSortedSetDocValues("bytes");
+            if (multi == null)
+            {
+                Assert.IsNull(single);
+            }
+            else
+            {
+                Assert.AreEqual(single.ValueCount, multi.ValueCount);
+                BytesRef actual = new BytesRef();
+                BytesRef expected = new BytesRef();
+                // check values
+                for (long i = 0; i < single.ValueCount; i++)
+                {
+                    single.LookupOrd(i, expected);
+                    multi.LookupOrd(i, actual);
+                    Assert.AreEqual(expected, actual);
+                }
+                // check ord list
+                for (int i = 0; i < numDocs; i++)
+                {
+                    single.SetDocument(i);
+                    List<long> expectedList = new List<long>();
+                    long ord;
+                    while ((ord = single.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
+                    {
+                        expectedList.Add(ord);
+                    }
+
+                    multi.SetDocument(i);
+                    int upto = 0;
+                    while ((ord = multi.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
+                    {
+                        Assert.AreEqual(expectedList[upto], ord);
+                        upto++;
+                    }
+                    Assert.AreEqual(expectedList.Count, upto);
+                }
+            }
+
+            ir.Dispose();
+            ir2.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDocsWithField()
+        {
+            AssumeTrue("codec does not support docsWithField", DefaultCodecSupportsDocsWithField());
+            Directory dir = NewDirectory();
+
+            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);
+
+            int numDocs = AtLeast(500);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                if (Random().Next(4) >= 0) // note: this condition is always true, so every doc gets "numbers"
+                {
+                    doc.Add(new NumericDocValuesField("numbers", Random().NextLong()));
+                }
+                doc.Add(new NumericDocValuesField("numbersAlways", Random().NextLong()));
+                iw.AddDocument(doc);
+                if (Random().Next(17) == 0)
+                {
+                    iw.Commit();
+                }
+            }
+            DirectoryReader ir = iw.Reader;
+            iw.ForceMerge(1);
+            DirectoryReader ir2 = iw.Reader;
+            AtomicReader merged = GetOnlySegmentReader(ir2);
+            iw.Dispose();
+
+            IBits multi = MultiDocValues.GetDocsWithField(ir, "numbers");
+            IBits single = merged.GetDocsWithField("numbers");
+            if (multi == null)
+            {
+                Assert.IsNull(single);
+            }
+            else
+            {
+                Assert.AreEqual(single.Length, multi.Length);
+                for (int i = 0; i < numDocs; i++)
+                {
+                    Assert.AreEqual(single.Get(i), multi.Get(i));
+                }
+            }
+
+            multi = MultiDocValues.GetDocsWithField(ir, "numbersAlways");
+            single = merged.GetDocsWithField("numbersAlways");
+            Assert.AreEqual(single.Length, multi.Length);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Assert.AreEqual(single.Get(i), multi.Get(i));
+            }
+            ir.Dispose();
+            ir2.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexWriterCommit.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterCommit.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterCommit.cs
new file mode 100644
index 0000000..4cd35f2
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterCommit.cs
@@ -0,0 +1,772 @@
+using System;
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Analysis;
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.IO;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using ScoreDoc = Lucene.Net.Search.ScoreDoc;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestIndexWriterCommit : LuceneTestCase
+    {
+        private static readonly FieldType StoredTextType = new FieldType(TextField.TYPE_NOT_STORED);
+
+        /*
+         * Simple test for "commit on close": open a writer, then
+         * add a bunch of docs, making sure reader does not see
+         * these docs until writer is closed.
+         */
+
+        [Test]
+        public virtual void TestCommitOnClose()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            for (int i = 0; i < 14; i++)
+            {
+                AddDoc(writer);
+            }
+            writer.Dispose();
+
+            Term searchTerm = new Term("content", "aaa");
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            IndexSearcher searcher = NewSearcher(reader);
+            ScoreDoc[] hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
+            Assert.AreEqual(14, hits.Length, "first number of hits");
+            reader.Dispose();
+
+            reader = DirectoryReader.Open(dir);
+
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            for (int i = 0; i < 3; i++)
+            {
+                for (int j = 0; j < 11; j++)
+                {
+                    AddDoc(writer);
+                }
+                IndexReader r = DirectoryReader.Open(dir);
+                searcher = NewSearcher(r);
+                hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
+                Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer");
+                r.Dispose();
+                Assert.IsTrue(reader.IsCurrent, "reader should have still been current");
+            }
+
+            // Now, close the writer:
+            writer.Dispose();
+            Assert.IsFalse(reader.IsCurrent, "reader should not be current now");
+
+            IndexReader ir = DirectoryReader.Open(dir);
+            searcher = NewSearcher(ir);
+            hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
+            Assert.AreEqual(47, hits.Length, "reader did not see changes after writer was closed");
+            ir.Dispose();
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        /*
+         * Simple test for "commit on close": open writer, then
+         * add a bunch of docs, making sure reader does not see
+         * them until writer has closed.  Then instead of
+         * closing the writer, call abort and verify reader sees
+         * nothing was added.  Then verify we can open the index
+         * and add docs to it.
+         */
+
+        [Test]
+        public virtual void TestCommitOnCloseAbort()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(10));
+            for (int i = 0; i < 14; i++)
+            {
+                AddDoc(writer);
+            }
+            writer.Dispose();
+
+            Term searchTerm = new Term("content", "aaa");
+            IndexReader reader = DirectoryReader.Open(dir);
+            IndexSearcher searcher = NewSearcher(reader);
+            ScoreDoc[] hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
+            Assert.AreEqual(14, hits.Length, "first number of hits");
+            reader.Dispose();
+
+            writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10));
+            for (int j = 0; j < 17; j++)
+            {
+                AddDoc(writer);
+            }
+            // Delete all docs:
+            writer.DeleteDocuments(searchTerm);
+
+            reader = DirectoryReader.Open(dir);
+            searcher = NewSearcher(reader);
+            hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
+            Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer");
+            reader.Dispose();
+
+            // Now, abort the writer:
+            writer.Rollback();
+
+            TestIndexWriter.AssertNoUnreferencedFiles(dir, "unreferenced files remain after rollback()");
+
+            reader = DirectoryReader.Open(dir);
+            searcher = NewSearcher(reader);
+            hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
+            Assert.AreEqual(14, hits.Length, "saw changes after writer.abort");
+            reader.Dispose();
+
+            // Now make sure we can re-open the index, add docs,
+            // and all is good:
+            writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10));
+
+            // On abort, writer in fact may write to the same
+            // segments_N file:
+            if (dir is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)dir).PreventDoubleWrite = false;
+            }
+
+            for (int i = 0; i < 12; i++)
+            {
+                for (int j = 0; j < 17; j++)
+                {
+                    AddDoc(writer);
+                }
+                IndexReader r = DirectoryReader.Open(dir);
+                searcher = NewSearcher(r);
+                hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
+                Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer");
+                r.Dispose();
+            }
+
+            writer.Dispose();
+            IndexReader ir = DirectoryReader.Open(dir);
+            searcher = NewSearcher(ir);
+            hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
+            Assert.AreEqual(218, hits.Length, "didn't see changes after close");
+            ir.Dispose();
+
+            dir.Dispose();
+        }
+
+        /*
+         * Verify that a writer with "commit on close" indeed
+         * cleans up the temp segments created after opening
+         * that are not referenced by the starting segments
+         * file.  We check this by using MockDirectoryWrapper to
+         * measure max temp disk space used.
+         */
+
+        [Test]
+        public virtual void TestCommitOnCloseDiskUsage()
+        {
+            // MemoryCodec, since it uses FST, is not necessarily
+            // "additive", ie if you add up N small FSTs, then merge
+            // them, the merged result can easily be larger than the
+            // sum because the merged FST may use array encoding for
+            // some arcs (which uses more space):
+
+            string idFormat = TestUtil.GetPostingsFormat("id");
+            string contentFormat = TestUtil.GetPostingsFormat("content");
+            AssumeFalse("this test cannot run with Memory codec", idFormat.Equals("Memory") || contentFormat.Equals("Memory"));
+            MockDirectoryWrapper dir = NewMockDirectory();
+            Analyzer analyzer;
+            if (Random().NextBoolean())
+            {
+                // no payloads
+                analyzer = new AnalyzerAnonymousInnerClassHelper(this);
+            }
+            else
+            {
+                // fixed length payloads
+                int length = Random().Next(200);
+                analyzer = new AnalyzerAnonymousInnerClassHelper2(this, length);
+            }
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(10).SetReaderPooling(false).SetMergePolicy(NewLogMergePolicy(10)));
+            for (int j = 0; j < 30; j++)
+            {
+                AddDocWithIndex(writer, j);
+            }
+            writer.Dispose();
+            dir.ResetMaxUsedSizeInBytes();
+
+            dir.TrackDiskUsage = true;
+            long startDiskUsage = dir.MaxUsedSizeInBytes;
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10).SetMergeScheduler(new SerialMergeScheduler()).SetReaderPooling(false).SetMergePolicy(NewLogMergePolicy(10)));
+            for (int j = 0; j < 1470; j++)
+            {
+                AddDocWithIndex(writer, j);
+            }
+            long midDiskUsage = dir.MaxUsedSizeInBytes;
+            dir.ResetMaxUsedSizeInBytes();
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            DirectoryReader.Open(dir).Dispose();
+
+            long endDiskUsage = dir.MaxUsedSizeInBytes;
+
+            // Ending index is 50X as large as the starting index (30 docs
+            // grow to 30 + 1470 = 1500); since transient disk usage is
+            // normally about 3X, we allow 150X max transient usage. If
+            // something is wrong with the deleter and it doesn't delete
+            // intermediate segments, it will exceed this 150X:
+            // System.out.println("start " + startDiskUsage + "; mid " + midDiskUsage + ";end " + endDiskUsage);
+            Assert.IsTrue(midDiskUsage < 150 * startDiskUsage, "writer used too much space while adding documents: mid=" + midDiskUsage + " start=" + startDiskUsage + " end=" + endDiskUsage + " max=" + (startDiskUsage * 150));
+            Assert.IsTrue(endDiskUsage < 150 * startDiskUsage, "writer used too much space after close: endDiskUsage=" + endDiskUsage + " startDiskUsage=" + startDiskUsage + " max=" + (startDiskUsage * 150));
+            dir.Dispose();
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private readonly TestIndexWriterCommit OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper(TestIndexWriterCommit outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, true));
+            }
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper2 : Analyzer
+        {
+            private readonly TestIndexWriterCommit OuterInstance;
+
+            private int Length;
+
+            public AnalyzerAnonymousInnerClassHelper2(TestIndexWriterCommit outerInstance, int length)
+            {
+                this.OuterInstance = outerInstance;
+                this.Length = length;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+                return new TokenStreamComponents(tokenizer, new MockFixedLengthPayloadFilter(Random(), tokenizer, Length));
+            }
+        }
+
+        /*
+         * Verify that calling forceMerge when writer is open for
+         * "commit on close" works correctly both for rollback()
+         * and close().
+         */
+
+        [Test]
+        public virtual void TestCommitOnCloseForceMerge()
+        {
+            Directory dir = NewDirectory();
+            // Must disable throwing exc on double-write: this
+            // test uses IW.rollback which easily results in
+            // writing to same file more than once
+            if (dir is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)dir).PreventDoubleWrite = false;
+            }
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(10)));
+            for (int j = 0; j < 17; j++)
+            {
+                AddDocWithIndex(writer, j);
+            }
+            writer.Dispose();
+
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
+            writer.ForceMerge(1);
+
+            // Open a reader before closing (committing) the writer:
+            DirectoryReader reader = DirectoryReader.Open(dir);
+
+            // Reader should see index as multi-seg at this
+            // point:
+            Assert.IsTrue(reader.Leaves.Count > 1, "Reader incorrectly sees one segment");
+            reader.Dispose();
+
+            // Abort the writer:
+            writer.Rollback();
+            TestIndexWriter.AssertNoUnreferencedFiles(dir, "aborted writer after forceMerge");
+
+            // Open a reader after aborting writer:
+            reader = DirectoryReader.Open(dir);
+
+            // Reader should still see index as multi-segment
+            Assert.IsTrue(reader.Leaves.Count > 1, "Reader incorrectly sees one segment");
+            reader.Dispose();
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: do real full merge");
+            }
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: writer closed");
+            }
+            TestIndexWriter.AssertNoUnreferencedFiles(dir, "aborted writer after forceMerge");
+
+            // Open a reader after aborting writer:
+            reader = DirectoryReader.Open(dir);
+
+            // Reader should see index as one segment
+            Assert.AreEqual(1, reader.Leaves.Count, "Reader incorrectly sees more than one segment");
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-2095: make sure with multiple threads commit
+        // doesn't return until all changes are in fact in the
+        // index
+        [Test]
+        public virtual void TestCommitThreadSafety()
+        {
+            const int NUM_THREADS = 5;
+            const double RUN_SEC = 0.5;
+            var dir = NewDirectory();
+            var w = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            TestUtil.ReduceOpenFiles(w.w);
+            w.Commit();
+            var failed = new AtomicBoolean();
+            var threads = new ThreadClass[NUM_THREADS];
+            long endTime = Environment.TickCount + ((long)(RUN_SEC * 1000));
+            for (int i = 0; i < NUM_THREADS; i++)
+            {
+                int finalI = i;
+                threads[i] = new ThreadAnonymousInnerClassHelper(dir, w, failed, endTime, finalI, NewStringField);
+                threads[i].Start();
+            }
+            for (int i = 0; i < NUM_THREADS; i++)
+            {
+                threads[i].Join();
+            }
+            Assert.IsFalse(failed.Get());
+            w.Dispose();
+            dir.Dispose();
+        }
+
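+        // The guarantee each thread below leans on: by the time Commit() returns,
+        // a reader reopened via DirectoryReader.OpenIfChanged must already see the
+        // document added just before the commit, even with concurrent committers.
+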
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly Func<string, string, Field.Store, Field> NewStringField;
+            private Directory Dir;
+            private RandomIndexWriter w;
+            private AtomicBoolean Failed;
+            private long EndTime;
+            private int FinalI;
+
+            /// <param name="newStringField">
+            /// LUCENENET specific
+            /// This is passed in because <see cref="LuceneTestCase.NewStringField(string, string, Field.Store)"/>
+            /// is no longer static.
+            /// </param>
+            public ThreadAnonymousInnerClassHelper(Directory dir, RandomIndexWriter w, AtomicBoolean failed, long endTime, int finalI, Func<string, string, Field.Store, Field> newStringField)
+            {
+                NewStringField = newStringField;
+                this.Dir = dir;
+                this.w = w;
+                this.Failed = failed;
+                this.EndTime = endTime;
+                this.FinalI = finalI;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    Document doc = new Document();
+                    DirectoryReader r = DirectoryReader.Open(Dir);
+                    Field f = NewStringField("f", "", Field.Store.NO);
+                    doc.Add(f);
+                    int count = 0;
+                    do
+                    {
+                        if (Failed.Get())
+                        {
+                            break;
+                        }
+                        for (int j = 0; j < 10; j++)
+                        {
+                            string s = FinalI + "_" + Convert.ToString(count++);
+                            f.SetStringValue(s);
+                            w.AddDocument(doc);
+                            w.Commit();
+                            DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
+                            Assert.IsNotNull(r2);
+                            Assert.IsTrue(!r2.Equals(r));
+                            r.Dispose();
+                            r = r2;
+                            Assert.AreEqual(1, r.DocFreq(new Term("f", s)), "term=f:" + s + "; r=" + r);
+                        }
+                    } while (Environment.TickCount < EndTime);
+                    r.Dispose();
+                }
+                catch (Exception t)
+                {
+                    Failed.Set(true);
+                    throw new Exception(t.Message, t);
+                }
+            }
+        }
+
+        // LUCENE-1044: test writer.Commit() when autoCommit=false
+        [Test]
+        public virtual void TestForceCommit()
+        {
+            Directory dir = NewDirectory();
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(5)));
+            writer.Commit();
+
+            for (int i = 0; i < 23; i++)
+            {
+                AddDoc(writer);
+            }
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(0, reader.NumDocs);
+            writer.Commit();
+            DirectoryReader reader2 = DirectoryReader.OpenIfChanged(reader);
+            Assert.IsNotNull(reader2);
+            Assert.AreEqual(0, reader.NumDocs);
+            Assert.AreEqual(23, reader2.NumDocs);
+            reader.Dispose();
+
+            for (int i = 0; i < 17; i++)
+            {
+                AddDoc(writer);
+            }
+            Assert.AreEqual(23, reader2.NumDocs);
+            reader2.Dispose();
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(23, reader.NumDocs);
+            reader.Dispose();
+            writer.Commit();
+
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(40, reader.NumDocs);
+            reader.Dispose();
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestFutureCommit()
+        {
+            Directory dir = NewDirectory();
+
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
+            Document doc = new Document();
+            w.AddDocument(doc);
+
+            // commit to "first"
+            IDictionary<string, string> commitData = new Dictionary<string, string>();
+            commitData["tag"] = "first";
+            w.CommitData = commitData;
+            w.Commit();
+
+            // commit to "second"
+            w.AddDocument(doc);
+            commitData["tag"] = "second";
+            w.CommitData = commitData;
+            w.Dispose();
+
+            // open "first" with IndexWriter
+            IndexCommit commit = null;
+            foreach (IndexCommit c in DirectoryReader.ListCommits(dir))
+            {
+                if (c.UserData["tag"].Equals("first"))
+                {
+                    commit = c;
+                    break;
+                }
+            }
+
+            Assert.IsNotNull(commit);
+
+            w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(NoDeletionPolicy.INSTANCE).SetIndexCommit(commit));
+
+            Assert.AreEqual(1, w.NumDocs);
+
+            // commit IndexWriter to "third"
+            w.AddDocument(doc);
+            commitData["tag"] = "third";
+            w.CommitData = commitData;
+            w.Dispose();
+
+            // make sure "second" commit is still there
+            commit = null;
+            foreach (IndexCommit c in DirectoryReader.ListCommits(dir))
+            {
+                if (c.UserData["tag"].Equals("second"))
+                {
+                    commit = c;
+                    break;
+                }
+            }
+
+            Assert.IsNotNull(commit);
+
+            dir.Dispose();
+        }
+
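+        // A small sketch of the commit-walking pattern used twice above (assuming,
+        // as the test does, that every preserved commit carries a "tag" entry in
+        // its user data); illustrative only, not called by the test:
+        private static IndexCommit FindCommitByTag(Directory dir, string tag)
+        {
+            foreach (IndexCommit c in DirectoryReader.ListCommits(dir))
+            {
+                if (tag.Equals(c.UserData["tag"]))
+                {
+                    return c;
+                }
+            }
+            return null;
+        }
+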
+        [Test]
+        public virtual void TestZeroCommits()
+        {
+            // Tests that if we don't call commit(), the directory has 0 commits. This has
+            // changed since LUCENE-2386: before that, IW would always commit on a fresh
+            // new index.
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            try
+            {
+                DirectoryReader.ListCommits(dir);
+                Assert.Fail("listCommits should have thrown an exception over empty index");
+            }
+#pragma warning disable 168
+            catch (IndexNotFoundException e)
+#pragma warning restore 168
+            {
+                // that's expected!
+            }
+            // Closing the writer with no changes should still generate a commit, because it's a new index.
+            writer.Dispose();
+            Assert.AreEqual(1, DirectoryReader.ListCommits(dir).Count, "expected 1 commit!");
+            dir.Dispose();
+        }
+
+        // LUCENE-1274: test writer.PrepareCommit()
+        [Test]
+        public virtual void TestPrepareCommit()
+        {
+            Directory dir = NewDirectory();
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(5)));
+            writer.Commit();
+
+            for (int i = 0; i < 23; i++)
+            {
+                AddDoc(writer);
+            }
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(0, reader.NumDocs);
+
+            writer.PrepareCommit();
+
+            IndexReader reader2 = DirectoryReader.Open(dir);
+            Assert.AreEqual(0, reader2.NumDocs);
+
+            writer.Commit();
+
+            IndexReader reader3 = DirectoryReader.OpenIfChanged(reader);
+            Assert.IsNotNull(reader3);
+            Assert.AreEqual(0, reader.NumDocs);
+            Assert.AreEqual(0, reader2.NumDocs);
+            Assert.AreEqual(23, reader3.NumDocs);
+            reader.Dispose();
+            reader2.Dispose();
+
+            for (int i = 0; i < 17; i++)
+            {
+                AddDoc(writer);
+            }
+
+            Assert.AreEqual(23, reader3.NumDocs);
+            reader3.Dispose();
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(23, reader.NumDocs);
+            reader.Dispose();
+
+            writer.PrepareCommit();
+
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(23, reader.NumDocs);
+            reader.Dispose();
+
+            writer.Commit();
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(40, reader.NumDocs);
+            reader.Dispose();
+            writer.Dispose();
+            dir.Dispose();
+        }
+
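+        // The two-phase pattern the LUCENE-1274 tests exercise, in miniature (an
+        // illustrative sketch using only the IndexWriter calls shown above):
+        // PrepareCommit() makes the pending commit durable but invisible to
+        // readers; Commit() publishes it, while Rollback() discards it.
+        private static void TwoPhaseCommitSketch(IndexWriter writer, bool succeed)
+        {
+            writer.PrepareCommit(); // phase 1: flush + fsync, still invisible to readers
+            if (succeed)
+            {
+                writer.Commit(); // phase 2: publish the prepared commit
+            }
+            else
+            {
+                writer.Rollback(); // discard it (note: this also closes the writer)
+            }
+        }
+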
+        // LUCENE-1274: test writer.PrepareCommit()
+        [Test]
+        public virtual void TestPrepareCommitRollback()
+        {
+            Directory dir = NewDirectory();
+            if (dir is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)dir).PreventDoubleWrite = false;
+            }
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(5)));
+            writer.Commit();
+
+            for (int i = 0; i < 23; i++)
+            {
+                AddDoc(writer);
+            }
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(0, reader.NumDocs);
+
+            writer.PrepareCommit();
+
+            IndexReader reader2 = DirectoryReader.Open(dir);
+            Assert.AreEqual(0, reader2.NumDocs);
+
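+            // Rollback discards the prepared-but-uncommitted changes; the pending
+            // commit never becomes visible (hence OpenIfChanged returns null below).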
+            writer.Rollback();
+
+            IndexReader reader3 = DirectoryReader.OpenIfChanged(reader);
+            Assert.IsNull(reader3);
+            Assert.AreEqual(0, reader.NumDocs);
+            Assert.AreEqual(0, reader2.NumDocs);
+            reader.Dispose();
+            reader2.Dispose();
+
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            for (int i = 0; i < 17; i++)
+            {
+                AddDoc(writer);
+            }
+
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(0, reader.NumDocs);
+            reader.Dispose();
+
+            writer.PrepareCommit();
+
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(0, reader.NumDocs);
+            reader.Dispose();
+
+            writer.Commit();
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(17, reader.NumDocs);
+            reader.Dispose();
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-1274
+        [Test]
+        public virtual void TestPrepareCommitNoChanges()
+        {
+            Directory dir = NewDirectory();
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer.PrepareCommit();
+            writer.Commit();
+            writer.Dispose();
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(0, reader.NumDocs);
+            reader.Dispose();
+            dir.Dispose();
+        }
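+
+        // For reference, the two-phase commit pattern the LUCENE-1274 tests above
+        // exercise, as a minimal sketch using only APIs already called in this file:
+        //
+        //   writer.PrepareCommit();   // phase 1: flush + sync; invisible to readers
+        //   // ... coordinate with any other transactional resources ...
+        //   writer.Commit();          // phase 2: publish the pending commit
+        //   // or writer.Rollback();  // abort and discard the pending commit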
+
+        // LUCENE-1382
+        [Test]
+        public virtual void TestCommitUserData()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
+            for (int j = 0; j < 17; j++)
+            {
+                AddDoc(w);
+            }
+            w.Dispose();
+
+            DirectoryReader r = DirectoryReader.Open(dir);
+            // commit user data was never set for this index
+            Assert.AreEqual(0, r.IndexCommit.UserData.Count);
+            r.Dispose();
+
+            w = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
+            for (int j = 0; j < 17; j++)
+            {
+                AddDoc(w);
+            }
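+            // Attach arbitrary user data to the next commit; it is stored with the
+            // commit point and read back below via IndexCommit.UserData.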
+            IDictionary<string, string> data = new Dictionary<string, string>();
+            data["label"] = "test1";
+            w.CommitData = data;
+            w.Dispose();
+
+            r = DirectoryReader.Open(dir);
+            Assert.AreEqual("test1", r.IndexCommit.UserData["label"]);
+            r.Dispose();
+
+            w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            w.ForceMerge(1);
+            w.Dispose();
+
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Copied from <see cref="TestIndexWriter.AddDoc(IndexWriter)"/>
+        /// to remove inter-class dependency on <see cref="TestIndexWriter"/>
+        /// </summary>
+        private void AddDoc(IndexWriter writer)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+            writer.AddDocument(doc);
+        }
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Copied from <see cref="TestIndexWriter.AddDocWithIndex(IndexWriter, int)"/>
+        /// to remove inter-class dependency on <see cref="TestIndexWriter"/>.
+        /// </summary>
+        private void AddDocWithIndex(IndexWriter writer, int index)
+        {
+            Document doc = new Document();
+            doc.Add(NewField("content", "aaa " + index, StoredTextType));
+            doc.Add(NewField("id", "" + index, StoredTextType));
+            writer.AddDocument(doc);
+        }
+
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexWriterConfig.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterConfig.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterConfig.cs
new file mode 100644
index 0000000..3dcb3ff
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterConfig.cs
@@ -0,0 +1,539 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Util;
+using System.Collections.Generic;
+using System.Reflection;
+
+namespace Lucene.Net.Index
+{
+    //using AlreadySetException = Lucene.Net.Util.SetOnce.AlreadySetException;
+    using NUnit.Framework;
+    using Codec = Lucene.Net.Codecs.Codec;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using IndexingChain = Lucene.Net.Index.DocumentsWriterPerThread.IndexingChain;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using InfoStream = Lucene.Net.Util.InfoStream;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using Store = Field.Store;
+
+    [TestFixture]
+    public class TestIndexWriterConfig : LuceneTestCase
+    {
+        private sealed class MySimilarity : DefaultSimilarity
+        {
+            // Does not implement anything - used only for type checking on IndexWriterConfig.
+        }
+
+        private sealed class MyIndexingChain : IndexingChain
+        {
+            // Does not implement anything - used only for type checking on IndexWriterConfig.
+            internal override DocConsumer GetChain(DocumentsWriterPerThread documentsWriter)
+            {
+                return null;
+            }
+        }
+
+        [Test]
+        public virtual void TestDefaults()
+        {
+            IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            Assert.AreEqual(typeof(MockAnalyzer), conf.Analyzer.GetType());
+            Assert.IsNull(conf.IndexCommit);
+            Assert.AreEqual(typeof(KeepOnlyLastCommitDeletionPolicy), conf.IndexDeletionPolicy.GetType());
+#if FEATURE_TASKMERGESCHEDULER
+            Assert.AreEqual(typeof(TaskMergeScheduler), conf.MergeScheduler.GetType());
+#else
+            Assert.AreEqual(typeof(ConcurrentMergeScheduler), conf.MergeScheduler.GetType());
+#endif
+            Assert.AreEqual(OpenMode.CREATE_OR_APPEND, conf.OpenMode);
+            // we don't need to assert this, it should be unspecified
+            Assert.IsTrue(IndexSearcher.DefaultSimilarity == conf.Similarity);
+            Assert.AreEqual(IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, conf.TermIndexInterval);
+            Assert.AreEqual(IndexWriterConfig.DefaultWriteLockTimeout, conf.WriteLockTimeout);
+            Assert.AreEqual(IndexWriterConfig.WRITE_LOCK_TIMEOUT, IndexWriterConfig.DefaultWriteLockTimeout);
+            Assert.AreEqual(IndexWriterConfig.DEFAULT_MAX_BUFFERED_DELETE_TERMS, conf.MaxBufferedDeleteTerms);
+            Assert.AreEqual(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB, conf.RAMBufferSizeMB, 0.0);
+            Assert.AreEqual(IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS, conf.MaxBufferedDocs);
+            Assert.AreEqual(IndexWriterConfig.DEFAULT_READER_POOLING, conf.ReaderPooling);
+            Assert.IsTrue(DocumentsWriterPerThread.DefaultIndexingChain == conf.IndexingChain);
+            Assert.IsNull(conf.MergedSegmentWarmer);
+            Assert.AreEqual(IndexWriterConfig.DEFAULT_READER_TERMS_INDEX_DIVISOR, conf.ReaderTermsIndexDivisor);
+            Assert.AreEqual(typeof(TieredMergePolicy), conf.MergePolicy.GetType());
+            Assert.AreEqual(typeof(ThreadAffinityDocumentsWriterThreadPool), conf.IndexerThreadPool.GetType());
+            Assert.AreEqual(typeof(FlushByRamOrCountsPolicy), conf.FlushPolicy.GetType());
+            Assert.AreEqual(IndexWriterConfig.DEFAULT_RAM_PER_THREAD_HARD_LIMIT_MB, conf.RAMPerThreadHardLimitMB);
+            Assert.AreEqual(Codec.Default, conf.Codec);
+            Assert.AreEqual(InfoStream.Default, conf.InfoStream);
+            Assert.AreEqual(IndexWriterConfig.DEFAULT_USE_COMPOUND_FILE_SYSTEM, conf.UseCompoundFile);
+            // Sanity check - validate that all getters are covered.
+            HashSet<string> getters = new HashSet<string>();
+            getters.Add("getAnalyzer");
+            getters.Add("getIndexCommit");
+            getters.Add("getIndexDeletionPolicy");
+            getters.Add("getMaxFieldLength");
+            getters.Add("getMergeScheduler");
+            getters.Add("getOpenMode");
+            getters.Add("getSimilarity");
+            getters.Add("getTermIndexInterval");
+            getters.Add("getWriteLockTimeout");
+            getters.Add("getDefaultWriteLockTimeout");
+            getters.Add("getMaxBufferedDeleteTerms");
+            getters.Add("getRAMBufferSizeMB");
+            getters.Add("getMaxBufferedDocs");
+            getters.Add("getIndexingChain");
+            getters.Add("getMergedSegmentWarmer");
+            getters.Add("getMergePolicy");
+            getters.Add("getMaxThreadStates");
+            getters.Add("getReaderPooling");
+            getters.Add("getIndexerThreadPool");
+            getters.Add("getReaderTermsIndexDivisor");
+            getters.Add("getFlushPolicy");
+            getters.Add("getRAMPerThreadHardLimitMB");
+            getters.Add("getCodec");
+            getters.Add("getInfoStream");
+            getters.Add("getUseCompoundFile");
+
+            foreach (MethodInfo m in typeof(IndexWriterConfig).GetMethods())
+            {
+                if (m.DeclaringType == typeof(IndexWriterConfig) && m.Name.StartsWith("get") && !m.Name.StartsWith("get_"))
+                {
+                    Assert.IsTrue(getters.Contains(m.Name), "method " + m.Name + " is not tested for defaults");
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestSettersChaining()
+        {
+            // Ensures that every setter returns IndexWriterConfig to allow chaining.
+            HashSet<string> liveSetters = new HashSet<string>();
+            HashSet<string> allSetters = new HashSet<string>();
+            foreach (MethodInfo m in typeof(IndexWriterConfig).GetMethods())
+            {
+                if (m.Name.StartsWith("Set") && !m.IsStatic)
+                {
+                    allSetters.Add(m.Name);
+                    // Setters overridden from LiveIndexWriterConfig are returned twice: once with
+                    // an IndexWriterConfig return type and once with LiveIndexWriterConfig. In Java
+                    // the ones from LiveIndexWriterConfig are marked 'synthetic', so we just collect
+                    // them and assert at the end that we also received them from IWC.
+                    // In C# they are not marked synthetic, so we look at the declaring type instead.
+                    if (m.DeclaringType.Name == "LiveIndexWriterConfig")
+                    {
+                        liveSetters.Add(m.Name);
+                    }
+                    else
+                    {
+                        Assert.AreEqual(typeof(IndexWriterConfig), m.ReturnType, "method " + m.Name + " does not return IndexWriterConfig");
+                    }
+                }
+            }
+            foreach (string setter in liveSetters)
+            {
+                Assert.IsTrue(allSetters.Contains(setter), "setter method not overridden by IndexWriterConfig: " + setter);
+            }
+        }
+
+        [Test]
+        public virtual void TestReuse()
+        {
+            Directory dir = NewDirectory();
+            // test that IWC cannot be reused across two IWs
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null);
+            (new RandomIndexWriter(Random(), dir, conf)).Dispose();
+
+            // this should fail
+            try
+            {
+                Assert.IsNotNull(new RandomIndexWriter(Random(), dir, conf));
+                Assert.Fail("should have hit AlreadySetException");
+            }
+#pragma warning disable 168
+            catch (SetOnce<IndexWriter>.AlreadySetException e)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            // also cloning it won't help, after it has been used already
+            try
+            {
+                Assert.IsNotNull(new RandomIndexWriter(Random(), dir, (IndexWriterConfig)conf.Clone()));
+                Assert.Fail("should have hit AlreadySetException");
+            }
+#pragma warning disable 168
+            catch (SetOnce<IndexWriter>.AlreadySetException e)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            // if it's cloned in advance, it should be ok
+            conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null);
+            (new RandomIndexWriter(Random(), dir, (IndexWriterConfig)conf.Clone())).Dispose();
+            (new RandomIndexWriter(Random(), dir, (IndexWriterConfig)conf.Clone())).Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestOverrideGetters()
+        {
+            // Test that IndexWriterConfig overrides all getters, so that the API docs
+            // contain all methods for users. Also ensures that IndexWriterConfig
+            // doesn't declare getters that are not declared on LiveIWC.
+            HashSet<string> liveGetters = new HashSet<string>();
+            foreach (MethodInfo m in typeof(LiveIndexWriterConfig).GetMethods())
+            {
+                if (m.Name.StartsWith("get") && !m.IsStatic)
+                {
+                    liveGetters.Add(m.Name);
+                }
+            }
+
+            foreach (MethodInfo m in typeof(IndexWriterConfig).GetMethods())
+            {
+                if (m.Name.StartsWith("get") && !m.Name.StartsWith("get_") && !m.IsStatic)
+                {
+                    Assert.AreEqual(typeof(IndexWriterConfig), m.DeclaringType, "method " + m.Name + " not overridden by IndexWriterConfig");
+                    Assert.IsTrue(liveGetters.Contains(m.Name), "method " + m.Name + " not declared on LiveIndexWriterConfig");
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestConstants()
+        {
+            // Tests that the values of the constants do not change
+            Assert.AreEqual(1000, IndexWriterConfig.WRITE_LOCK_TIMEOUT);
+            Assert.AreEqual(32, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL);
+            Assert.AreEqual(-1, IndexWriterConfig.DISABLE_AUTO_FLUSH);
+            Assert.AreEqual(IndexWriterConfig.DISABLE_AUTO_FLUSH, IndexWriterConfig.DEFAULT_MAX_BUFFERED_DELETE_TERMS);
+            Assert.AreEqual(IndexWriterConfig.DISABLE_AUTO_FLUSH, IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS);
+            Assert.AreEqual(16.0, IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB, 0.0);
+            Assert.AreEqual(false, IndexWriterConfig.DEFAULT_READER_POOLING);
+            Assert.AreEqual(true, IndexWriterConfig.DEFAULT_USE_COMPOUND_FILE_SYSTEM);
+            Assert.AreEqual(DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, IndexWriterConfig.DEFAULT_READER_TERMS_INDEX_DIVISOR);
+        }
+
+        [Test]
+        public virtual void TestToString()
+        {
+            string str = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).ToString();
+            foreach (System.Reflection.FieldInfo f in (typeof(IndexWriterConfig).GetFields(
+                BindingFlags.Instance |
+                BindingFlags.NonPublic |
+                BindingFlags.Public |
+                BindingFlags.DeclaredOnly |
+                BindingFlags.Static)))
+            {
+                if (f.IsStatic)
+                {
+                    // Skip static fields; they are only constants
+                    continue;
+                }
+                else if ("indexingChain".Equals(f.Name))
+                {
+                    // indexingChain is a package-private setting and thus is not output by
+                    // toString.
+                    continue;
+                }
+                if (f.Name.Equals("inUseByIndexWriter"))
+                {
+                    continue;
+                }
+                Assert.IsTrue(str.IndexOf(f.Name) != -1, f.Name + " not found in toString");
+            }
+        }
+
+        [Test]
+        public virtual void TestClone()
+        {
+            IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriterConfig clone = (IndexWriterConfig)conf.Clone();
+
+            // Make sure parameters that can't be reused are cloned
+            IndexDeletionPolicy delPolicy = conf.IndexDeletionPolicy;
+            IndexDeletionPolicy delPolicyClone = clone.IndexDeletionPolicy;
+            Assert.IsTrue(delPolicy.GetType() == delPolicyClone.GetType() && (delPolicy != delPolicyClone || delPolicy.Clone() == delPolicyClone.Clone()));
+
+            FlushPolicy flushPolicy = conf.FlushPolicy;
+            FlushPolicy flushPolicyClone = clone.FlushPolicy;
+            Assert.IsTrue(flushPolicy.GetType() == flushPolicyClone.GetType() && (flushPolicy != flushPolicyClone || flushPolicy.Clone() == flushPolicyClone.Clone()));
+
+            DocumentsWriterPerThreadPool pool = conf.IndexerThreadPool;
+            DocumentsWriterPerThreadPool poolClone = clone.IndexerThreadPool;
+            Assert.IsTrue(pool.GetType() == poolClone.GetType() && (pool != poolClone || pool.Clone() == poolClone.Clone()));
+
+            MergePolicy mergePolicy = conf.MergePolicy;
+            MergePolicy mergePolicyClone = clone.MergePolicy;
+            Assert.IsTrue(mergePolicy.GetType() == mergePolicyClone.GetType() && (mergePolicy != mergePolicyClone || mergePolicy.Clone() == mergePolicyClone.Clone()));
+
+            IMergeScheduler mergeSched = conf.MergeScheduler;
+            IMergeScheduler mergeSchedClone = clone.MergeScheduler;
+            Assert.IsTrue(mergeSched.GetType() == mergeSchedClone.GetType() && (mergeSched != mergeSchedClone || mergeSched.Clone() == mergeSchedClone.Clone()));
+
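+            // Changing the original config must not affect the already-created clone: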
+            conf.SetMergeScheduler(new SerialMergeScheduler());
+#if FEATURE_TASKMERGESCHEDULER
+            Assert.AreEqual(typeof(TaskMergeScheduler), clone.MergeScheduler.GetType());
+#else
+            Assert.AreEqual(typeof(ConcurrentMergeScheduler), clone.MergeScheduler.GetType());
+#endif
+        }
+
+        [Test]
+        public virtual void TestInvalidValues()
+        {
+            IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+
+            // Test IndexDeletionPolicy
+            Assert.AreEqual(typeof(KeepOnlyLastCommitDeletionPolicy), conf.IndexDeletionPolicy.GetType());
+            conf.SetIndexDeletionPolicy(new SnapshotDeletionPolicy(null));
+            Assert.AreEqual(typeof(SnapshotDeletionPolicy), conf.IndexDeletionPolicy.GetType());
+            try
+            {
+                conf.SetIndexDeletionPolicy(null);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // ok
+            }
+
+            // Test MergeScheduler
+#if FEATURE_TASKMERGESCHEDULER
+            Assert.AreEqual(typeof(TaskMergeScheduler), conf.MergeScheduler.GetType());
+#else
+            Assert.AreEqual(typeof(ConcurrentMergeScheduler), conf.MergeScheduler.GetType());
+#endif
+            conf.SetMergeScheduler(new SerialMergeScheduler());
+            Assert.AreEqual(typeof(SerialMergeScheduler), conf.MergeScheduler.GetType());
+            try
+            {
+                conf.SetMergeScheduler(null);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // ok
+            }
+
+            // Test Similarity:
+            // we shouldn't assert what the default is, just that it's not null.
+            Assert.IsTrue(IndexSearcher.DefaultSimilarity == conf.Similarity);
+            conf.SetSimilarity(new MySimilarity());
+            Assert.AreEqual(typeof(MySimilarity), conf.Similarity.GetType());
+            try
+            {
+                conf.SetSimilarity(null);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // ok
+            }
+
+            // Test IndexingChain
+            Assert.IsTrue(DocumentsWriterPerThread.DefaultIndexingChain == conf.IndexingChain);
+            conf.SetIndexingChain(new MyIndexingChain());
+            Assert.AreEqual(typeof(MyIndexingChain), conf.IndexingChain.GetType());
+            try
+            {
+                conf.SetIndexingChain(null);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // ok
+            }
+
+            try
+            {
+                conf.SetMaxBufferedDeleteTerms(0);
+                Assert.Fail("should not have succeeded to set maxBufferedDeleteTerms to 0");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // this is expected
+            }
+
+            try
+            {
+                conf.SetMaxBufferedDocs(1);
+                Assert.Fail("should not have succeeded to set maxBufferedDocs to 1");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // this is expected
+            }
+
+            try
+            {
+                // Disable both MAX_BUF_DOCS and RAM_SIZE_MB
+                conf.SetMaxBufferedDocs(4);
+                conf.SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+                conf.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+                Assert.Fail("should not have succeeded to disable maxBufferedDocs when ramBufferSizeMB is disabled as well");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // this is expected
+            }
+
+            conf.SetRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB);
+            conf.SetMaxBufferedDocs(IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS);
+            try
+            {
+                conf.SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+                Assert.Fail("should not have succeeded to disable ramBufferSizeMB when maxBufferedDocs is disabled as well");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // this is expected
+            }
+
+            // Test setReaderTermsIndexDivisor
+            try
+            {
+                conf.SetReaderTermsIndexDivisor(0);
+                Assert.Fail("should not have succeeded to set termsIndexDivisor to 0");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // this is expected
+            }
+
+            // Setting to -1 is ok
+            conf.SetReaderTermsIndexDivisor(-1);
+            try
+            {
+                conf.SetReaderTermsIndexDivisor(-2);
+                Assert.Fail("should not have succeeded to set termsIndexDivisor to < -1");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // this is expected
+            }
+
+            try
+            {
+                conf.SetRAMPerThreadHardLimitMB(2048);
+                Assert.Fail("should not have succeeded to set RAMPerThreadHardLimitMB to >= 2048");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // this is expected
+            }
+
+            try
+            {
+                conf.SetRAMPerThreadHardLimitMB(0);
+                Assert.Fail("should not have succeeded to set RAMPerThreadHardLimitMB to 0");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // this is expected
+            }
+
+            // Test MergePolicy
+            Assert.AreEqual(typeof(TieredMergePolicy), conf.MergePolicy.GetType());
+            conf.SetMergePolicy(new LogDocMergePolicy());
+            Assert.AreEqual(typeof(LogDocMergePolicy), conf.MergePolicy.GetType());
+            try
+            {
+                conf.SetMergePolicy(null);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // ok
+            }
+        }
+
+        [Test]
+        public virtual void TestLiveChangeToCFS()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetMergePolicy(NewLogMergePolicy(true));
+            // Start false:
+            iwc.SetUseCompoundFile(false);
+            iwc.MergePolicy.NoCFSRatio = 0.0d;
+            IndexWriter w = new IndexWriter(dir, iwc);
+            // Change to true:
+            w.Config.SetUseCompoundFile(true);
+
+            Document doc = new Document();
+            doc.Add(NewStringField("field", "foo", Store.NO));
+            w.AddDocument(doc);
+            w.Commit();
+            Assert.IsTrue(w.NewestSegment().Info.UseCompoundFile, "Expected CFS after commit");
+
+            doc.Add(NewStringField("field", "foo", Store.NO));
+            w.AddDocument(doc);
+            w.Commit();
+            w.ForceMerge(1);
+            w.Commit();
+
+            // no compound files after merge
+            Assert.IsFalse(w.NewestSegment().Info.UseCompoundFile, "Expected Non-CFS after merge");
+
+            MergePolicy lmp = w.Config.MergePolicy;
+            lmp.NoCFSRatio = 1.0;
+            lmp.MaxCFSSegmentSizeMB = double.PositiveInfinity;
+
+            w.AddDocument(doc);
+            w.ForceMerge(1);
+            w.Commit();
+            Assert.IsTrue(w.NewestSegment().Info.UseCompoundFile, "Expected CFS after merge");
+            w.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs b/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs
new file mode 100644
index 0000000..da0395e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestConcurrentMergeScheduler.cs
@@ -0,0 +1,438 @@
+using System;
+using System.Diagnostics;
+using System.Threading;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.IO;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using Lucene41PostingsFormat = Lucene.Net.Codecs.Lucene41.Lucene41PostingsFormat;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using StringField = StringField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+    using Attributes;
+
+    [TestFixture]
+    public class TestConcurrentMergeScheduler : LuceneTestCase
+    {
+        private class FailOnlyOnFlush : MockDirectoryWrapper.Failure
+        {
+            private readonly TestConcurrentMergeScheduler OuterInstance;
+
+            public FailOnlyOnFlush(TestConcurrentMergeScheduler outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            internal new bool DoFail;
+            internal bool HitExc;
+
+            public override void SetDoFail()
+            {
+                this.DoFail = true;
+                HitExc = false;
+            }
+
+            public override void ClearDoFail()
+            {
+                this.DoFail = false;
+            }
+
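+            // Randomly throws an IOException while a Flush() (but not a Close()) is
+            // on the call stack, simulating a failure in the middle of flushing.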
+            public override void Eval(MockDirectoryWrapper dir)
+            {
+                if (DoFail && TestThread() && Random().NextBoolean())
+                {
+                    bool isDoFlush = Util.StackTraceHelper.DoesStackTraceContainMethod("Flush");
+                    bool isClose = Util.StackTraceHelper.DoesStackTraceContainMethod("Close");
+
+                    if (isDoFlush && !isClose)
+                    {
+                        HitExc = true;
+                        throw new IOException(Thread.CurrentThread.Name + ": now failing during flush");
+                    }
+                }
+            }
+        }
+
+        // Make sure running BG merges still work fine even when
+        // we are hitting exceptions during flushing.
+        [Test]
+        public virtual void TestFlushExceptions()
+        {
+            MockDirectoryWrapper directory = NewMockDirectory();
+            FailOnlyOnFlush failure = new FailOnlyOnFlush(this);
+            directory.FailOn(failure);
+
+            IndexWriter writer = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
+            Document doc = new Document();
+            Field idField = NewStringField("id", "", Field.Store.YES);
+            doc.Add(idField);
+            int extraCount = 0;
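+            // extraCount tracks the documents added by the retry loop below whose
+            // flush succeeded before the injected IOException was finally hit.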
+
+            for (int i = 0; i < 10; i++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: iter=" + i);
+                }
+
+                for (int j = 0; j < 20; j++)
+                {
+                    idField.SetStringValue(Convert.ToString(i * 20 + j));
+                    writer.AddDocument(doc);
+                }
+
+                // must cycle here because sometimes the merge flushes
+                // the doc we just added and so there's nothing to
+                // flush, and we don't hit the exception
+                while (true)
+                {
+                    writer.AddDocument(doc);
+                    failure.SetDoFail();
+                    try
+                    {
+                        writer.Flush(true, true);
+                        if (failure.HitExc)
+                        {
+                            Assert.Fail("failed to hit IOException");
+                        }
+                        extraCount++;
+                    }
+                    catch (IOException ioe)
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine(ioe.StackTrace);
+                        }
+                        failure.ClearDoFail();
+                        break;
+                    }
+                }
+                Assert.AreEqual(20 * (i + 1) + extraCount, writer.NumDocs);
+            }
+
+            writer.Dispose();
+            IndexReader reader = DirectoryReader.Open(directory);
+            Assert.AreEqual(200 + extraCount, reader.NumDocs);
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        // Test that deletes committed after a merge started and
+        // before it finishes, are correctly merged back:
+        [Test]
+        public virtual void TestDeleteMerging()
+        {
+            Directory directory = NewDirectory();
+
+            LogDocMergePolicy mp = new LogDocMergePolicy();
+            // Force degenerate merging so we can get a mix of
+            // merging of segments with and without deletes at the
+            // start:
+            mp.MinMergeDocs = 1000;
+            IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(mp));
+
+            Document doc = new Document();
+            Field idField = NewStringField("id", "", Field.Store.YES);
+            doc.Add(idField);
+            for (int i = 0; i < 10; i++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: cycle");
+                }
+                for (int j = 0; j < 100; j++)
+                {
+                    idField.SetStringValue(Convert.ToString(i * 100 + j));
+                    writer.AddDocument(doc);
+                }
+
+                int delID = i;
+                while (delID < 100 * (1 + i))
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: del " + delID);
+                    }
+                    writer.DeleteDocuments(new Term("id", "" + delID));
+                    delID += 10;
+                }
+
+                writer.Commit();
+            }
+
+            writer.Dispose();
+            IndexReader reader = DirectoryReader.Open(directory);
+            // Verify that we did not lose any deletes...
+            Assert.AreEqual(450, reader.NumDocs);
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit's Timeout attribute is not available on .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public virtual void TestNoExtraFiles()
+        {
+            Directory directory = NewDirectory();
+            IndexWriter writer = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
+
+            for (int iter = 0; iter < 7; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: iter=" + iter);
+                }
+
+                for (int j = 0; j < 21; j++)
+                {
+                    Document doc = new Document();
+                    doc.Add(NewTextField("content", "a b c", Field.Store.NO));
+                    writer.AddDocument(doc);
+                }
+
+                writer.Dispose();
+                TestIndexWriter.AssertNoUnreferencedFiles(directory, "testNoExtraFiles");
+
+                // Reopen
+                writer = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(2));
+            }
+
+            writer.Dispose();
+
+            directory.Dispose();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit's Timeout attribute is not available on .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public virtual void TestNoWaitClose()
+        {
+            Directory directory = NewDirectory();
+            Document doc = new Document();
+            Field idField = NewStringField("id", "", Field.Store.YES);
+            doc.Add(idField);
+
+            IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(100)));
+
+            for (int iter = 0; iter < 10; iter++)
+            {
+                for (int j = 0; j < 201; j++)
+                {
+                    idField.SetStringValue(Convert.ToString(iter * 201 + j));
+                    writer.AddDocument(doc);
+                }
+
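+                // Delete every 5th id among the first 100 docs of this batch (20 in
+                // all); with the extra doc added below, each iteration nets 182 docs.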
+                int delID = iter * 201;
+                for (int j = 0; j < 20; j++)
+                {
+                    writer.DeleteDocuments(new Term("id", Convert.ToString(delID)));
+                    delID += 5;
+                }
+
+                // Force a bunch of merge threads to kick off so we
+                // stress out aborting them on close:
+                ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 3;
+                writer.AddDocument(doc);
+                writer.Commit();
+
+                writer.Dispose(false);
+
+                IndexReader reader = DirectoryReader.Open(directory);
+                Assert.AreEqual((1 + iter) * 182, reader.NumDocs);
+                reader.Dispose();
+
+                // Reopen
+                writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMergePolicy(NewLogMergePolicy(100)));
+            }
+            writer.Dispose();
+
+            directory.Dispose();
+        }
+
+        // LUCENE-4544
+        [Test]
+        public virtual void TestMaxMergeCount()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+
+            int maxMergeCount = TestUtil.NextInt(Random(), 1, 5);
+            int maxMergeThreads = TestUtil.NextInt(Random(), 1, maxMergeCount);
+            CountdownEvent enoughMergesWaiting = new CountdownEvent(maxMergeCount);
+            AtomicInt32 runningMergeCount = new AtomicInt32(0);
+            AtomicBoolean failed = new AtomicBoolean();
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: maxMergeCount=" + maxMergeCount + " maxMergeThreads=" + maxMergeThreads);
+            }
+
+            ConcurrentMergeScheduler cms = new ConcurrentMergeSchedulerAnonymousInnerClassHelper(this, maxMergeCount, enoughMergesWaiting, runningMergeCount, failed);
+            cms.SetMaxMergesAndThreads(maxMergeCount, maxMergeThreads);
+            iwc.SetMergeScheduler(cms);
+            iwc.SetMaxBufferedDocs(2);
+
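+            // Tiny segments plus 2-at-a-time tiered merges produce many concurrent
+            // merges quickly, exercising the scheduler's maxMergeCount limit.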
+            TieredMergePolicy tmp = new TieredMergePolicy();
+            iwc.SetMergePolicy(tmp);
+            tmp.MaxMergeAtOnce = 2;
+            tmp.SegmentsPerTier = 2;
+
+            IndexWriter w = new IndexWriter(dir, iwc);
+            Document doc = new Document();
+            doc.Add(NewField("field", "field", TextField.TYPE_NOT_STORED));
+            while (enoughMergesWaiting.CurrentCount != 0 && !failed.Get())
+            {
+                for (int i = 0; i < 10; i++)
+                {
+                    w.AddDocument(doc);
+                }
+            }
+            w.Dispose(false);
+            dir.Dispose();
+        }
+
+        private class ConcurrentMergeSchedulerAnonymousInnerClassHelper : ConcurrentMergeScheduler
+        {
+            private readonly TestConcurrentMergeScheduler OuterInstance;
+
+            private int maxMergeCount;
+            private CountdownEvent EnoughMergesWaiting;
+            private AtomicInt32 RunningMergeCount;
+            private AtomicBoolean Failed;
+
+            public ConcurrentMergeSchedulerAnonymousInnerClassHelper(TestConcurrentMergeScheduler outerInstance, int maxMergeCount, CountdownEvent enoughMergesWaiting, AtomicInt32 runningMergeCount, AtomicBoolean failed)
+            {
+                this.OuterInstance = outerInstance;
+                this.maxMergeCount = maxMergeCount;
+                this.EnoughMergesWaiting = enoughMergesWaiting;
+                this.RunningMergeCount = runningMergeCount;
+                this.Failed = failed;
+            }
+
+            protected override void DoMerge(MergePolicy.OneMerge merge)
+            {
+                try
+                {
+                    // Stall all incoming merges until we see
+                    // maxMergeCount:
+                    int count = RunningMergeCount.IncrementAndGet();
+                    try
+                    {
+                        Assert.IsTrue(count <= maxMergeCount, "count=" + count + " vs maxMergeCount=" + maxMergeCount);
+                        EnoughMergesWaiting.Signal();
+
+                        // Stall this merge until we see exactly
+                        // maxMergeCount merges waiting
+                        while (true)
+                        {
+                            // wait for 10 milliseconds
+                            if (EnoughMergesWaiting.Wait(new TimeSpan(0, 0, 0, 0, 10)) || Failed.Get())
+                            {
+                                break;
+                            }
+                        }
+                        // Then sleep a bit to give a chance for the bug
+                        // (too many pending merges) to appear:
+                        Thread.Sleep(20);
+                        base.DoMerge(merge);
+                    }
+                    finally
+                    {
+                        RunningMergeCount.DecrementAndGet();
+                    }
+                }
+                catch (Exception t)
+                {
+                    Failed.Set(true);
+                    m_writer.MergeFinish(merge);
+                    throw new Exception(t.Message, t);
+                }
+            }
+        }
+
+        private class TrackingCMS : ConcurrentMergeScheduler
+        {
+            internal long TotMergedBytes;
+
+            public TrackingCMS()
+            {
+                SetMaxMergesAndThreads(5, 5);
+            }
+
+            protected override void DoMerge(MergePolicy.OneMerge merge)
+            {
+                TotMergedBytes += merge.TotalBytesSize;
+                base.DoMerge(merge);
+            }
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit's Timeout attribute is not available on .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public virtual void TestTotalBytesSize()
+        {
+            Directory d = NewDirectory();
+            if (d is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)d).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
+            }
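+            // TrackingCMS sums TotalBytesSize over every merge it runs; the assert
+            // at the end verifies that at least one non-empty merge happened.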
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetMaxBufferedDocs(5);
+            iwc.SetMergeScheduler(new TrackingCMS());
+            if (TestUtil.GetPostingsFormat("id").Equals("SimpleText"))
+            {
+                // don't use SimpleText here; substitute the Lucene41 postings format instead
+                iwc.SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat()));
+            }
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, iwc);
+            for (int i = 0; i < 1000; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", "" + i, Field.Store.NO));
+                w.AddDocument(doc);
+
+                if (Random().NextBoolean())
+                {
+                    w.DeleteDocuments(new Term("id", "" + Random().Next(i + 1)));
+                }
+            }
+            Assert.IsTrue(((TrackingCMS)w.w.Config.MergeScheduler).TotMergedBytes != 0);
+            w.Dispose();
+            d.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestConsistentFieldNumbers.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestConsistentFieldNumbers.cs b/src/Lucene.Net.Tests/Index/TestConsistentFieldNumbers.cs
new file mode 100644
index 0000000..b397eb3
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestConsistentFieldNumbers.cs
@@ -0,0 +1,421 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using FailOnNonBulkMergesInfoStream = Lucene.Net.Util.FailOnNonBulkMergesInfoStream;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using StoredField = StoredField;
+    using StringField = StringField;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestConsistentFieldNumbers : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestSameFieldNumbersAcrossSegments()
+        {
+            for (int i = 0; i < 2; i++)
+            {
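+                // i == 0: commit between the two documents; i == 1: dispose and
+                // reopen the writer instead. Field numbering must match either way.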
+                Directory dir = NewDirectory();
+                IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
+
+                Document d1 = new Document();
+                d1.Add(new StringField("f1", "first field", Field.Store.YES));
+                d1.Add(new StringField("f2", "second field", Field.Store.YES));
+                writer.AddDocument(d1);
+
+                if (i == 1)
+                {
+                    writer.Dispose();
+                    writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
+                }
+                else
+                {
+                    writer.Commit();
+                }
+
+                Document d2 = new Document();
+                FieldType customType2 = new FieldType(TextField.TYPE_STORED);
+                customType2.StoreTermVectors = true;
+                d2.Add(new TextField("f2", "second field", Field.Store.NO));
+                d2.Add(new Field("f1", "first field", customType2));
+                d2.Add(new TextField("f3", "third field", Field.Store.NO));
+                d2.Add(new TextField("f4", "fourth field", Field.Store.NO));
+                writer.AddDocument(d2);
+
+                writer.Dispose();
+
+                SegmentInfos sis = new SegmentInfos();
+                sis.Read(dir);
+                Assert.AreEqual(2, sis.Count);
+
+                FieldInfos fis1 = SegmentReader.ReadFieldInfos(sis.Info(0));
+                FieldInfos fis2 = SegmentReader.ReadFieldInfos(sis.Info(1));
+
+                Assert.AreEqual("f1", fis1.FieldInfo(0).Name);
+                Assert.AreEqual("f2", fis1.FieldInfo(1).Name);
+                Assert.AreEqual("f1", fis2.FieldInfo(0).Name);
+                Assert.AreEqual("f2", fis2.FieldInfo(1).Name);
+                Assert.AreEqual("f3", fis2.FieldInfo(2).Name);
+                Assert.AreEqual("f4", fis2.FieldInfo(3).Name);
+
+                writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                writer.ForceMerge(1);
+                writer.Dispose();
+
+                sis = new SegmentInfos();
+                sis.Read(dir);
+                Assert.AreEqual(1, sis.Count);
+
+                FieldInfos fis3 = SegmentReader.ReadFieldInfos(sis.Info(0));
+
+                Assert.AreEqual("f1", fis3.FieldInfo(0).Name);
+                Assert.AreEqual("f2", fis3.FieldInfo(1).Name);
+                Assert.AreEqual("f3", fis3.FieldInfo(2).Name);
+                Assert.AreEqual("f4", fis3.FieldInfo(3).Name);
+
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestAddIndexes()
+        {
+            Directory dir1 = NewDirectory();
+            Directory dir2 = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
+
+            Document d1 = new Document();
+            d1.Add(new TextField("f1", "first field", Field.Store.YES));
+            d1.Add(new TextField("f2", "second field", Field.Store.YES));
+            writer.AddDocument(d1);
+
+            writer.Dispose();
+            writer = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
+
+            Document d2 = new Document();
+            FieldType customType2 = new FieldType(TextField.TYPE_STORED);
+            customType2.StoreTermVectors = true;
+            d2.Add(new TextField("f2", "second field", Field.Store.YES));
+            d2.Add(new Field("f1", "first field", customType2));
+            d2.Add(new TextField("f3", "third field", Field.Store.YES));
+            d2.Add(new TextField("f4", "fourth field", Field.Store.YES));
+            writer.AddDocument(d2);
+
+            writer.Dispose();
+
+            writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
+            writer.AddIndexes(dir2);
+            writer.Dispose();
+
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(dir1);
+            Assert.AreEqual(2, sis.Count);
+
+            FieldInfos fis1 = SegmentReader.ReadFieldInfos(sis.Info(0));
+            FieldInfos fis2 = SegmentReader.ReadFieldInfos(sis.Info(1));
+
+            Assert.AreEqual("f1", fis1.FieldInfo(0).Name);
+            Assert.AreEqual("f2", fis1.FieldInfo(1).Name);
+            // make sure the ordering of the "external" segment is preserved
+            Assert.AreEqual("f2", fis2.FieldInfo(0).Name);
+            Assert.AreEqual("f1", fis2.FieldInfo(1).Name);
+            Assert.AreEqual("f3", fis2.FieldInfo(2).Name);
+            Assert.AreEqual("f4", fis2.FieldInfo(3).Name);
+
+            dir1.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestFieldNumberGaps()
+        {
+            int numIters = AtLeast(13);
+            for (int i = 0; i < numIters; i++)
+            {
+                Directory dir = NewDirectory();
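+                // Each block below opens a fresh writer, adds a doc with a different
+                // mix of fields, and checks per-segment FieldInfos; fis2 ends up with
+                // a gap (null) at number 1 where "f2" is absent from that segment.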
+                {
+                    IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
+                    Document d = new Document();
+                    d.Add(new TextField("f1", "d1 first field", Field.Store.YES));
+                    d.Add(new TextField("f2", "d1 second field", Field.Store.YES));
+                    writer.AddDocument(d);
+                    writer.Dispose();
+                    SegmentInfos sis = new SegmentInfos();
+                    sis.Read(dir);
+                    Assert.AreEqual(1, sis.Count);
+                    FieldInfos fis1 = SegmentReader.ReadFieldInfos(sis.Info(0));
+                    Assert.AreEqual("f1", fis1.FieldInfo(0).Name);
+                    Assert.AreEqual("f2", fis1.FieldInfo(1).Name);
+                }
+
+                {
+                    IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(Random().NextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES : NoMergePolicy.COMPOUND_FILES));
+                    Document d = new Document();
+                    d.Add(new TextField("f1", "d2 first field", Field.Store.YES));
+                    d.Add(new StoredField("f3", new byte[] { 1, 2, 3 }));
+                    writer.AddDocument(d);
+                    writer.Dispose();
+                    SegmentInfos sis = new SegmentInfos();
+                    sis.Read(dir);
+                    Assert.AreEqual(2, sis.Count);
+                    FieldInfos fis1 = SegmentReader.ReadFieldInfos(sis.Info(0));
+                    FieldInfos fis2 = SegmentReader.ReadFieldInfos(sis.Info(1));
+                    Assert.AreEqual("f1", fis1.FieldInfo(0).Name);
+                    Assert.AreEqual("f2", fis1.FieldInfo(1).Name);
+                    Assert.AreEqual("f1", fis2.FieldInfo(0).Name);
+                    Assert.IsNull(fis2.FieldInfo(1));
+                    Assert.AreEqual("f3", fis2.FieldInfo(2).Name);
+                }
+
+                {
+                    IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(Random().NextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES : NoMergePolicy.COMPOUND_FILES));
+                    Document d = new Document();
+                    d.Add(new TextField("f1", "d3 first field", Field.Store.YES));
+                    d.Add(new TextField("f2", "d3 second field", Field.Store.YES));
+                    d.Add(new StoredField("f3", new byte[] { 1, 2, 3, 4, 5 }));
+                    writer.AddDocument(d);
+                    writer.Dispose();
+                    SegmentInfos sis = new SegmentInfos();
+                    sis.Read(dir);
+                    Assert.AreEqual(3, sis.Count);
+                    FieldInfos fis1 = SegmentReader.ReadFieldInfos(sis.Info(0));
+                    FieldInfos fis2 = SegmentReader.ReadFieldInfos(sis.Info(1));
+                    FieldInfos fis3 = SegmentReader.ReadFieldInfos(sis.Info(2));
+                    Assert.AreEqual("f1", fis1.FieldInfo(0).Name);
+                    Assert.AreEqual("f2", fis1.FieldInfo(1).Name);
+                    Assert.AreEqual("f1", fis2.FieldInfo(0).Name);
+                    Assert.IsNull(fis2.FieldInfo(1));
+                    Assert.AreEqual("f3", fis2.FieldInfo(2).Name);
+                    Assert.AreEqual("f1", fis3.FieldInfo(0).Name);
+                    Assert.AreEqual("f2", fis3.FieldInfo(1).Name);
+                    Assert.AreEqual("f3", fis3.FieldInfo(2).Name);
+                }
+
+                {
+                    IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(Random().NextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES : NoMergePolicy.COMPOUND_FILES));
+                    writer.DeleteDocuments(new Term("f1", "d1"));
+                    // nuke the first segment entirely so that the segment with gaps is
+                    // loaded first!
+                    writer.ForceMergeDeletes();
+                    writer.Dispose();
+                }
+
+                IndexWriter writer_ = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(new LogByteSizeMergePolicy()).SetInfoStream(new FailOnNonBulkMergesInfoStream()));
+                writer_.ForceMerge(1);
+                writer_.Dispose();
+
+                SegmentInfos sis_ = new SegmentInfos();
+                sis_.Read(dir);
+                Assert.AreEqual(1, sis_.Count);
+                FieldInfos fis1_ = SegmentReader.ReadFieldInfos(sis_.Info(0));
+                Assert.AreEqual("f1", fis1_.FieldInfo(0).Name);
+                Assert.AreEqual("f2", fis1_.FieldInfo(1).Name);
+                Assert.AreEqual("f3", fis1_.FieldInfo(2).Name);
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestManyFields()
+        {
+            int NUM_DOCS = AtLeast(200);
+            int MAX_FIELDS = AtLeast(50);
+
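+            // Each document references 4 randomly chosen field numbers
+            // (duplicates allowed); GetField maps a number to one of 16
+            // stored/tokenized/term-vector configurations.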
+            int[][] docs = RectangularArrays.ReturnRectangularIntArray(NUM_DOCS, 4);
+            for (int i = 0; i < docs.Length; i++)
+            {
+                for (int j = 0; j < docs[i].Length; j++)
+                {
+                    docs[i][j] = Random().Next(MAX_FIELDS);
+                }
+            }
+
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            for (int i = 0; i < NUM_DOCS; i++)
+            {
+                Document d = new Document();
+                for (int j = 0; j < docs[i].Length; j++)
+                {
+                    d.Add(GetField(docs[i][j]));
+                }
+
+                writer.AddDocument(d);
+            }
+
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(dir);
+            foreach (SegmentCommitInfo si in sis.Segments)
+            {
+                FieldInfos fis = SegmentReader.ReadFieldInfos(si);
+
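+                // Field names are their numeric ids, so we can recompute the
+                // expected FieldType and compare its indexed/term-vector flags.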
+                foreach (FieldInfo fi in fis)
+                {
+                    Field expected = GetField(Convert.ToInt32(fi.Name));
+                    Assert.AreEqual(expected.FieldType.IsIndexed, fi.IsIndexed);
+                    Assert.AreEqual(expected.FieldType.StoreTermVectors, fi.HasVectors);
+                }
+            }
+
+            dir.Dispose();
+        }
+
+        private Field GetField(int number)
+        {
+            int mode = number % 16;
+            string fieldName = "" + number;
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+
+            FieldType customType2 = new FieldType(TextField.TYPE_STORED);
+            customType2.IsTokenized = false;
+
+            FieldType customType3 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType3.IsTokenized = false;
+
+            FieldType customType4 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType4.IsTokenized = false;
+            customType4.StoreTermVectors = true;
+            customType4.StoreTermVectorOffsets = true;
+
+            FieldType customType5 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType5.StoreTermVectors = true;
+            customType5.StoreTermVectorOffsets = true;
+
+            FieldType customType6 = new FieldType(TextField.TYPE_STORED);
+            customType6.IsTokenized = false;
+            customType6.StoreTermVectors = true;
+            customType6.StoreTermVectorOffsets = true;
+
+            FieldType customType7 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType7.IsTokenized = false;
+            customType7.StoreTermVectors = true;
+            customType7.StoreTermVectorOffsets = true;
+
+            FieldType customType8 = new FieldType(TextField.TYPE_STORED);
+            customType8.IsTokenized = false;
+            customType8.StoreTermVectors = true;
+            customType8.StoreTermVectorPositions = true;
+
+            FieldType customType9 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType9.StoreTermVectors = true;
+            customType9.StoreTermVectorPositions = true;
+
+            FieldType customType10 = new FieldType(TextField.TYPE_STORED);
+            customType10.IsTokenized = false;
+            customType10.StoreTermVectors = true;
+            customType10.StoreTermVectorPositions = true;
+
+            FieldType customType11 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType11.IsTokenized = false;
+            customType11.StoreTermVectors = true;
+            customType11.StoreTermVectorPositions = true;
+
+            FieldType customType12 = new FieldType(TextField.TYPE_STORED);
+            customType12.StoreTermVectors = true;
+            customType12.StoreTermVectorOffsets = true;
+            customType12.StoreTermVectorPositions = true;
+
+            FieldType customType13 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType13.StoreTermVectors = true;
+            customType13.StoreTermVectorOffsets = true;
+            customType13.StoreTermVectorPositions = true;
+
+            FieldType customType14 = new FieldType(TextField.TYPE_STORED);
+            customType14.IsTokenized = false;
+            customType14.StoreTermVectors = true;
+            customType14.StoreTermVectorOffsets = true;
+            customType14.StoreTermVectorPositions = true;
+
+            FieldType customType15 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType15.IsTokenized = false;
+            customType15.StoreTermVectors = true;
+            customType15.StoreTermVectorOffsets = true;
+            customType15.StoreTermVectorPositions = true;
+
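+            // mode (number % 16) selects one of the configurations above,
+            // covering combinations of stored, tokenized, and term-vector
+            // options.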
+            switch (mode)
+            {
+                case 0:
+                    return new Field(fieldName, "some text", customType);
+
+                case 1:
+                    return new TextField(fieldName, "some text", Field.Store.NO);
+
+                case 2:
+                    return new Field(fieldName, "some text", customType2);
+
+                case 3:
+                    return new Field(fieldName, "some text", customType3);
+
+                case 4:
+                    return new Field(fieldName, "some text", customType4);
+
+                case 5:
+                    return new Field(fieldName, "some text", customType5);
+
+                case 6:
+                    return new Field(fieldName, "some text", customType6);
+
+                case 7:
+                    return new Field(fieldName, "some text", customType7);
+
+                case 8:
+                    return new Field(fieldName, "some text", customType8);
+
+                case 9:
+                    return new Field(fieldName, "some text", customType9);
+
+                case 10:
+                    return new Field(fieldName, "some text", customType10);
+
+                case 11:
+                    return new Field(fieldName, "some text", customType11);
+
+                case 12:
+                    return new Field(fieldName, "some text", customType12);
+
+                case 13:
+                    return new Field(fieldName, "some text", customType13);
+
+                case 14:
+                    return new Field(fieldName, "some text", customType14);
+
+                case 15:
+                    return new Field(fieldName, "some text", customType15);
+
+                default:
+                    return null;
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestCrash.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestCrash.cs b/src/Lucene.Net.Tests/Index/TestCrash.cs
new file mode 100644
index 0000000..ee1db50
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestCrash.cs
@@ -0,0 +1,229 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using NoLockFactory = Lucene.Net.Store.NoLockFactory;
+
+    [TestFixture]
+    public class TestCrash : LuceneTestCase
+    {
+        private IndexWriter InitIndex(IConcurrentMergeScheduler scheduler, Random random, bool initialCommit)
+        {
+            return InitIndex(scheduler, random, NewMockDirectory(random), initialCommit);
+        }
+
+        private IndexWriter InitIndex(IConcurrentMergeScheduler scheduler, Random random, MockDirectoryWrapper dir, bool initialCommit)
+        {
+            dir.SetLockFactory(NoLockFactory.GetNoLockFactory());
+
+            scheduler.SetSuppressExceptions();
+
+            IndexWriter writer = new IndexWriter(dir,
+                NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
+                .SetMaxBufferedDocs(10)
+                .SetMergeScheduler(scheduler));
+
+            if (initialCommit)
+            {
+                writer.Commit();
+            }
+
+            Document doc = new Document();
+            doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+            doc.Add(NewTextField("id", "0", Field.Store.NO));
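+            // 157 docs with MaxBufferedDocs=10 forces several flushes, so
+            // merges are likely in flight when the directory is later crashed.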
+            for (int i = 0; i < 157; i++)
+            {
+                writer.AddDocument(doc);
+            }
+
+            return writer;
+        }
+
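+        /// <summary>
+        /// Simulates a machine crash: sync the merge scheduler, "crash" the
+        /// MockDirectoryWrapper (simulating loss of unsynced files), then
+        /// clear the crash flag so the directory can be used again.
+        /// </summary>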
+        private void Crash(IndexWriter writer)
+        {
+            MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
+            var cms = (IConcurrentMergeScheduler)writer.Config.MergeScheduler;
+            cms.Sync();
+            dir.Crash();
+            cms.Sync();
+            dir.ClearCrash();
+        }
+
+        [Test]
+        public virtual void TestCrashWhileIndexing(
+            [ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            // this test relies on being able to open a reader before any commit
+            // has happened, so we create an initial commit (before any documents
+            // are added) just to allow that.
+            IndexWriter writer = InitIndex(scheduler, Random(), true);
+            MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
+
+            // We create leftover files because merging could be
+            // running when we crash:
+            dir.AssertNoUnrefencedFilesOnClose = false;
+
+            Crash(writer);
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.IsTrue(reader.NumDocs < 157);
+            reader.Dispose();
+
+            // Make a new dir, copying from the crashed dir, and
+            // open IW on it, to confirm IW "recovers" after a
+            // crash:
+            Directory dir2 = NewDirectory(dir);
+            dir.Dispose();
+
+            (new RandomIndexWriter(Random(), dir2, Similarity, TimeZone)).Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestWriterAfterCrash(
+            [ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            // this test relies on being able to open a reader before any commit
+            // has happened, so we create an initial commit (before any documents
+            // are added) just to allow that.
+            Console.WriteLine("TEST: initIndex");
+            IndexWriter writer = InitIndex(scheduler, Random(), true);
+            Console.WriteLine("TEST: done initIndex");
+            MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
+
+            // We create leftover files because merging could be
+            // running / store files could be open when we crash:
+            dir.AssertNoUnrefencedFilesOnClose = false;
+
+            dir.PreventDoubleWrite = false;
+            Console.WriteLine("TEST: now crash");
+            Crash(writer);
+            writer = InitIndex(scheduler, Random(), dir, false);
+            writer.Dispose();
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.IsTrue(reader.NumDocs < 314);
+            reader.Dispose();
+
+            // Make a new dir, copying from the crashed dir, and
+            // open IW on it, to confirm IW "recovers" after a
+            // crash:
+            Directory dir2 = NewDirectory(dir);
+            dir.Dispose();
+
+            (new RandomIndexWriter(Random(), dir2, Similarity, TimeZone)).Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestCrashAfterReopen(
+            [ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            IndexWriter writer = InitIndex(scheduler, Random(), false);
+            MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
+
+            // We create leftover files because merging could be
+            // running when we crash:
+            dir.AssertNoUnrefencedFilesOnClose = false;
+
+            writer.Dispose();
+            writer = InitIndex(scheduler, Random(), dir, false);
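+            // 157 docs were added by the first InitIndex and 157 more by the
+            // second, so MaxDoc is 314.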
+            Assert.AreEqual(314, writer.MaxDoc);
+            Crash(writer);
+
+            /*
+            System.out.println("\n\nTEST: open reader");
+            String[] l = dir.list();
+            Arrays.sort(l);
+            for(int i=0;i<l.Length;i++)
+              System.out.println("file " + i + " = " + l[i] + " " +
+            dir.FileLength(l[i]) + " bytes");
+            */
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.IsTrue(reader.NumDocs >= 157);
+            reader.Dispose();
+
+            // Make a new dir, copying from the crashed dir, and
+            // open IW on it, to confirm IW "recovers" after a
+            // crash:
+            Directory dir2 = NewDirectory(dir);
+            dir.Dispose();
+
+            (new RandomIndexWriter(Random(), dir2, Similarity, TimeZone)).Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestCrashAfterClose(
+            [ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            IndexWriter writer = InitIndex(scheduler, Random(), false);
+            MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
+
+            writer.Dispose();
+            dir.Crash();
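+            // Dispose() committed everything, so the crash after close must
+            // not lose any of the 157 docs.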
+
+            /*
+            String[] l = dir.list();
+            Arrays.sort(l);
+            for(int i=0;i<l.Length;i++)
+              System.out.println("file " + i + " = " + l[i] + " " + dir.FileLength(l[i]) + " bytes");
+            */
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(157, reader.NumDocs);
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestCrashAfterCloseNoWait(
+            [ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            IndexWriter writer = InitIndex(scheduler, Random(), false);
+            MockDirectoryWrapper dir = (MockDirectoryWrapper)writer.Directory;
+
+            writer.Dispose(false);
+
+            dir.Crash();
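+            // Even though Dispose(false) does not wait for merges, it still
+            // commits, so all 157 docs must survive the crash.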
+
+            /*
+            String[] l = dir.list();
+            Arrays.sort(l);
+            for(int i=0;i<l.Length;i++)
+              System.out.println("file " + i + " = " + l[i] + " " + dir.FileLength(l[i]) + " bytes");
+            */
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(157, reader.NumDocs);
+            reader.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestCrashCausesCorruptIndex.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestCrashCausesCorruptIndex.cs b/src/Lucene.Net.Tests/Index/TestCrashCausesCorruptIndex.cs
new file mode 100644
index 0000000..e53c933
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestCrashCausesCorruptIndex.cs
@@ -0,0 +1,201 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using System.IO;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FilterDirectory = Lucene.Net.Store.FilterDirectory;
+    using FSDirectory = Lucene.Net.Store.FSDirectory;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TopDocs = Lucene.Net.Search.TopDocs;
+
+    [TestFixture]
+    public class TestCrashCausesCorruptIndex : LuceneTestCase
+    {
+        internal DirectoryInfo Path;
+
+        /// <summary>
+        /// LUCENE-3627: before the fix for that issue, this test fails.
+        /// </summary>
+        [Test]
+        public virtual void TestCrashCorruptsIndexing()
+        {
+            Path = CreateTempDir("testCrashCorruptsIndexing");
+
+            IndexAndCrashOnCreateOutputSegments2();
+
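+            // Two docs are expected: one committed in segments_1 and one made
+            // durable by Dispose() (segments_3), despite the crashed
+            // segments_2 commit.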
+            SearchForFleas(2);
+
+            IndexAfterRestart();
+
+            SearchForFleas(3);
+        }
+
+        /// <summary>
+        /// index 1 document and commit.
+        /// prepare for crashing.
+        /// index 1 more document, and upon commit, creation of segments_2 will crash.
+        /// </summary>
+        private void IndexAndCrashOnCreateOutputSegments2()
+        {
+            Directory realDirectory = FSDirectory.Open(Path);
+            CrashAfterCreateOutput crashAfterCreateOutput = new CrashAfterCreateOutput(realDirectory);
+
+            // NOTE: cannot use RandomIndexWriter because it
+            // sometimes commits:
+            IndexWriter indexWriter = new IndexWriter(crashAfterCreateOutput, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            indexWriter.AddDocument(Document);
+            // writes segments_1:
+            indexWriter.Commit();
+
+            crashAfterCreateOutput.GetCrashAfterCreateOutput = "segments_2";
+            indexWriter.AddDocument(Document);
+            try
+            {
+                // tries to write segments_2 but hits fake exc:
+                indexWriter.Commit();
+                Assert.Fail("should have hit CrashingException");
+            }
+#pragma warning disable 168
+            catch (CrashingException e)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            // writes segments_3
+            indexWriter.Dispose();
+            Assert.IsFalse(SlowFileExists(realDirectory, "segments_2"));
+            crashAfterCreateOutput.Dispose();
+        }
+
+        /// <summary>
+        /// Attempts to index another 1 document.
+        /// </summary>
+        private void IndexAfterRestart()
+        {
+            Directory realDirectory = NewFSDirectory(Path);
+
+            // LUCENE-3627 (before the fix): this line fails because
+            // it doesn't know what to do with the created but empty
+            // segments_2 file
+            IndexWriter indexWriter = new IndexWriter(realDirectory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            // before the fix, the test fails above; to verify the fix,
+            // the following lines should pass as well.
+            indexWriter.AddDocument(Document);
+            indexWriter.Dispose();
+            Assert.IsFalse(SlowFileExists(realDirectory, "segments_2"));
+            realDirectory.Dispose();
+        }
+
+        /// <summary>
+        /// Run an example search.
+        /// </summary>
+        private void SearchForFleas(int expectedTotalHits)
+        {
+            Directory realDirectory = NewFSDirectory(Path);
+            IndexReader indexReader = DirectoryReader.Open(realDirectory);
+            IndexSearcher indexSearcher = NewSearcher(indexReader);
+            TopDocs topDocs = indexSearcher.Search(new TermQuery(new Term(TEXT_FIELD, "fleas")), 10);
+            Assert.IsNotNull(topDocs);
+            Assert.AreEqual(expectedTotalHits, topDocs.TotalHits);
+            indexReader.Dispose();
+            realDirectory.Dispose();
+        }
+
+        private const string TEXT_FIELD = "text";
+
+        /// <summary>
+        /// Gets a document with content "my dog has fleas".
+        /// </summary>
+        private Document Document
+        {
+            get
+            {
+                Document document = new Document();
+                document.Add(NewTextField(TEXT_FIELD, "my dog has fleas", Field.Store.NO));
+                return document;
+            }
+        }
+
+        /// <summary>
+        /// The marker exception that we use in lieu of an actual
+        /// machine crash.
+        /// </summary>
+        private class CrashingException : Exception
+        {
+            public CrashingException(string msg)
+                : base(msg)
+            {
+            }
+        }
+
+        /// <summary>
+        /// This Directory wrapper "simulates" a crash right after
+        /// realDirectory.CreateOutput(..) has been called for a specified file name.
+        /// </summary>
+        private class CrashAfterCreateOutput : FilterDirectory
+        {
+            internal string CrashAfterCreateOutput_Renamed;
+
+            public CrashAfterCreateOutput(Directory realDirectory)
+                : base(realDirectory)
+            {
+                SetLockFactory(realDirectory.LockFactory);
+            }
+
+            public virtual string GetCrashAfterCreateOutput
+            {
+                set
+                {
+                    this.CrashAfterCreateOutput_Renamed = value;
+                }
+            }
+
+            public override IndexOutput CreateOutput(string name, IOContext cxt)
+            {
+                IndexOutput indexOutput = m_input.CreateOutput(name, cxt);
+                if (null != CrashAfterCreateOutput_Renamed && name.Equals(CrashAfterCreateOutput_Renamed))
+                {
+                    // CRASH!
+                    indexOutput.Dispose();
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: now crash");
+                        Console.WriteLine(new Exception().StackTrace);
+                    }
+                    throw new CrashingException("crashAfterCreateOutput " + CrashAfterCreateOutput_Renamed);
+                }
+                return indexOutput;
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestCustomNorms.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestCustomNorms.cs b/src/Lucene.Net.Tests/Index/TestCustomNorms.cs
new file mode 100644
index 0000000..0db78f9
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestCustomNorms.cs
@@ -0,0 +1,144 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using CollectionStatistics = Lucene.Net.Search.CollectionStatistics;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LineFileDocs = Lucene.Net.Util.LineFileDocs;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using PerFieldSimilarityWrapper = Lucene.Net.Search.Similarities.PerFieldSimilarityWrapper;
+    using Similarity = Lucene.Net.Search.Similarities.Similarity;
+    using TermStatistics = Lucene.Net.Search.TermStatistics;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    [SuppressCodecs("Lucene3x")]
+    [TestFixture]
+    public class TestCustomNorms : LuceneTestCase
+    {
+        internal readonly string FloatTestField = "normsTestFloat";
+        internal readonly string ExceptionTestField = "normsTestExcp";
+
+        [Test]
+        public virtual void TestFloatNorms()
+        {
+            Directory dir = NewDirectory();
+            MockAnalyzer analyzer = new MockAnalyzer(Random());
+            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
+
+            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            Similarity provider = new MySimProvider(this);
+            config.SetSimilarity(provider);
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, config);
+            LineFileDocs docs = new LineFileDocs(Random());
+            int num = AtLeast(100);
+            for (int i = 0; i < num; i++)
+            {
+                Document doc = docs.NextDoc();
+                float nextFloat = (float)Random().NextDouble();
+                // Cast to a double so the string conversion keeps more precision.
+                Field f = new TextField(FloatTestField, "" + (double)nextFloat, Field.Store.YES);
+                f.Boost = nextFloat;
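+                // FloatEncodingBoostSimilarity (below) encodes this boost as
+                // raw float bits in the norm; the stored field value is used
+                // to verify the round-trip after reading the index back.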
+
+                doc.Add(f);
+                writer.AddDocument(doc);
+                doc.RemoveField(FloatTestField);
+                if (Rarely())
+                {
+                    writer.Commit();
+                }
+            }
+            writer.Commit();
+            writer.Dispose();
+            AtomicReader open = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
+            NumericDocValues norms = open.GetNormValues(FloatTestField);
+            Assert.IsNotNull(norms);
+            for (int i = 0; i < open.MaxDoc; i++)
+            {
+                Document document = open.Document(i);
+                float expected = Convert.ToSingle(document.Get(FloatTestField));
+                Assert.AreEqual(expected, Number.Int32BitsToSingle((int)norms.Get(i)), 0.0f);
+            }
+            open.Dispose();
+            dir.Dispose();
+            docs.Dispose();
+        }
+
+        public class MySimProvider : PerFieldSimilarityWrapper
+        {
+            private readonly TestCustomNorms OuterInstance;
+
+            public MySimProvider(TestCustomNorms outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            internal Similarity @delegate = new DefaultSimilarity();
+
+            public override float QueryNorm(float sumOfSquaredWeights)
+            {
+                return @delegate.QueryNorm(sumOfSquaredWeights);
+            }
+
+            public override Similarity Get(string field)
+            {
+                if (OuterInstance.FloatTestField.Equals(field))
+                {
+                    return new FloatEncodingBoostSimilarity();
+                }
+                else
+                {
+                    return @delegate;
+                }
+            }
+
+            public override float Coord(int overlap, int maxOverlap)
+            {
+                return @delegate.Coord(overlap, maxOverlap);
+            }
+        }
+
+        public class FloatEncodingBoostSimilarity : Similarity
+        {
+            public override long ComputeNorm(FieldInvertState state)
+            {
+                return Number.SingleToInt32Bits(state.Boost);
+            }
+
+            public override SimWeight ComputeWeight(float queryBoost, CollectionStatistics collectionStats, params TermStatistics[] termStats)
+            {
+                throw new System.NotSupportedException();
+            }
+
+            public override SimScorer GetSimScorer(SimWeight weight, AtomicReaderContext context)
+            {
+                throw new System.NotSupportedException();
+            }
+        }
+    }
+}
\ No newline at end of file


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestMultiFields.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestMultiFields.cs b/src/Lucene.Net.Tests/Index/TestMultiFields.cs
new file mode 100644
index 0000000..0d70862
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestMultiFields.cs
@@ -0,0 +1,228 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Randomized.Generators;
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Analysis;
+    
+
+    using Lucene.Net.Util;
+    using NUnit.Framework;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+
+    [TestFixture]
+    public class TestMultiFields : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestRandom()
+        {
+            int num = AtLeast(2);
+            for (int iter = 0; iter < num; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: iter=" + iter);
+                }
+
+                Directory dir = NewDirectory();
+
+                IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
+                // we can do this because we use NoMergePolicy (and don't merge to "nothing")
+                w.KeepFullyDeletedSegments = true;
+
+                IDictionary<BytesRef, IList<int?>> docs = new Dictionary<BytesRef, IList<int?>>();
+                HashSet<int?> deleted = new HashSet<int?>();
+                IList<BytesRef> terms = new List<BytesRef>();
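+                // docs maps each term to the ids of the docs containing it;
+                // deleted tracks the ids we delete so liveDocs can be checked
+                // later.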
+
+                int numDocs = TestUtil.NextInt(Random(), 1, 100 * RANDOM_MULTIPLIER);
+                Documents.Document doc = new Documents.Document();
+                Field f = NewStringField("field", "", Field.Store.NO);
+                doc.Add(f);
+                Field id = NewStringField("id", "", Field.Store.NO);
+                doc.Add(id);
+
+                bool onlyUniqueTerms = Random().NextBoolean();
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: onlyUniqueTerms=" + onlyUniqueTerms + " numDocs=" + numDocs);
+                }
+                HashSet<BytesRef> uniqueTerms = new HashSet<BytesRef>();
+                for (int i = 0; i < numDocs; i++)
+                {
+                    if (!onlyUniqueTerms && Random().NextBoolean() && terms.Count > 0)
+                    {
+                        // re-use existing term
+                        BytesRef term = terms[Random().Next(terms.Count)];
+                        docs[term].Add(i);
+                        f.SetStringValue(term.Utf8ToString());
+                    }
+                    else
+                    {
+                        string s = TestUtil.RandomUnicodeString(Random(), 10);
+                        BytesRef term = new BytesRef(s);
+                        if (!docs.ContainsKey(term))
+                        {
+                            docs[term] = new List<int?>();
+                        }
+                        docs[term].Add(i);
+                        terms.Add(term);
+                        uniqueTerms.Add(term);
+                        f.SetStringValue(s);
+                    }
+                    id.SetStringValue("" + i);
+                    w.AddDocument(doc);
+                    if (Random().Next(4) == 1)
+                    {
+                        w.Commit();
+                    }
+                    if (i > 0 && Random().Next(20) == 1)
+                    {
+                        int delID = Random().Next(i);
+                        deleted.Add(delID);
+                        w.DeleteDocuments(new Term("id", "" + delID));
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: delete " + delID);
+                        }
+                    }
+                }
+
+                if (VERBOSE)
+                {
+                    List<BytesRef> termsList = new List<BytesRef>(uniqueTerms);
+#pragma warning disable 612, 618
+                    termsList.Sort(BytesRef.UTF8SortedAsUTF16Comparer);
+#pragma warning restore 612, 618
+                    Console.WriteLine("TEST: terms in UTF16 order:");
+                    foreach (BytesRef b in termsList)
+                    {
+                        Console.WriteLine("  " + UnicodeUtil.ToHexString(b.Utf8ToString()) + " " + b);
+                        foreach (int docID in docs[b])
+                        {
+                            if (deleted.Contains(docID))
+                            {
+                                Console.WriteLine("    " + docID + " (deleted)");
+                            }
+                            else
+                            {
+                                Console.WriteLine("    " + docID);
+                            }
+                        }
+                    }
+                }
+
+                IndexReader reader = w.Reader;
+                w.Dispose();
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: reader=" + reader);
+                }
+
+                IBits liveDocs = MultiFields.GetLiveDocs(reader);
+                foreach (int delDoc in deleted)
+                {
+                    Assert.IsFalse(liveDocs.Get(delDoc));
+                }
+
+                for (int i = 0; i < 100; i++)
+                {
+                    BytesRef term = terms[Random().Next(terms.Count)];
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: seek term=" + UnicodeUtil.ToHexString(term.Utf8ToString()) + " " + term);
+                    }
+
+                    DocsEnum docsEnum = TestUtil.Docs(Random(), reader, "field", term, liveDocs, null, DocsEnum.FLAG_NONE);
+                    Assert.IsNotNull(docsEnum);
+
+                    foreach (int docID in docs[term])
+                    {
+                        if (!deleted.Contains(docID))
+                        {
+                            Assert.AreEqual(docID, docsEnum.NextDoc());
+                        }
+                    }
+                    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docsEnum.NextDoc());
+                }
+
+                reader.Dispose();
+                dir.Dispose();
+            }
+        }
+
+        /*
+        private void verify(IndexReader r, String term, List<Integer> expected) throws Exception {
+          DocsEnum docs = TestUtil.Docs(random, r,
+                                         "field",
+                                         new BytesRef(term),
+                                         MultiFields.GetLiveDocs(r),
+                                         null,
+                                         false);
+          for(int docID : expected) {
+            Assert.AreEqual(docID, docs.NextDoc());
+          }
+          Assert.AreEqual(docs.NO_MORE_DOCS, docs.NextDoc());
+        }
+        */
+
+        [Test]
+        public virtual void TestSeparateEnums()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Documents.Document d = new Documents.Document();
+            d.Add(NewStringField("f", "j", Field.Store.NO));
+            w.AddDocument(d);
+            w.Commit();
+            w.AddDocument(d);
+            IndexReader r = w.Reader;
+            w.Dispose();
+            DocsEnum d1 = TestUtil.Docs(Random(), r, "f", new BytesRef("j"), null, null, DocsEnum.FLAG_NONE);
+            DocsEnum d2 = TestUtil.Docs(Random(), r, "f", new BytesRef("j"), null, null, DocsEnum.FLAG_NONE);
+            Assert.AreEqual(0, d1.NextDoc());
+            Assert.AreEqual(0, d2.NextDoc());
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestTermDocsEnum()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Documents.Document d = new Documents.Document();
+            d.Add(NewStringField("f", "j", Field.Store.NO));
+            w.AddDocument(d);
+            w.Commit();
+            w.AddDocument(d);
+            IndexReader r = w.Reader;
+            w.Dispose();
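+            // The commit between the two AddDocument calls creates two
+            // segments; MultiFields.GetTermDocsEnum iterates across both,
+            // returning doc 0 and then doc 1.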
+            DocsEnum de = MultiFields.GetTermDocsEnum(r, null, "f", new BytesRef("j"));
+            Assert.AreEqual(0, de.NextDoc());
+            Assert.AreEqual(1, de.NextDoc());
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, de.NextDoc());
+            r.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestMultiLevelSkipList.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestMultiLevelSkipList.cs b/src/Lucene.Net.Tests/Index/TestMultiLevelSkipList.cs
new file mode 100644
index 0000000..49456ed
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestMultiLevelSkipList.cs
@@ -0,0 +1,218 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Analysis;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.IO;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexInput = Lucene.Net.Store.IndexInput;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using Lucene41PostingsFormat = Lucene.Net.Codecs.Lucene41.Lucene41PostingsFormat;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// This test case verifies that multi-level skipping is used
+    /// to reduce I/O while skipping through posting lists.
+    ///
+    /// Skipping in general is already covered by several other
+    /// test cases.
+    /// </summary>
+    [TestFixture]
+    public class TestMultiLevelSkipList : LuceneTestCase
+    {
+        internal class CountingRAMDirectory : MockDirectoryWrapper
+        {
+            private readonly TestMultiLevelSkipList OuterInstance;
+
+            public CountingRAMDirectory(TestMultiLevelSkipList outerInstance, Directory @delegate)
+                : base(Random(), @delegate)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override IndexInput OpenInput(string fileName, IOContext context)
+            {
+                IndexInput @in = base.OpenInput(fileName, context);
+                if (fileName.EndsWith(".frq"))
+                {
+                    @in = new CountingStream(OuterInstance, @in);
+                }
+                return @in;
+            }
+        }
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Counter = 0;
+        }
+
+        [Test]
+        public virtual void TestSimpleSkip()
+        {
+            Directory dir = new CountingRAMDirectory(this, new RAMDirectory());
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new PayloadAnalyzer()).SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat())).SetMergePolicy(NewLogMergePolicy()));
+            Term term = new Term("test", "a");
+            for (int i = 0; i < 5000; i++)
+            {
+                Document d1 = new Document();
+                d1.Add(NewTextField(term.Field, term.Text(), Field.Store.NO));
+                writer.AddDocument(d1);
+            }
+            writer.Commit();
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            AtomicReader reader = GetOnlySegmentReader(DirectoryReader.Open(dir));
+
+            for (int i = 0; i < 2; i++)
+            {
+                Counter = 0;
+                DocsAndPositionsEnum tp = reader.TermPositionsEnum(term);
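+                // Each CheckSkipTo(target, maxCounter) advances the enum to
+                // target and asserts that at most maxCounter bytes were read
+                // from the .frq stream (counted via CountingStream).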
+                CheckSkipTo(tp, 14, 185); // no skips
+                CheckSkipTo(tp, 17, 190); // one skip on level 0
+                CheckSkipTo(tp, 287, 200); // one skip on level 1, two on level 0
+
+                // this test would fail if we had only one skip level,
+                // because then more bytes would be read from the freqStream
+                CheckSkipTo(tp, 4800, 250); // one skip on level 2
+            }
+        }
+
+        public virtual void CheckSkipTo(DocsAndPositionsEnum tp, int target, int maxCounter)
+        {
+            tp.Advance(target);
+            if (maxCounter < Counter)
+            {
+                Assert.Fail("Too many bytes read: " + Counter + " vs " + maxCounter);
+            }
+
+            Assert.AreEqual(target, tp.DocID, "Wrong document " + tp.DocID + " after skipTo target " + target);
+            Assert.AreEqual(1, tp.Freq, "Frequency is not 1: " + tp.Freq);
+            tp.NextPosition();
+            BytesRef b = tp.GetPayload();
+            Assert.AreEqual(1, b.Length);
+            Assert.AreEqual((sbyte)target, (sbyte)b.Bytes[b.Offset], "Wrong payload for the target " + target + ": " + (sbyte)b.Bytes[b.Offset]);
+        }
+
+        private class PayloadAnalyzer : Analyzer
+        {
+            internal readonly AtomicInt32 PayloadCount = new AtomicInt32(-1);
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+                return new TokenStreamComponents(tokenizer, new PayloadFilter(PayloadCount, tokenizer));
+            }
+        }
+
+        private class PayloadFilter : TokenFilter
+        {
+            internal IPayloadAttribute PayloadAtt;
+            internal AtomicInt32 PayloadCount;
+
+            protected internal PayloadFilter(AtomicInt32 payloadCount, TokenStream input)
+                : base(input)
+            {
+                this.PayloadCount = payloadCount;
+                PayloadAtt = AddAttribute<IPayloadAttribute>();
+            }
+
+            public sealed override bool IncrementToken()
+            {
+                bool hasNext = m_input.IncrementToken();
+                if (hasNext)
+                {
+                    PayloadAtt.Payload = new BytesRef(new[] { (byte)PayloadCount.IncrementAndGet() });
+                }
+                return hasNext;
+            }
+        }
+
+        private int Counter = 0;
+
+        // Simply extends IndexInput so that we can count the number
+        // of bytes read
+        internal class CountingStream : IndexInput
+        {
+            private readonly TestMultiLevelSkipList OuterInstance;
+
+            internal IndexInput Input;
+
+            internal CountingStream(TestMultiLevelSkipList outerInstance, IndexInput input)
+                : base("CountingStream(" + input + ")")
+            {
+                this.OuterInstance = outerInstance;
+                this.Input = input;
+            }
+
+            public override byte ReadByte()
+            {
+                OuterInstance.Counter++;
+                return this.Input.ReadByte();
+            }
+
+            public override void ReadBytes(byte[] b, int offset, int len)
+            {
+                OuterInstance.Counter += len;
+                this.Input.ReadBytes(b, offset, len);
+            }
+
+            public override void Dispose()
+            {
+                this.Input.Dispose();
+            }
+
+            public override long FilePointer
+            {
+                get
+                {
+                    return this.Input.FilePointer;
+                }
+            }
+
+            public override void Seek(long pos)
+            {
+                this.Input.Seek(pos);
+            }
+
+            public override long Length
+            {
+                get { return this.Input.Length; }
+            }
+
+            public override object Clone()
+            {
+                return new CountingStream(OuterInstance, (IndexInput)this.Input.Clone());
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestNRTReaderWithThreads.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestNRTReaderWithThreads.cs b/src/Lucene.Net.Tests/Index/TestNRTReaderWithThreads.cs
new file mode 100644
index 0000000..918b5b5
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestNRTReaderWithThreads.cs
@@ -0,0 +1,140 @@
+using System;
+using System.Threading;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+
+    [TestFixture]
+    public class TestNRTReaderWithThreads : LuceneTestCase
+    {
+        internal AtomicInt32 Seq = new AtomicInt32(1);
+        [Test]
+        public virtual void TestIndexing()
+        {
+            Directory mainDir = NewDirectory();
+            var wrapper = mainDir as MockDirectoryWrapper;
+            if (wrapper != null)
+            {
+                wrapper.AssertNoDeleteOpenFile = true;
+            }
+            var writer = new IndexWriter(mainDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(false, 2)));
+            IndexReader reader = writer.Reader; // start pooling readers
+            reader.Dispose();
+            var indexThreads = new RunThread[4];
+            for (int x = 0; x < indexThreads.Length; x++)
+            {
+                indexThreads[x] = new RunThread(this, x % 2, writer);
+                indexThreads[x].Name = "Thread " + x;
+                indexThreads[x].Start();
+            }
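+            // Let the indexing/deleting threads run for about one second
+            // before signaling them to stop via Run_Renamed below.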
+            long startTime = Environment.TickCount;
+            long duration = 1000;
+            while ((Environment.TickCount - startTime) < duration)
+            {
+                Thread.Sleep(100);
+            }
+            int delCount = 0;
+            int addCount = 0;
+            for (int x = 0; x < indexThreads.Length; x++)
+            {
+                indexThreads[x].Run_Renamed = false;
+                Assert.IsNull(indexThreads[x].Ex, "Exception thrown: " + indexThreads[x].Ex);
+                addCount += indexThreads[x].AddCount;
+                delCount += indexThreads[x].DelCount;
+            }
+            for (int x = 0; x < indexThreads.Length; x++)
+            {
+                indexThreads[x].Join();
+            }
+            for (int x = 0; x < indexThreads.Length; x++)
+            {
+                Assert.IsNull(indexThreads[x].Ex, "Exception thrown: " + indexThreads[x].Ex);
+            }
+            //System.out.println("addCount:"+addCount);
+            //System.out.println("delCount:"+delCount);
+            writer.Dispose();
+            mainDir.Dispose();
+        }
+
+        public class RunThread : ThreadClass
+        {
+            private readonly TestNRTReaderWithThreads OuterInstance;
+
+            internal IndexWriter Writer;
+            internal volatile bool Run_Renamed = true;
+            internal volatile Exception Ex;
+            internal int DelCount = 0;
+            internal int AddCount = 0;
+            internal int Type;
+            internal readonly Random r = new Random(Random().Next());
+
+            public RunThread(TestNRTReaderWithThreads outerInstance, int type, IndexWriter writer)
+            {
+                this.OuterInstance = outerInstance;
+                this.Type = type;
+                this.Writer = writer;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    while (Run_Renamed)
+                    {
+                        //int n = random.nextInt(2);
+                        if (Type == 0)
+                        {
+                            int i = OuterInstance.Seq.AddAndGet(1);
+                            Document doc = DocHelper.CreateDocument(i, "index1", 10);
+                            Writer.AddDocument(doc);
+                            AddCount++;
+                        }
+                        else if (Type == 1)
+                        {
+                            // we may or may not delete because the term may not exist,
+                            // however we're opening and closing the reader rapidly
+                            IndexReader reader = Writer.Reader;
+                            int id = r.Next(OuterInstance.Seq.Get());
+                            Term term = new Term("id", Convert.ToString(id));
+                            int count = TestIndexWriterReader.Count(term, reader);
+                            Writer.DeleteDocuments(term);
+                            reader.Dispose();
+                            DelCount += count;
+                        }
+                    }
+                }
+                catch (Exception ex)
+                {
+                    Console.WriteLine(ex.StackTrace);
+                    this.Ex = ex;
+                    Run_Renamed = false;
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
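
Editor's note: the indexing threads in TestNRTReaderWithThreads coordinate shutdown through a volatile flag rather than thread interrupts. The test flips Run_Renamed to false, joins each thread, and only then inspects the captured exception. A minimal sketch of that pattern follows; the names (Worker, stop, Error) are hypothetical and not taken from the commit:

    using System;

    public class Worker
    {
        private volatile bool stop;         // written by the test thread, read by the worker
        public volatile Exception Error;    // captured instead of tearing down the process

        public void RequestStop() { stop = true; }

        public void Run()
        {
            try
            {
                while (!stop)
                {
                    // add or delete documents here
                }
            }
            catch (Exception e)
            {
                Error = e;      // asserted on after Join() completes
                stop = true;
            }
        }
    }

Capturing the exception in a field mirrors the test above: a failure on the worker thread would otherwise never reach the test's assertions.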

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestNRTThreads.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestNRTThreads.cs b/src/Lucene.Net.Tests/Index/TestNRTThreads.cs
new file mode 100644
index 0000000..f6815cb
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestNRTThreads.cs
@@ -0,0 +1,185 @@
+using System;
+using System.Diagnostics;
+using System.Threading.Tasks;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using System.Collections.Generic;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+
+    // TODO
+    //   - mix in forceMerge, addIndexes
+    //   - randomly mix in non-congruent docs
+    [SuppressCodecs("SimpleText", "Memory", "Direct")]
+    [TestFixture]
+    public class TestNRTThreads : ThreadedIndexingAndSearchingTestCase
+    {
+        private bool UseNonNrtReaders = true;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            UseNonNrtReaders = Random().NextBoolean();
+        }
+
+        protected internal override void DoSearching(TaskScheduler es, DateTime stopTime)
+        {
+            bool anyOpenDelFiles = false;
+
+            DirectoryReader r = DirectoryReader.Open(Writer, true);
+
+            while (DateTime.UtcNow < stopTime && !Failed.Get())
+            {
+                if (Random().NextBoolean())
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: now reopen r=" + r);
+                    }
+                    DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
+                    if (r2 != null)
+                    {
+                        r.Dispose();
+                        r = r2;
+                    }
+                }
+                else
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: now close reader=" + r);
+                    }
+                    r.Dispose();
+                    Writer.Commit();
+                    ISet<string> openDeletedFiles = ((MockDirectoryWrapper)Dir).OpenDeletedFiles;
+                    if (openDeletedFiles.Count > 0)
+                    {
+                        Console.WriteLine("OBD files: " + openDeletedFiles);
+                    }
+                    anyOpenDelFiles |= openDeletedFiles.Count > 0;
+                    //Assert.AreEqual("open but deleted: " + openDeletedFiles, 0, openDeletedFiles.Size());
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: now open");
+                    }
+                    r = DirectoryReader.Open(Writer, true);
+                }
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: got new reader=" + r);
+                }
+                //System.out.println("numDocs=" + r.NumDocs + " openDelFileCount=" + dir.openDeleteFileCount());
+
+                if (r.NumDocs > 0)
+                {
+                    FixedSearcher = new IndexSearcher(r, es);
+                    SmokeTestSearcher(FixedSearcher);
+                    RunSearchThreads(DateTime.UtcNow.AddMilliseconds(500));
+                }
+            }
+            r.Dispose();
+
+            //System.out.println("numDocs=" + r.NumDocs + " openDelFileCount=" + dir.openDeleteFileCount());
+            ISet<string> openDeletedFiles_ = ((MockDirectoryWrapper)Dir).OpenDeletedFiles;
+            if (openDeletedFiles_.Count > 0)
+            {
+                Console.WriteLine("OBD files: " + openDeletedFiles_);
+            }
+            anyOpenDelFiles |= openDeletedFiles_.Count > 0;
+
+            Assert.IsFalse(anyOpenDelFiles, "saw non-zero open-but-deleted count");
+        }
+
+        protected internal override Directory GetDirectory(Directory @in)
+        {
+            Debug.Assert(@in is MockDirectoryWrapper);
+            if (!UseNonNrtReaders)
+            {
+                ((MockDirectoryWrapper)@in).AssertNoDeleteOpenFile = true;
+            }
+            return @in;
+        }
+
+        protected internal override void DoAfterWriter(TaskScheduler es)
+        {
+            // Force writer to do reader pooling, always, so that
+            // all merged segments, even for merges before
+            // doSearching is called, are warmed:
+            Writer.Reader.Dispose();
+        }
+
+        private IndexSearcher FixedSearcher;
+
+        protected internal override IndexSearcher CurrentSearcher
+        {
+            get
+            {
+                return FixedSearcher;
+            }
+        }
+
+        protected internal override void ReleaseSearcher(IndexSearcher s)
+        {
+            if (s != FixedSearcher)
+            {
+                // Final searcher:
+                s.IndexReader.Dispose();
+            }
+        }
+
+        protected internal override IndexSearcher FinalSearcher
+        {
+            get
+            {
+                IndexReader r2;
+                if (UseNonNrtReaders)
+                {
+                    if (Random().NextBoolean())
+                    {
+                        r2 = Writer.Reader;
+                    }
+                    else
+                    {
+                        Writer.Commit();
+                        r2 = DirectoryReader.Open(Dir);
+                    }
+                }
+                else
+                {
+                    r2 = Writer.Reader;
+                }
+                return NewSearcher(r2);
+            }
+        }
+
+        [Test]
+        public virtual void TestNRTThreads_Mem()
+        {
+            RunTest("TestNRTThreads");
+        }
+    }
+}
\ No newline at end of file
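
Editor's note: DoSearching above follows the standard DirectoryReader reopen contract: OpenIfChanged returns null when the index is unchanged, otherwise it returns a fresh reader and the caller remains responsible for disposing the superseded one. A condensed sketch of that loop, assuming an already-open IndexWriter named writer:

    DirectoryReader reader = DirectoryReader.Open(writer, true); // NRT reader, deletes applied
    for (int i = 0; i < 10; i++)
    {
        DirectoryReader changed = DirectoryReader.OpenIfChanged(reader);
        if (changed != null)    // null means nothing to reopen
        {
            reader.Dispose();   // caller disposes the old reader
            reader = changed;
        }
    }
    reader.Dispose();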

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestNeverDelete.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestNeverDelete.cs b/src/Lucene.Net.Tests/Index/TestNeverDelete.cs
new file mode 100644
index 0000000..572182c
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestNeverDelete.cs
@@ -0,0 +1,158 @@
+using Lucene.Net.Documents;
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Threading;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    // Make sure if you use NoDeletionPolicy that no file
+    // referenced by a commit point is ever deleted
+
+    [TestFixture]
+    public class TestNeverDelete : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestIndexing()
+        {
+            DirectoryInfo tmpDir = CreateTempDir("TestNeverDelete");
+            BaseDirectoryWrapper d = NewFSDirectory(tmpDir);
+
+            // We want to "see" files removed if Lucene removed
+            // them. This is still worth running on Windows, since
+            // the IndexReader opens and closes some of the files.
+            if (d is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)d).NoDeleteOpenFile = false;
+            }
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
+            w.w.Config.SetMaxBufferedDocs(TestUtil.NextInt(Random(), 5, 30));
+
+            w.Commit();
+            ThreadClass[] indexThreads = new ThreadClass[Random().Next(4)];
+            long stopTime = Environment.TickCount + AtLeast(1000);
+            for (int x = 0; x < indexThreads.Length; x++)
+            {
+                indexThreads[x] = new ThreadAnonymousInnerClassHelper(w, stopTime, NewStringField, NewTextField);
+                indexThreads[x].Name = "Thread " + x;
+                indexThreads[x].Start();
+            }
+
+            HashSet<string> allFiles = new HashSet<string>();
+
+            DirectoryReader r = DirectoryReader.Open(d);
+            while (Environment.TickCount < stopTime)
+            {
+                IndexCommit ic = r.IndexCommit;
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: check files: " + ic.FileNames);
+                }
+                allFiles.AddAll(ic.FileNames);
+                // Make sure no old files were removed
+                foreach (string fileName in allFiles)
+                {
+                    Assert.IsTrue(SlowFileExists(d, fileName), "file " + fileName + " does not exist");
+                }
+                DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
+                if (r2 != null)
+                {
+                    r.Dispose();
+                    r = r2;
+                }
+                Thread.Sleep(1);
+            }
+            r.Dispose();
+
+            foreach (ThreadClass t in indexThreads)
+            {
+                t.Join();
+            }
+            w.Dispose();
+            d.Dispose();
+
+            System.IO.Directory.Delete(tmpDir.FullName, true);
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly Func<string, string, Field.Store, Field> NewStringField;
+            private readonly Func<string, string, Field.Store, Field> NewTextField;
+
+            private RandomIndexWriter w;
+            private long StopTime;
+
+            /// <param name="newStringField">
+            /// LUCENENET specific
+            /// Passed in because <see cref="LuceneTestCase.NewStringField(string, string, Field.Store)"/>
+            /// is no longer static
+            /// </param>
+            /// <param name="newTextField">
+            /// LUCENENET specific
+            /// Passed in because <see cref="LuceneTestCase.NewTextField(string, string, Field.Store)"/>
+            /// is no longer static
+            /// </param>
+            public ThreadAnonymousInnerClassHelper(RandomIndexWriter w, long stopTime, 
+                Func<string, string, Field.Store, Field> newStringField, Func<string, string, Field.Store, Field> newTextField)
+            {
+                this.w = w;
+                this.StopTime = stopTime;
+                NewStringField = newStringField;
+                NewTextField = newTextField;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    int docCount = 0;
+                    while (Environment.TickCount < StopTime)
+                    {
+                        Document doc = new Document();
+                        doc.Add(NewStringField("dc", "" + docCount, Field.Store.YES));
+                        doc.Add(NewTextField("field", "here is some text", Field.Store.YES));
+                        w.AddDocument(doc);
+
+                        if (docCount % 13 == 0)
+                        {
+                            w.Commit();
+                        }
+                        docCount++;
+                    }
+                }
+                catch (Exception e)
+                {
+                    throw new Exception(e.Message, e);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
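
Editor's note: the survival check in TestNeverDelete collects the files referenced by successive commit points and asserts each one still exists. The same idea can be phrased over all commits at once; a sketch, assuming an index in directory d written with NoDeletionPolicy.INSTANCE and run inside a LuceneTestCase (for SlowFileExists):

    var referenced = new HashSet<string>();
    foreach (IndexCommit commit in DirectoryReader.ListCommits(d))
    {
        referenced.UnionWith(commit.FileNames); // files this commit point depends on
    }
    foreach (string fileName in referenced)
    {
        // under NoDeletionPolicy, no file referenced by a commit may ever be deleted
        Assert.IsTrue(SlowFileExists(d, fileName), "file " + fileName + " does not exist");
    }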

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestNewestSegment.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestNewestSegment.cs b/src/Lucene.Net.Tests/Index/TestNewestSegment.cs
new file mode 100644
index 0000000..fc76173
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestNewestSegment.cs
@@ -0,0 +1,39 @@
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+
+    [TestFixture]
+    public class TestNewestSegment : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestNewestSegment_Mem()
+        {
+            Directory directory = NewDirectory();
+            IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Assert.IsNull(writer.NewestSegment());
+            writer.Dispose();
+            directory.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestNoDeletionPolicy.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestNoDeletionPolicy.cs b/src/Lucene.Net.Tests/Index/TestNoDeletionPolicy.cs
new file mode 100644
index 0000000..a91b485
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestNoDeletionPolicy.cs
@@ -0,0 +1,95 @@
+using System.Reflection;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using Support;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+
+    [TestFixture]
+    public class TestNoDeletionPolicy : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestNoDeletionPolicy_Mem()
+        {
+            IndexDeletionPolicy idp = NoDeletionPolicy.INSTANCE;
+            idp.OnInit<IndexCommit>(null);
+            idp.OnCommit<IndexCommit>(null);
+        }
+
+        [Test]
+        public virtual void TestFinalSingleton()
+        {
+            assertTrue(typeof(NoDeletionPolicy).GetTypeInfo().IsSealed);
+            ConstructorInfo[] ctors = typeof(NoDeletionPolicy).GetConstructors(BindingFlags.Instance |
+                BindingFlags.NonPublic |
+                BindingFlags.Public |
+                BindingFlags.DeclaredOnly); // LUCENENET NOTE: It seems .NET automatically adds a private static constructor, so leaving off the static BindingFlag
+            assertEquals("expected 1 private ctor only: " + Arrays.ToString(ctors), 1, ctors.Length);
+            assertTrue("that 1 should be private: " + ctors[0], ctors[0].IsPrivate);
+        }
+
+        [Test]
+        public virtual void TestMethodsOverridden()
+        {
+            // Ensures that all methods of IndexDeletionPolicy are
+            // overridden/implemented. That's important to ensure that NoDeletionPolicy
+            // overrides everything, so that no unexpected behavior/error occurs.
+            // NOTE: even though IndexDeletionPolicy is an interface today, and so all
+            // methods must be implemented by NoDeletionPolicy, this test is important
+            // in case one day IDP becomes an abstract class.
+            foreach (MethodInfo m in typeof(NoDeletionPolicy).GetMethods())
+            {
+                // GetMethods() returns the methods visible on NoDeletionPolicy,
+                // including ones inherited from Object, so filter out Object's
+                // methods. If in the future IndexDeletionPolicy becomes a class
+                // that extends something other than Object, this will need to change.
+                if (m.DeclaringType != typeof(object) && !m.IsFinal && m.IsVirtual)
+                {
+                    Assert.IsTrue(m.DeclaringType == typeof(NoDeletionPolicy), m + " is not overridden!");
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestAllCommitsRemain()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
+            for (int i = 0; i < 10; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField("c", "a" + i, Field.Store.YES));
+                writer.AddDocument(doc);
+                writer.Commit();
+                Assert.AreEqual(i + 1, DirectoryReader.ListCommits(dir).Count, "wrong number of commits!");
+            }
+            writer.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestNoMergePolicy.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestNoMergePolicy.cs b/src/Lucene.Net.Tests/Index/TestNoMergePolicy.cs
new file mode 100644
index 0000000..694126b
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestNoMergePolicy.cs
@@ -0,0 +1,83 @@
+using Lucene.Net.Support;
+using NUnit.Framework;
+using System.Reflection;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    [TestFixture]
+    public class TestNoMergePolicy : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestNoMergePolicy_Mem()
+        {
+            MergePolicy mp = NoMergePolicy.NO_COMPOUND_FILES;
+            Assert.IsNull(mp.FindMerges(null, (SegmentInfos)null));
+            Assert.IsNull(mp.FindForcedMerges(null, 0, null));
+            Assert.IsNull(mp.FindForcedDeletesMerges(null));
+            Assert.IsFalse(mp.UseCompoundFile(null, null));
+            mp.Dispose();
+        }
+
+        [Test]
+        public virtual void TestCompoundFiles()
+        {
+            Assert.IsFalse(NoMergePolicy.NO_COMPOUND_FILES.UseCompoundFile(null, null));
+            Assert.IsTrue(NoMergePolicy.COMPOUND_FILES.UseCompoundFile(null, null));
+        }
+
+        [Test]
+        public virtual void TestFinalSingleton()
+        {
+            assertTrue(typeof(NoMergePolicy).GetTypeInfo().IsSealed);
+            ConstructorInfo[] ctors = typeof(NoMergePolicy).GetConstructors(BindingFlags.Instance |
+                BindingFlags.NonPublic |
+                BindingFlags.Public |
+                BindingFlags.DeclaredOnly); // LUCENENET NOTE: It seems .NET automatically adds a private static constructor, so leaving off the static BindingFlag
+            assertEquals("expected 1 private ctor only: " + Arrays.ToString(ctors), 1, ctors.Length);
+            assertTrue("that 1 should be private: " + ctors[0], ctors[0].IsPrivate);
+        }
+
+        [Test]
+        public virtual void TestMethodsOverridden()
+        {
+            // Ensures that all methods of MergePolicy are overridden. That's important
+            // to ensure that NoMergePolicy overrides everything, so that no unexpected
+            // behavior/error occurs
+            foreach (MethodInfo m in typeof(NoMergePolicy).GetMethods())
+            {
+                // GetMethods() returns the methods visible on NoMergePolicy,
+                // including ones inherited from Object, so filter out Object's
+                // methods. If in the future MergePolicy extends something other
+                // than Object, this will need to change.
+                if (m.Name.Equals("Clone"))
+                {
+                    continue;
+                }
+                if (m.DeclaringType != typeof(object) && !m.IsFinal && m.IsVirtual)
+                {
+                    Assert.IsTrue(m.DeclaringType == typeof(NoMergePolicy), m + " is not overridden! Declaring type: " + m.DeclaringType);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
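
Editor's note: the reflection tests above treat NoMergePolicy as a pure stub; in an actual index it is simply installed on the writer configuration, as the doc-values tests later in this commit do. A one-line sketch, assuming an existing IndexWriterConfig named conf:

    conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // segments are flushed but never merged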

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestNoMergeScheduler.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestNoMergeScheduler.cs b/src/Lucene.Net.Tests/Index/TestNoMergeScheduler.cs
new file mode 100644
index 0000000..9e20f64
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestNoMergeScheduler.cs
@@ -0,0 +1,72 @@
+using Lucene.Net.Randomized.Generators;
+using Lucene.Net.Support;
+using NUnit.Framework;
+using System;
+using System.Linq;
+using System.Reflection;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    [TestFixture]
+    public class TestNoMergeScheduler : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestNoMergeScheduler_Mem()
+        {
+            MergeScheduler ms = NoMergeScheduler.INSTANCE;
+            ms.Dispose();
+            ms.Merge(null, RandomInts.RandomFrom(Random(), Enum.GetValues(typeof(MergeTrigger)).Cast<MergeTrigger>().ToArray()), Random().NextBoolean());
+        }
+
+        [Test]
+        public virtual void TestFinalSingleton()
+        {
+            assertTrue(typeof(NoMergeScheduler).GetTypeInfo().IsSealed);
+            ConstructorInfo[] ctors = typeof(NoMergeScheduler).GetConstructors(BindingFlags.Instance |
+                BindingFlags.NonPublic |
+                BindingFlags.Public |
+                BindingFlags.DeclaredOnly); // LUCENENET NOTE: It seems .NET automatically adds a private static constructor, so leaving off the static BindingFlag
+            assertEquals("expected 1 private ctor only: " + Arrays.ToString(ctors), 1, ctors.Length);
+            assertTrue("that 1 should be private: " + ctors[0], ctors[0].IsPrivate);
+        }
+
+        [Test]
+        public virtual void TestMethodsOverridden()
+        {
+            // Ensures that all methods of MergeScheduler are overridden. That's
+            // important to ensure that NoMergeScheduler overrides everything, so that
+            // no unexpected behavior/error occurs
+            foreach (MethodInfo m in typeof(NoMergeScheduler).GetMethods())
+            {
+                // GetMethods() returns the methods visible on NoMergeScheduler,
+                // including ones inherited from Object, so filter out Object's
+                // methods. If in the future MergeScheduler extends something
+                // other than Object, this will need to change.
+                if (m.DeclaringType != typeof(object) && !m.IsFinal && m.IsVirtual)
+                {
+                    Assert.IsTrue(m.DeclaringType == typeof(NoMergeScheduler), m + " is not overridden!");
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
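
Editor's note: NoMergeScheduler is the runtime companion to NoMergePolicy; Merge() can be called, as the test above does, but nothing is ever executed. Installing it is equally small. A sketch, assuming an existing IndexWriterConfig named conf and the 4.8-era SetMergeScheduler setter:

    conf.SetMergeScheduler(NoMergeScheduler.INSTANCE); // merge requests are accepted and ignored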

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestNorms.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestNorms.cs b/src/Lucene.Net.Tests/Index/TestNorms.cs
new file mode 100644
index 0000000..f6bcdcf
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestNorms.cs
@@ -0,0 +1,252 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CollectionStatistics = Lucene.Net.Search.CollectionStatistics;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LineFileDocs = Lucene.Net.Util.LineFileDocs;
+
+    //using Slow = Lucene.Net.Util.LuceneTestCase.Slow;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using PerFieldSimilarityWrapper = Lucene.Net.Search.Similarities.PerFieldSimilarityWrapper;
+    using Similarity = Lucene.Net.Search.Similarities.Similarity;
+    using TermStatistics = Lucene.Net.Search.TermStatistics;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+    using TFIDFSimilarity = Lucene.Net.Search.Similarities.TFIDFSimilarity;
+
+    /// <summary>
+    /// Test that norms info is preserved during index life - including
+    /// separate norms, addDocument, addIndexes, forceMerge.
+    /// </summary>
+    [SuppressCodecs("Memory", "Direct", "SimpleText")]
+    [TestFixture]
+    public class TestNorms : LuceneTestCase
+    {
+        internal readonly string ByteTestField = "normsTestByte";
+
+        internal class CustomNormEncodingSimilarity : TFIDFSimilarity
+        {
+            private readonly TestNorms OuterInstance;
+
+            public CustomNormEncodingSimilarity(TestNorms outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override long EncodeNormValue(float f)
+            {
+                return (long)f;
+            }
+
+            public override float DecodeNormValue(long norm)
+            {
+                return norm;
+            }
+
+            public override float LengthNorm(FieldInvertState state)
+            {
+                return state.Length;
+            }
+
+            public override float Coord(int overlap, int maxOverlap)
+            {
+                return 0;
+            }
+
+            public override float QueryNorm(float sumOfSquaredWeights)
+            {
+                return 0;
+            }
+
+            public override float Tf(float freq)
+            {
+                return 0;
+            }
+
+            public override float Idf(long docFreq, long numDocs)
+            {
+                return 0;
+            }
+
+            public override float SloppyFreq(int distance)
+            {
+                return 0;
+            }
+
+            public override float ScorePayload(int doc, int start, int end, BytesRef payload)
+            {
+                return 0;
+            }
+        }
+
+        // LUCENE-1260
+        [Test]
+        public virtual void TestCustomEncoder()
+        {
+            Directory dir = NewDirectory();
+            MockAnalyzer analyzer = new MockAnalyzer(Random());
+
+            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            config.SetSimilarity(new CustomNormEncodingSimilarity(this));
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, config);
+            Document doc = new Document();
+            Field foo = NewTextField("foo", "", Field.Store.NO);
+            Field bar = NewTextField("bar", "", Field.Store.NO);
+            doc.Add(foo);
+            doc.Add(bar);
+
+            for (int i = 0; i < 100; i++)
+            {
+                bar.SetStringValue("singleton");
+                writer.AddDocument(doc);
+            }
+
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            NumericDocValues fooNorms = MultiDocValues.GetNormValues(reader, "foo");
+            for (int i = 0; i < reader.MaxDoc; i++)
+            {
+                Assert.AreEqual(0, fooNorms.Get(i));
+            }
+
+            NumericDocValues barNorms = MultiDocValues.GetNormValues(reader, "bar");
+            for (int i = 0; i < reader.MaxDoc; i++)
+            {
+                Assert.AreEqual(1, barNorms.Get(i));
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestMaxByteNorms()
+        {
+            Directory dir = NewFSDirectory(CreateTempDir("TestNorms.testMaxByteNorms"));
+            BuildIndex(dir);
+            AtomicReader open = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
+            NumericDocValues normValues = open.GetNormValues(ByteTestField);
+            Assert.IsNotNull(normValues);
+            for (int i = 0; i < open.MaxDoc; i++)
+            {
+                Document document = open.Document(i);
+                int expected = Convert.ToInt32(document.Get(ByteTestField));
+                Assert.AreEqual(expected, normValues.Get(i) & 0xff);
+            }
+            open.Dispose();
+            dir.Dispose();
+        }
+
+        // TODO: create a testNormsNotPresent ourselves by adding/deleting/merging docs
+
+        public virtual void BuildIndex(Directory dir)
+        {
+            Random random = Random();
+            MockAnalyzer analyzer = new MockAnalyzer(Random());
+            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
+            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            Similarity provider = new MySimProvider(this);
+            config.SetSimilarity(provider);
+            RandomIndexWriter writer = new RandomIndexWriter(random, dir, config);
+            LineFileDocs docs = new LineFileDocs(random, DefaultCodecSupportsDocValues());
+            int num = AtLeast(100);
+            for (int i = 0; i < num; i++)
+            {
+                Document doc = docs.NextDoc();
+                int boost = Random().Next(255);
+                Field f = new TextField(ByteTestField, "" + boost, Field.Store.YES);
+                f.Boost = boost;
+                doc.Add(f);
+                writer.AddDocument(doc);
+                doc.RemoveField(ByteTestField);
+                if (Rarely())
+                {
+                    writer.Commit();
+                }
+            }
+            writer.Commit();
+            writer.Dispose();
+            docs.Dispose();
+        }
+
+        public class MySimProvider : PerFieldSimilarityWrapper
+        {
+            private readonly TestNorms OuterInstance;
+
+            public MySimProvider(TestNorms outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            internal Similarity @delegate = new DefaultSimilarity();
+
+            public override float QueryNorm(float sumOfSquaredWeights)
+            {
+                return @delegate.QueryNorm(sumOfSquaredWeights);
+            }
+
+            public override Similarity Get(string field)
+            {
+                if (OuterInstance.ByteTestField.Equals(field))
+                {
+                    return new ByteEncodingBoostSimilarity();
+                }
+                else
+                {
+                    return @delegate;
+                }
+            }
+
+            public override float Coord(int overlap, int maxOverlap)
+            {
+                return @delegate.Coord(overlap, maxOverlap);
+            }
+        }
+
+        public class ByteEncodingBoostSimilarity : Similarity
+        {
+            public override long ComputeNorm(FieldInvertState state)
+            {
+                int boost = (int)state.Boost;
+                return (sbyte)boost;
+            }
+
+            public override SimWeight ComputeWeight(float queryBoost, CollectionStatistics collectionStats, params TermStatistics[] termStats)
+            {
+                throw new System.NotSupportedException();
+            }
+
+            public override SimScorer GetSimScorer(SimWeight weight, AtomicReaderContext context)
+            {
+                throw new System.NotSupportedException();
+            }
+        }
+    }
+}
\ No newline at end of file
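
Editor's note: TestMaxByteNorms above relies on a narrowing cast: ByteEncodingBoostSimilarity stores the boost as an sbyte, so boosts in the range 128-255 round-trip through a negative value, and the assertion masks with 0xff to recover the original byte. The arithmetic, spelled out:

    int boost = 200;
    long stored = (sbyte)boost;     // narrowing cast: 200 becomes -56
    long recovered = stored & 0xff; // masking the low byte restores 200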


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs b/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs
new file mode 100644
index 0000000..827433f
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs
@@ -0,0 +1,1698 @@
+using System;
+using System.Collections.Generic;
+using System.Threading;
+using Lucene.Net.Codecs;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.IO;
+    using AssertingDocValuesFormat = Lucene.Net.Codecs.asserting.AssertingDocValuesFormat;
+    using BinaryDocValuesField = BinaryDocValuesField;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using DocValuesFormat = Lucene.Net.Codecs.DocValuesFormat;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using Lucene40RWCodec = Lucene.Net.Codecs.Lucene40.Lucene40RWCodec;
+    using Lucene41RWCodec = Lucene.Net.Codecs.Lucene41.Lucene41RWCodec;
+    using Lucene42RWCodec = Lucene.Net.Codecs.Lucene42.Lucene42RWCodec;
+    using Lucene45DocValuesFormat = Lucene.Net.Codecs.Lucene45.Lucene45DocValuesFormat;
+    using Lucene45RWCodec = Lucene.Net.Codecs.Lucene45.Lucene45RWCodec;
+    using Lucene46Codec = Lucene.Net.Codecs.Lucene46.Lucene46Codec;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using NumericDocValuesField = NumericDocValuesField;
+    using SortedDocValuesField = SortedDocValuesField;
+    using SortedSetDocValuesField = SortedSetDocValuesField;
+    using Store = Field.Store;
+    using StringField = StringField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using Attributes;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+    [SuppressCodecs("Appending", "Lucene3x", "Lucene40", "Lucene41", "Lucene42", "Lucene45")]
+    [TestFixture]
+    public class TestNumericDocValuesUpdates : LuceneTestCase
+    {
+        private Document Doc(int id)
+        {
+            Document doc = new Document();
+            doc.Add(new StringField("id", "doc-" + id, Store.NO));
+            // make sure we don't set the doc's value to 0, so it's not confused with a document that's missing the value
+            doc.Add(new NumericDocValuesField("val", id + 1));
+            return doc;
+        }
+
+        [Test]
+        public virtual void TestUpdatesAreFlushed()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).SetRAMBufferSizeMB(0.00000001));
+            writer.AddDocument(Doc(0)); // val=1
+            writer.AddDocument(Doc(1)); // val=2
+            writer.AddDocument(Doc(3)); // val=4
+            writer.Commit();
+            Assert.AreEqual(1, writer.FlushDeletesCount);
+            writer.UpdateNumericDocValue(new Term("id", "doc-0"), "val", 5L);
+            Assert.AreEqual(2, writer.FlushDeletesCount);
+            writer.UpdateNumericDocValue(new Term("id", "doc-1"), "val", 6L);
+            Assert.AreEqual(3, writer.FlushDeletesCount);
+            writer.UpdateNumericDocValue(new Term("id", "doc-2"), "val", 7L);
+            Assert.AreEqual(4, writer.FlushDeletesCount);
+            writer.Config.SetRAMBufferSizeMB(1000d);
+            writer.UpdateNumericDocValue(new Term("id", "doc-2"), "val", 7L);
+            Assert.AreEqual(4, writer.FlushDeletesCount);
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSimple()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            // make sure random config doesn't flush on us
+            conf.SetMaxBufferedDocs(10);
+            conf.SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+            IndexWriter writer = new IndexWriter(dir, conf);
+            writer.AddDocument(Doc(0)); // val=1
+            writer.AddDocument(Doc(1)); // val=2
+            if (Random().NextBoolean()) // randomly commit before the update is sent
+            {
+                writer.Commit();
+            }
+            writer.UpdateNumericDocValue(new Term("id", "doc-0"), "val", 2L); // doc=0, exp=2
+
+            DirectoryReader reader;
+            if (Random().NextBoolean()) // not NRT
+            {
+                writer.Dispose();
+                reader = DirectoryReader.Open(dir);
+            } // NRT
+            else
+            {
+                reader = DirectoryReader.Open(writer, true);
+                writer.Dispose();
+            }
+
+            Assert.AreEqual(1, reader.Leaves.Count);
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+            NumericDocValues ndv = r.GetNumericDocValues("val");
+            Assert.AreEqual(2, ndv.Get(0));
+            Assert.AreEqual(2, ndv.Get(1));
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateFewSegments()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMaxBufferedDocs(2); // generate few segments
+            conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges for this test
+            IndexWriter writer = new IndexWriter(dir, conf);
+            int numDocs = 10;
+            long[] expectedValues = new long[numDocs];
+            for (int i = 0; i < numDocs; i++)
+            {
+                writer.AddDocument(Doc(i));
+                expectedValues[i] = i + 1;
+            }
+            writer.Commit();
+
+            // update few docs
+            for (int i = 0; i < numDocs; i++)
+            {
+                if (Random().NextDouble() < 0.4)
+                {
+                    long value = (i + 1) * 2;
+                    writer.UpdateNumericDocValue(new Term("id", "doc-" + i), "val", value);
+                    expectedValues[i] = value;
+                }
+            }
+
+            DirectoryReader reader;
+            if (Random().NextBoolean()) // not NRT
+            {
+                writer.Dispose();
+                reader = DirectoryReader.Open(dir);
+            } // NRT
+            else
+            {
+                reader = DirectoryReader.Open(writer, true);
+                writer.Dispose();
+            }
+
+            foreach (AtomicReaderContext context in reader.Leaves)
+            {
+                AtomicReader r = context.AtomicReader;
+                NumericDocValues ndv = r.GetNumericDocValues("val");
+                Assert.IsNotNull(ndv);
+                for (int i = 0; i < r.MaxDoc; i++)
+                {
+                    long expected = expectedValues[i + context.DocBase];
+                    long actual = ndv.Get(i);
+                    Assert.AreEqual(expected, actual);
+                }
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestReopen()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+            writer.AddDocument(Doc(0));
+            writer.AddDocument(Doc(1));
+
+            bool isNRT = Random().NextBoolean();
+            DirectoryReader reader1;
+            if (isNRT)
+            {
+                reader1 = DirectoryReader.Open(writer, true);
+            }
+            else
+            {
+                writer.Commit();
+                reader1 = DirectoryReader.Open(dir);
+            }
+
+            // update doc
+            writer.UpdateNumericDocValue(new Term("id", "doc-0"), "val", 10L); // update doc-0's value to 10
+            if (!isNRT)
+            {
+                writer.Commit();
+            }
+
+            // reopen reader and assert only it sees the update
+            DirectoryReader reader2 = DirectoryReader.OpenIfChanged(reader1);
+            Assert.IsNotNull(reader2);
+            Assert.IsTrue(reader1 != reader2);
+
+            Assert.AreEqual(1, ((AtomicReader)reader1.Leaves[0].Reader).GetNumericDocValues("val").Get(0));
+            Assert.AreEqual(10, ((AtomicReader)reader2.Leaves[0].Reader).GetNumericDocValues("val").Get(0));
+
+            IOUtils.Close(writer, reader1, reader2, dir);
+        }
+
+        [Test]
+        public virtual void TestUpdatesAndDeletes()
+        {
+            // create an index with a segment with only deletes, a segment with both
+            // deletes and updates and a segment with only updates
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMaxBufferedDocs(10); // control segment flushing
+            conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges for this test
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            for (int i = 0; i < 6; i++)
+            {
+                writer.AddDocument(Doc(i));
+                if (i % 2 == 1)
+                {
+                    writer.Commit(); // create 2-docs segments
+                }
+            }
+
+            // delete doc-1 and doc-2
+            writer.DeleteDocuments(new Term("id", "doc-1"), new Term("id", "doc-2")); // 1st and 2nd segments
+
+            // update docs 3 and 5
+            writer.UpdateNumericDocValue(new Term("id", "doc-3"), "val", 17L);
+            writer.UpdateNumericDocValue(new Term("id", "doc-5"), "val", 17L);
+
+            DirectoryReader reader;
+            if (Random().NextBoolean()) // not NRT
+            {
+                writer.Dispose();
+                reader = DirectoryReader.Open(dir);
+            } // NRT
+            else
+            {
+                reader = DirectoryReader.Open(writer, true);
+                writer.Dispose();
+            }
+
+            AtomicReader slow = SlowCompositeReaderWrapper.Wrap(reader);
+
+            IBits liveDocs = slow.LiveDocs;
+            bool[] expectedLiveDocs = new bool[] { true, false, false, true, true, true };
+            for (int i = 0; i < expectedLiveDocs.Length; i++)
+            {
+                Assert.AreEqual(expectedLiveDocs[i], liveDocs.Get(i));
+            }
+
+            long[] expectedValues = new long[] { 1, 2, 3, 17, 5, 17 };
+            NumericDocValues ndv = slow.GetNumericDocValues("val");
+            for (int i = 0; i < expectedValues.Length; i++)
+            {
+                Assert.AreEqual(expectedValues[i], ndv.Get(i));
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdatesWithDeletes()
+        {
+            // update and delete different documents in the same commit session
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMaxBufferedDocs(10); // control segment flushing
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            writer.AddDocument(Doc(0));
+            writer.AddDocument(Doc(1));
+
+            if (Random().NextBoolean())
+            {
+                writer.Commit();
+            }
+
+            writer.DeleteDocuments(new Term("id", "doc-0"));
+            writer.UpdateNumericDocValue(new Term("id", "doc-1"), "val", 17L);
+
+            DirectoryReader reader;
+            if (Random().NextBoolean()) // not NRT
+            {
+                writer.Dispose();
+                reader = DirectoryReader.Open(dir);
+            } // NRT
+            else
+            {
+                reader = DirectoryReader.Open(writer, true);
+                writer.Dispose();
+            }
+
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+            Assert.IsFalse(r.LiveDocs.Get(0));
+            Assert.AreEqual(17, r.GetNumericDocValues("val").Get(1));
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateAndDeleteSameDocument()
+        {
+            // update and delete same document in same commit session
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMaxBufferedDocs(10); // control segment flushing
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            writer.AddDocument(Doc(0));
+            writer.AddDocument(Doc(1));
+
+            if (Random().NextBoolean())
+            {
+                writer.Commit();
+            }
+
+            writer.DeleteDocuments(new Term("id", "doc-0"));
+            writer.UpdateNumericDocValue(new Term("id", "doc-0"), "val", 17L);
+
+            DirectoryReader reader;
+            if (Random().NextBoolean()) // not NRT
+            {
+                writer.Dispose();
+                reader = DirectoryReader.Open(dir);
+            } // NRT
+            else
+            {
+                reader = DirectoryReader.Open(writer, true);
+                writer.Dispose();
+            }
+
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+            Assert.IsFalse(r.LiveDocs.Get(0));
+            Assert.AreEqual(1, r.GetNumericDocValues("val").Get(0)); // deletes are currently applied first
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestMultipleDocValuesTypes()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMaxBufferedDocs(10); // prevent merges
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            for (int i = 0; i < 4; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("dvUpdateKey", "dv", Store.NO));
+                doc.Add(new NumericDocValuesField("ndv", i));
+                doc.Add(new BinaryDocValuesField("bdv", new BytesRef(Convert.ToString(i))));
+                doc.Add(new SortedDocValuesField("sdv", new BytesRef(Convert.ToString(i))));
+                doc.Add(new SortedSetDocValuesField("ssdv", new BytesRef(Convert.ToString(i))));
+                doc.Add(new SortedSetDocValuesField("ssdv", new BytesRef(Convert.ToString(i * 2))));
+                writer.AddDocument(doc);
+            }
+            writer.Commit();
+
+            // update all docs' ndv field
+            writer.UpdateNumericDocValue(new Term("dvUpdateKey", "dv"), "ndv", 17L);
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+            NumericDocValues ndv = r.GetNumericDocValues("ndv");
+            BinaryDocValues bdv = r.GetBinaryDocValues("bdv");
+            SortedDocValues sdv = r.GetSortedDocValues("sdv");
+            SortedSetDocValues ssdv = r.GetSortedSetDocValues("ssdv");
+            BytesRef scratch = new BytesRef();
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                Assert.AreEqual(17, ndv.Get(i));
+                bdv.Get(i, scratch);
+                Assert.AreEqual(new BytesRef(Convert.ToString(i)), scratch);
+                sdv.Get(i, scratch);
+                Assert.AreEqual(new BytesRef(Convert.ToString(i)), scratch);
+                ssdv.SetDocument(i);
+                long ord = ssdv.NextOrd();
+                ssdv.LookupOrd(ord, scratch);
+                Assert.AreEqual(i, Convert.ToInt32(scratch.Utf8ToString()));
+                if (i != 0)
+                {
+                    ord = ssdv.NextOrd();
+                    ssdv.LookupOrd(ord, scratch);
+                    Assert.AreEqual(i * 2, Convert.ToInt32(scratch.Utf8ToString()));
+                }
+                Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, ssdv.NextOrd());
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestMultipleNumericDocValues()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMaxBufferedDocs(10); // prevent merges
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            for (int i = 0; i < 2; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("dvUpdateKey", "dv", Store.NO));
+                doc.Add(new NumericDocValuesField("ndv1", i));
+                doc.Add(new NumericDocValuesField("ndv2", i));
+                writer.AddDocument(doc);
+            }
+            writer.Commit();
+
+            // update all docs' ndv1 field
+            writer.UpdateNumericDocValue(new Term("dvUpdateKey", "dv"), "ndv1", 17L);
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+            NumericDocValues ndv1 = r.GetNumericDocValues("ndv1");
+            NumericDocValues ndv2 = r.GetNumericDocValues("ndv2");
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                Assert.AreEqual(17, ndv1.Get(i));
+                Assert.AreEqual(i, ndv2.Get(i));
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDocumentWithNoValue()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            for (int i = 0; i < 2; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("dvUpdateKey", "dv", Store.NO));
+                if (i == 0) // index only one document with value
+                {
+                    doc.Add(new NumericDocValuesField("ndv", 5));
+                }
+                writer.AddDocument(doc);
+            }
+            writer.Commit();
+
+            // update all docs' ndv field
+            writer.UpdateNumericDocValue(new Term("dvUpdateKey", "dv"), "ndv", 17L);
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+            NumericDocValues ndv = r.GetNumericDocValues("ndv");
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                Assert.AreEqual(17, ndv.Get(i));
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUnsetValue()
+        {
+            AssumeTrue("codec does not support docsWithField", DefaultCodecSupportsDocsWithField());
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            for (int i = 0; i < 2; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", "doc" + i, Store.NO));
+                doc.Add(new NumericDocValuesField("ndv", 5));
+                writer.AddDocument(doc);
+            }
+            writer.Commit();
+
+            // unset the value of 'doc0'
+            writer.UpdateNumericDocValue(new Term("id", "doc0"), "ndv", null);
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+            NumericDocValues ndv = r.GetNumericDocValues("ndv");
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                if (i == 0)
+                {
+                    Assert.AreEqual(0, ndv.Get(i));
+                }
+                else
+                {
+                    Assert.AreEqual(5, ndv.Get(i));
+                }
+            }
+
+            IBits docsWithField = r.GetDocsWithField("ndv");
+            Assert.IsFalse(docsWithField.Get(0));
+            Assert.IsTrue(docsWithField.Get(1));
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUnsetAllValues()
+        {
+            AssumeTrue("codec does not support docsWithField", DefaultCodecSupportsDocsWithField());
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            for (int i = 0; i < 2; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", "doc", Store.NO));
+                doc.Add(new NumericDocValuesField("ndv", 5));
+                writer.AddDocument(doc);
+            }
+            writer.Commit();
+
+            // unset the value of 'doc'
+            writer.UpdateNumericDocValue(new Term("id", "doc"), "ndv", null);
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+            NumericDocValues ndv = r.GetNumericDocValues("ndv");
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                Assert.AreEqual(0, ndv.Get(i));
+            }
+
+            IBits docsWithField = r.GetDocsWithField("ndv");
+            Assert.IsFalse(docsWithField.Get(0));
+            Assert.IsFalse(docsWithField.Get(1));
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateNonNumericDocValuesField()
+        {
+            // we don't support adding new fields or updating existing non-numeric-dv
+            // fields through numeric updates
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("key", "doc", Store.NO));
+            doc.Add(new StringField("foo", "bar", Store.NO));
+            writer.AddDocument(doc); // flushed document
+            writer.Commit();
+            writer.AddDocument(doc); // in-memory document
+
+            try
+            {
+                writer.UpdateNumericDocValue(new Term("key", "doc"), "ndv", 17L);
+                Assert.Fail("should not have allowed creating new fields through update");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // ok
+            }
+
+            try
+            {
+                writer.UpdateNumericDocValue(new Term("key", "doc"), "foo", 17L);
+                Assert.Fail("should not have allowed updating an existing field to numeric-dv");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // ok
+            }
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDifferentDVFormatPerField()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetCodec(new Lucene46CodecAnonymousInnerClassHelper(this));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("key", "doc", Store.NO));
+            doc.Add(new NumericDocValuesField("ndv", 5));
+            doc.Add(new SortedDocValuesField("sorted", new BytesRef("value")));
+            writer.AddDocument(doc); // flushed document
+            writer.Commit();
+            writer.AddDocument(doc); // in-memory document
+
+            writer.UpdateNumericDocValue(new Term("key", "doc"), "ndv", 17L);
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+
+            AtomicReader r = SlowCompositeReaderWrapper.Wrap(reader);
+            NumericDocValues ndv = r.GetNumericDocValues("ndv");
+            SortedDocValues sdv = r.GetSortedDocValues("sorted");
+            BytesRef scratch = new BytesRef();
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                Assert.AreEqual(17, ndv.Get(i));
+                sdv.Get(i, scratch);
+                Assert.AreEqual(new BytesRef("value"), scratch);
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        private class Lucene46CodecAnonymousInnerClassHelper : Lucene46Codec
+        {
+            private readonly TestNumericDocValuesUpdates OuterInstance;
+
+            public Lucene46CodecAnonymousInnerClassHelper(TestNumericDocValuesUpdates outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override DocValuesFormat GetDocValuesFormatForField(string field)
+            {
+                return new Lucene45DocValuesFormat();
+            }
+        }
+
+        [Test]
+        public virtual void TestUpdateSameDocMultipleTimes()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("key", "doc", Store.NO));
+            doc.Add(new NumericDocValuesField("ndv", 5));
+            writer.AddDocument(doc); // flushed document
+            writer.Commit();
+            writer.AddDocument(doc); // in-memory document
+
+            writer.UpdateNumericDocValue(new Term("key", "doc"), "ndv", 17L); // update existing field
+            writer.UpdateNumericDocValue(new Term("key", "doc"), "ndv", 3L); // update existing field 2nd time in this commit
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader r = SlowCompositeReaderWrapper.Wrap(reader);
+            NumericDocValues ndv = r.GetNumericDocValues("ndv");
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                Assert.AreEqual(3, ndv.Get(i));
+            }
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSegmentMerges()
+        {
+            Directory dir = NewDirectory();
+            Random random = Random();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+
+            int docid = 0;
+            int numRounds = AtLeast(10);
+            for (int rnd = 0; rnd < numRounds; rnd++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("key", "doc", Store.NO));
+                doc.Add(new NumericDocValuesField("ndv", -1));
+                int numDocs = AtLeast(30);
+                for (int i = 0; i < numDocs; i++)
+                {
+                    doc.RemoveField("id");
+                    doc.Add(new StringField("id", Convert.ToString(docid++), Store.NO));
+                    writer.AddDocument(doc);
+                }
+
+                long value = rnd + 1;
+                writer.UpdateNumericDocValue(new Term("key", "doc"), "ndv", value);
+
+                if (random.NextDouble() < 0.2) // randomly delete some docs
+                {
+                    writer.DeleteDocuments(new Term("id", Convert.ToString(random.Next(docid))));
+                }
+
+                // randomly commit or reopen-IW (or nothing), before forceMerge
+                if (random.NextDouble() < 0.4)
+                {
+                    writer.Commit();
+                }
+                else if (random.NextDouble() < 0.1)
+                {
+                    writer.Dispose();
+                    writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+                }
+
+                // add another document with the current value, to be sure forceMerge has
+                // something to merge (for instance, it could be that CMS finished merging
+                // all segments down to 1 before the delete was applied, so when
+            // forceMerge is called, the index will have a single segment with deletes,
+            // and some MPs might not merge it, thereby invalidating the test's
+                // assumption that the reader has no deletes).
+                doc = new Document();
+                doc.Add(new StringField("id", Convert.ToString(docid++), Store.NO));
+                doc.Add(new StringField("key", "doc", Store.NO));
+                doc.Add(new NumericDocValuesField("ndv", value));
+                writer.AddDocument(doc);
+
+                writer.ForceMerge(1, true);
+                DirectoryReader reader;
+                if (random.NextBoolean())
+                {
+                    writer.Commit();
+                    reader = DirectoryReader.Open(dir);
+                }
+                else
+                {
+                    reader = DirectoryReader.Open(writer, true);
+                }
+
+                Assert.AreEqual(1, reader.Leaves.Count);
+                AtomicReader r = (AtomicReader)reader.Leaves[0].Reader;
+                Assert.IsNull(r.LiveDocs, "index should have no deletes after forceMerge");
+                NumericDocValues ndv = r.GetNumericDocValues("ndv");
+                Assert.IsNotNull(ndv);
+                for (int i = 0; i < r.MaxDoc; i++)
+                {
+                    Assert.AreEqual(value, ndv.Get(i));
+                }
+                reader.Dispose();
+            }
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateDocumentByMultipleTerms()
+        {
+            // make sure the order of updates is respected, even when multiple terms affect the same document
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("k1", "v1", Store.NO));
+            doc.Add(new StringField("k2", "v2", Store.NO));
+            doc.Add(new NumericDocValuesField("ndv", 5));
+            writer.AddDocument(doc); // flushed document
+            writer.Commit();
+            writer.AddDocument(doc); // in-memory document
+
+            writer.UpdateNumericDocValue(new Term("k1", "v1"), "ndv", 17L);
+            writer.UpdateNumericDocValue(new Term("k2", "v2"), "ndv", 3L);
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader r = SlowCompositeReaderWrapper.Wrap(reader);
+            NumericDocValues ndv = r.GetNumericDocValues("ndv");
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                Assert.AreEqual(3, ndv.Get(i));
+            }
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestManyReopensAndFields()
+        {
+            Directory dir = NewDirectory();
+            Random random = Random();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
+            LogMergePolicy lmp = NewLogMergePolicy();
+            lmp.MergeFactor = 3; // merge often
+            conf.SetMergePolicy(lmp);
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            bool isNRT = random.NextBoolean();
+            DirectoryReader reader;
+            if (isNRT)
+            {
+                reader = DirectoryReader.Open(writer, true);
+            }
+            else
+            {
+                writer.Commit();
+                reader = DirectoryReader.Open(dir);
+            }
+
+            int numFields = random.Next(4) + 3; // 3-7
+            long[] fieldValues = new long[numFields];
+            bool[] fieldHasValue = new bool[numFields];
+            Arrays.Fill(fieldHasValue, true);
+            for (int i = 0; i < fieldValues.Length; i++)
+            {
+                fieldValues[i] = 1;
+            }
+
+            int numRounds = AtLeast(15);
+            int docID = 0;
+            for (int i = 0; i < numRounds; i++)
+            {
+                int numDocs = AtLeast(5);
+                //      System.out.println("[" + Thread.currentThread().getName() + "]: round=" + i + ", numDocs=" + numDocs);
+                for (int j = 0; j < numDocs; j++)
+                {
+                    Document doc = new Document();
+                    doc.Add(new StringField("id", "doc-" + docID, Store.NO));
+                    doc.Add(new StringField("key", "all", Store.NO)); // update key
+                    // add all fields with their current value
+                    for (int f = 0; f < fieldValues.Length; f++)
+                    {
+                        doc.Add(new NumericDocValuesField("f" + f, fieldValues[f]));
+                    }
+                    writer.AddDocument(doc);
+                    ++docID;
+                }
+
+                // if a field's value was unset before, unset it for all newly added documents too
+                for (int field = 0; field < fieldHasValue.Length; field++)
+                {
+                    if (!fieldHasValue[field])
+                    {
+                        writer.UpdateNumericDocValue(new Term("key", "all"), "f" + field, null);
+                    }
+                }
+
+                int fieldIdx = random.Next(fieldValues.Length);
+                string updateField = "f" + fieldIdx;
+                if (random.NextBoolean())
+                {
+                    //        System.out.println("[" + Thread.currentThread().getName() + "]: unset field '" + updateField + "'");
+                    fieldHasValue[fieldIdx] = false;
+                    writer.UpdateNumericDocValue(new Term("key", "all"), updateField, null);
+                }
+                else
+                {
+                    fieldHasValue[fieldIdx] = true;
+                    writer.UpdateNumericDocValue(new Term("key", "all"), updateField, ++fieldValues[fieldIdx]);
+                    //        System.out.println("[" + Thread.currentThread().getName() + "]: updated field '" + updateField + "' to value " + fieldValues[fieldIdx]);
+                }
+
+                if (random.NextDouble() < 0.2)
+                {
+                    int deleteDoc = random.Next(docID); // might also delete an already deleted document, ok!
+                    writer.DeleteDocuments(new Term("id", "doc-" + deleteDoc));
+                    //        System.out.println("[" + Thread.currentThread().getName() + "]: deleted document: doc-" + deleteDoc);
+                }
+
+                // verify reader
+                if (!isNRT)
+                {
+                    writer.Commit();
+                }
+
+                //      System.out.println("[" + Thread.currentThread().getName() + "]: reopen reader: " + reader);
+                DirectoryReader newReader = DirectoryReader.OpenIfChanged(reader);
+                Assert.IsNotNull(newReader);
+                reader.Dispose();
+                reader = newReader;
+                //      System.out.println("[" + Thread.currentThread().getName() + "]: reopened reader: " + reader);
+                Assert.IsTrue(reader.NumDocs > 0); // we delete at most one document per round
+                foreach (AtomicReaderContext context in reader.Leaves)
+                {
+                    AtomicReader r = context.AtomicReader;
+                    //        System.out.println(((SegmentReader) r).getSegmentName());
+                    IBits liveDocs = r.LiveDocs;
+                    for (int field = 0; field < fieldValues.Length; field++)
+                    {
+                        string f = "f" + field;
+                        NumericDocValues ndv = r.GetNumericDocValues(f);
+                        IBits docsWithField = r.GetDocsWithField(f);
+                        Assert.IsNotNull(ndv);
+                        int maxDoc = r.MaxDoc;
+                        for (int doc = 0; doc < maxDoc; doc++)
+                        {
+                            if (liveDocs == null || liveDocs.Get(doc))
+                            {
+                                //              System.out.println("doc=" + (doc + context.DocBase) + " f='" + f + "' vslue=" + ndv.Get(doc));
+                                if (fieldHasValue[field])
+                                {
+                                    Assert.IsTrue(docsWithField.Get(doc));
+                                    Assert.AreEqual(fieldValues[field], ndv.Get(doc), "invalid value for doc=" + doc + ", field=" + f + ", reader=" + r);
+                                }
+                                else
+                                {
+                                    Assert.IsFalse(docsWithField.Get(doc));
+                                }
+                            }
+                        }
+                    }
+                }
+                //      System.out.println();
+            }
+
+            IOUtils.Close(writer, reader, dir);
+        }
+
+        [Test]
+        public virtual void TestUpdateSegmentWithNoDocValues()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            // prevent merges, otherwise by the time updates are applied
+            // (writer.Dispose()), the segments might have merged and that update becomes
+            // legit.
+            conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            // first segment with NDV
+            Document doc = new Document();
+            doc.Add(new StringField("id", "doc0", Store.NO));
+            doc.Add(new NumericDocValuesField("ndv", 3));
+            writer.AddDocument(doc);
+            doc = new Document();
+            doc.Add(new StringField("id", "doc4", Store.NO)); // document without 'ndv' field
+            writer.AddDocument(doc);
+            writer.Commit();
+
+            // second segment with no NDV
+            doc = new Document();
+            doc.Add(new StringField("id", "doc1", Store.NO));
+            writer.AddDocument(doc);
+            doc = new Document();
+            doc.Add(new StringField("id", "doc2", Store.NO)); // document that isn't updated
+            writer.AddDocument(doc);
+            writer.Commit();
+
+            // update document in the first segment - should not affect docsWithField of
+            // the document without NDV field
+            writer.UpdateNumericDocValue(new Term("id", "doc0"), "ndv", 5L);
+
+            // update document in the second segment - field should be added and we should
+            // be able to handle the other document correctly (e.g. no NullReferenceException)
+            writer.UpdateNumericDocValue(new Term("id", "doc1"), "ndv", 5L);
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            foreach (AtomicReaderContext context in reader.Leaves)
+            {
+                AtomicReader r = context.AtomicReader;
+                NumericDocValues ndv = r.GetNumericDocValues("ndv");
+                IBits docsWithField = r.GetDocsWithField("ndv");
+                Assert.IsNotNull(docsWithField);
+                Assert.IsTrue(docsWithField.Get(0));
+                Assert.AreEqual(5L, ndv.Get(0));
+                Assert.IsFalse(docsWithField.Get(1));
+                Assert.AreEqual(0L, ndv.Get(1));
+            }
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateSegmentWithPostingButNoDocValues()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            // prevent merges, otherwise by the time updates are applied
+            // (writer.Dispose()), the segments might have merged and that update becomes
+            // legit.
+            conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            // first segment with NDV
+            Document doc = new Document();
+            doc.Add(new StringField("id", "doc0", Store.NO));
+            doc.Add(new StringField("ndv", "mock-value", Store.NO));
+            doc.Add(new NumericDocValuesField("ndv", 5));
+            writer.AddDocument(doc);
+            writer.Commit();
+
+            // second segment with no NDV
+            doc = new Document();
+            doc.Add(new StringField("id", "doc1", Store.NO));
+            doc.Add(new StringField("ndv", "mock-value", Store.NO));
+            writer.AddDocument(doc);
+            writer.Commit();
+
+            // update document in the second segment
+            writer.UpdateNumericDocValue(new Term("id", "doc1"), "ndv", 5L);
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            foreach (AtomicReaderContext context in reader.Leaves)
+            {
+                AtomicReader r = context.AtomicReader;
+                NumericDocValues ndv = r.GetNumericDocValues("ndv");
+                for (int i = 0; i < r.MaxDoc; i++)
+                {
+                    Assert.AreEqual(5L, ndv.Get(i));
+                }
+            }
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateNumericDVFieldWithSameNameAsPostingField()
+        {
+            // this used to fail because FieldInfos.Builder neglected to update
+            // globalFieldMaps.docValueTypes map
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("f", "mock-value", Store.NO));
+            doc.Add(new NumericDocValuesField("f", 5));
+            writer.AddDocument(doc);
+            writer.Commit();
+            writer.UpdateNumericDocValue(new Term("f", "mock-value"), "f", 17L);
+            writer.Dispose();
+
+            DirectoryReader r = DirectoryReader.Open(dir);
+            NumericDocValues ndv = ((AtomicReader)r.Leaves[0].Reader).GetNumericDocValues("f");
+            Assert.AreEqual(17, ndv.Get(0));
+            r.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateOldSegments()
+        {
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
+
+            Codec[] oldCodecs = new Codec[] { new Lucene40RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE), new Lucene41RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE), new Lucene42RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE), new Lucene45RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE) };
+            Directory dir = NewDirectory();
+
+            bool oldValue = OLD_FORMAT_IMPERSONATION_IS_ACTIVE;
+            // create a segment with an old Codec
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetCodec(oldCodecs[Random().Next(oldCodecs.Length)]);
+            IndexWriter writer = new IndexWriter(dir, conf);
+            Document doc = new Document();
+            doc.Add(new StringField("id", "doc", Store.NO));
+            doc.Add(new NumericDocValuesField("f", 5));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            writer = new IndexWriter(dir, conf);
+            writer.UpdateNumericDocValue(new Term("id", "doc"), "f", 4L);
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = false;
+            try
+            {
+                writer.Dispose();
+                Assert.Fail("should not have succeeded to update a segment written with an old Codec");
+            }
+#pragma warning disable 168
+            catch (System.NotSupportedException e)
+#pragma warning restore 168
+            {
+                writer.Rollback();
+            }
+            finally
+            {
+                OLD_FORMAT_IMPERSONATION_IS_ACTIVE = oldValue;
+            }
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestStressMultiThreading()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            // create index
+            int numThreads = TestUtil.NextInt(Random(), 3, 6);
+            int numDocs = AtLeast(2000);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", "doc" + i, Store.NO));
+                double group = Random().NextDouble();
+                string g;
+                if (group < 0.1)
+                {
+                    g = "g0";
+                }
+                else if (group < 0.5)
+                {
+                    g = "g1";
+                }
+                else if (group < 0.8)
+                {
+                    g = "g2";
+                }
+                else
+                {
+                    g = "g3";
+                }
+                doc.Add(new StringField("updKey", g, Store.NO));
+                for (int j = 0; j < numThreads; j++)
+                {
+                    long value = Random().Next();
+                    doc.Add(new NumericDocValuesField("f" + j, value));
+                    doc.Add(new NumericDocValuesField("cf" + j, value * 2)); // control, always updated to f * 2
+                }
+                writer.AddDocument(doc);
+            }
+
+            CountdownEvent done = new CountdownEvent(numThreads);
+            AtomicInt32 numUpdates = new AtomicInt32(AtLeast(100));
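+            // shared countdown: each thread decrements this until the global update budget is exhausted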
+
+            // each thread updates its own field and also reopens the NRT reader
+            ThreadClass[] threads = new ThreadClass[numThreads];
+            for (int i = 0; i < threads.Length; i++)
+            {
+                string f = "f" + i;
+                string cf = "cf" + i;
+                threads[i] = new ThreadAnonymousInnerClassHelper(this, "UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf);
+            }
+
+            foreach (ThreadClass t in threads)
+            {
+                t.Start();
+            }
+            done.Wait();
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            foreach (AtomicReaderContext context in reader.Leaves)
+            {
+                AtomicReader r = context.AtomicReader;
+                for (int i = 0; i < numThreads; i++)
+                {
+                    NumericDocValues ndv = r.GetNumericDocValues("f" + i);
+                    NumericDocValues control = r.GetNumericDocValues("cf" + i);
+                    IBits docsWithNdv = r.GetDocsWithField("f" + i);
+                    IBits docsWithControl = r.GetDocsWithField("cf" + i);
+                    IBits liveDocs = r.LiveDocs;
+                    for (int j = 0; j < r.MaxDoc; j++)
+                    {
+                        if (liveDocs == null || liveDocs.Get(j))
+                        {
+                            Assert.AreEqual(docsWithNdv.Get(j), docsWithControl.Get(j));
+                            if (docsWithNdv.Get(j))
+                            {
+                                Assert.AreEqual(control.Get(j), ndv.Get(j) * 2);
+                            }
+                        }
+                    }
+                }
+            }
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestNumericDocValuesUpdates OuterInstance;
+
+            private IndexWriter Writer;
+            private int NumDocs;
+            private CountdownEvent Done;
+            private AtomicInt32 NumUpdates;
+            private string f;
+            private string Cf;
+
+            public ThreadAnonymousInnerClassHelper(TestNumericDocValuesUpdates outerInstance, string str, IndexWriter writer, int numDocs, CountdownEvent done, AtomicInt32 numUpdates, string f, string cf)
+                : base(str)
+            {
+                this.OuterInstance = outerInstance;
+                this.Writer = writer;
+                this.NumDocs = numDocs;
+                this.Done = done;
+                this.NumUpdates = numUpdates;
+                this.f = f;
+                this.Cf = cf;
+            }
+
+            public override void Run()
+            {
+                DirectoryReader reader = null;
+                bool success = false;
+                try
+                {
+                    Random random = Random();
+                    while (NumUpdates.GetAndDecrement() > 0)
+                    {
+                        double group = random.NextDouble();
+                        Term t;
+                        if (group < 0.1)
+                        {
+                            t = new Term("updKey", "g0");
+                        }
+                        else if (group < 0.5)
+                        {
+                            t = new Term("updKey", "g1");
+                        }
+                        else if (group < 0.8)
+                        {
+                            t = new Term("updKey", "g2");
+                        }
+                        else
+                        {
+                            t = new Term("updKey", "g3");
+                        }
+                        //              System.out.println("[" + Thread.currentThread().getName() + "] numUpdates=" + numUpdates + " updateTerm=" + t);
+                        if (random.NextBoolean()) // sometimes unset a value
+                        {
+                            Writer.UpdateNumericDocValue(t, f, null);
+                            Writer.UpdateNumericDocValue(t, Cf, null);
+                        }
+                        else
+                        {
+                            long updValue = random.Next();
+                            Writer.UpdateNumericDocValue(t, f, updValue);
+                            Writer.UpdateNumericDocValue(t, Cf, updValue * 2);
+                        }
+
+                        if (random.NextDouble() < 0.2)
+                        {
+                            // delete a random document
+                            int doc = random.Next(NumDocs);
+                            //                System.out.println("[" + Thread.currentThread().getName() + "] deleteDoc=doc" + doc);
+                            Writer.DeleteDocuments(new Term("id", "doc" + doc));
+                        }
+
+                        if (random.NextDouble() < 0.05) // commit every 20 updates on average
+                        {
+                            //                  System.out.println("[" + Thread.currentThread().getName() + "] commit");
+                            Writer.Commit();
+                        }
+
+                        if (random.NextDouble() < 0.1) // reopen NRT reader (apply updates), on average once every 10 updates
+                        {
+                            if (reader == null)
+                            {
+                                //                  System.out.println("[" + Thread.currentThread().getName() + "] open NRT");
+                                reader = DirectoryReader.Open(Writer, true);
+                            }
+                            else
+                            {
+                                //                  System.out.println("[" + Thread.currentThread().getName() + "] reopen NRT");
+                                DirectoryReader r2 = DirectoryReader.OpenIfChanged(reader, Writer, true);
+                                if (r2 != null)
+                                {
+                                    reader.Dispose();
+                                    reader = r2;
+                                }
+                            }
+                        }
+                    }
+                    //            System.out.println("[" + Thread.currentThread().getName() + "] DONE");
+                    success = true;
+                }
+                catch (IOException e)
+                {
+                    throw new Exception(e.Message, e);
+                }
+                finally
+                {
+                    if (reader != null)
+                    {
+                        try
+                        {
+                            reader.Dispose();
+                        }
+                        catch (IOException e)
+                        {
+                            if (success) // suppress this exception only if there was another exception
+                            {
+                                throw new Exception(e.Message, e);
+                            }
+                        }
+                    }
+                    Done.Signal();
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestUpdateDifferentDocsInDifferentGens()
+        {
+            // update different documents across multiple generations of updates
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMaxBufferedDocs(4);
+            IndexWriter writer = new IndexWriter(dir, conf);
+            int numDocs = AtLeast(10);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", "doc" + i, Store.NO));
+                long value = Random().Next();
+                doc.Add(new NumericDocValuesField("f", value));
+                doc.Add(new NumericDocValuesField("cf", value * 2));
+                writer.AddDocument(doc);
+            }
+
+            int numGens = AtLeast(5);
+            for (int i = 0; i < numGens; i++)
+            {
+                int doc = Random().Next(numDocs);
+                Term t = new Term("id", "doc" + doc);
+                long value = Random().NextLong();
+                writer.UpdateNumericDocValue(t, "f", value);
+                writer.UpdateNumericDocValue(t, "cf", value * 2);
+                DirectoryReader reader = DirectoryReader.Open(writer, true);
+                foreach (AtomicReaderContext context in reader.Leaves)
+                {
+                    AtomicReader r = context.AtomicReader;
+                    NumericDocValues fndv = r.GetNumericDocValues("f");
+                    NumericDocValues cfndv = r.GetNumericDocValues("cf");
+                    for (int j = 0; j < r.MaxDoc; j++)
+                    {
+                        Assert.AreEqual(cfndv.Get(j), fndv.Get(j) * 2);
+                    }
+                }
+                reader.Dispose();
+            }
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestChangeCodec()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // disable merges to simplify test assertions.
+            conf.SetCodec(new Lucene46CodecAnonymousInnerClassHelper2(this));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            Document doc = new Document();
+            doc.Add(new StringField("id", "d0", Store.NO));
+            doc.Add(new NumericDocValuesField("f1", 5L));
+            doc.Add(new NumericDocValuesField("f2", 13L));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            // change format
+            conf.SetCodec(new Lucene46CodecAnonymousInnerClassHelper3(this));
+            writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            doc = new Document();
+            doc.Add(new StringField("id", "d1", Store.NO));
+            doc.Add(new NumericDocValuesField("f1", 17L));
+            doc.Add(new NumericDocValuesField("f2", 2L));
+            writer.AddDocument(doc);
+            writer.UpdateNumericDocValue(new Term("id", "d0"), "f1", 12L);
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader r = SlowCompositeReaderWrapper.Wrap(reader);
+            NumericDocValues f1 = r.GetNumericDocValues("f1");
+            NumericDocValues f2 = r.GetNumericDocValues("f2");
+            Assert.AreEqual(12L, f1.Get(0));
+            Assert.AreEqual(13L, f2.Get(0));
+            Assert.AreEqual(17L, f1.Get(1));
+            Assert.AreEqual(2L, f2.Get(1));
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        private class Lucene46CodecAnonymousInnerClassHelper2 : Lucene46Codec
+        {
+            private readonly TestNumericDocValuesUpdates OuterInstance;
+
+            public Lucene46CodecAnonymousInnerClassHelper2(TestNumericDocValuesUpdates outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override DocValuesFormat GetDocValuesFormatForField(string field)
+            {
+                return new Lucene45DocValuesFormat();
+            }
+        }
+
+        private class Lucene46CodecAnonymousInnerClassHelper3 : Lucene46Codec
+        {
+            private readonly TestNumericDocValuesUpdates OuterInstance;
+
+            public Lucene46CodecAnonymousInnerClassHelper3(TestNumericDocValuesUpdates outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override DocValuesFormat GetDocValuesFormatForField(string field)
+            {
+                return new AssertingDocValuesFormat();
+            }
+        }
+
+        [Test]
+        public virtual void TestAddIndexes()
+        {
+            Directory dir1 = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir1, conf);
+
+            int numDocs = AtLeast(50);
+            int numTerms = TestUtil.NextInt(Random(), 1, numDocs / 5);
+            HashSet<string> randomTerms = new HashSet<string>();
+            while (randomTerms.Count < numTerms)
+            {
+                randomTerms.Add(TestUtil.RandomSimpleString(Random()));
+            }
+
+            // create first index
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", RandomInts.RandomFrom(Random(), randomTerms), Store.NO));
+                doc.Add(new NumericDocValuesField("ndv", 4L));
+                doc.Add(new NumericDocValuesField("control", 8L));
+                writer.AddDocument(doc);
+            }
+
+            if (Random().NextBoolean())
+            {
+                writer.Commit();
+            }
+
+            // update some docs to a random value
+            long value = Random().Next();
+            Term term = new Term("id", RandomInts.RandomFrom(Random(), randomTerms));
+            writer.UpdateNumericDocValue(term, "ndv", value);
+            writer.UpdateNumericDocValue(term, "control", value * 2);
+            writer.Dispose();
+
+            Directory dir2 = NewDirectory();
+            conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            writer = new IndexWriter(dir2, conf);
+            if (Random().NextBoolean())
+            {
+                writer.AddIndexes(dir1);
+            }
+            else
+            {
+                DirectoryReader reader = DirectoryReader.Open(dir1);
+                writer.AddIndexes(reader);
+                reader.Dispose();
+            }
+            writer.Dispose();
+
+            DirectoryReader reader_ = DirectoryReader.Open(dir2);
+            foreach (AtomicReaderContext context in reader_.Leaves)
+            {
+                AtomicReader r = context.AtomicReader;
+                NumericDocValues ndv = r.GetNumericDocValues("ndv");
+                NumericDocValues control = r.GetNumericDocValues("control");
+                for (int i = 0; i < r.MaxDoc; i++)
+                {
+                    Assert.AreEqual(ndv.Get(i) * 2, control.Get(i));
+                }
+            }
+            reader_.Dispose();
+
+            IOUtils.Close(dir1, dir2);
+        }
+
+        [Test]
+        public virtual void TestDeleteUnusedUpdatesFiles()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("id", "d0", Store.NO));
+            doc.Add(new NumericDocValuesField("f", 1L));
+            writer.AddDocument(doc);
+
+            // create first gen of update files
+            writer.UpdateNumericDocValue(new Term("id", "d0"), "f", 2L);
+            writer.Commit();
+            int numFiles = dir.ListAll().Length;
+
+            DirectoryReader r = DirectoryReader.Open(dir);
+            Assert.AreEqual(2L, ((AtomicReader)r.Leaves[0].Reader).GetNumericDocValues("f").Get(0));
+            r.Dispose();
+
+            // create second gen of update files, first gen should be deleted
+            writer.UpdateNumericDocValue(new Term("id", "d0"), "f", 5L);
+            writer.Commit();
+            Assert.AreEqual(numFiles, dir.ListAll().Length);
+
+            r = DirectoryReader.Open(dir);
+            Assert.AreEqual(5L, ((AtomicReader)r.Leaves[0].Reader).GetNumericDocValues("f").Get(0));
+            r.Dispose();
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit's Timeout attribute is not available on .NET Core.
+        [Timeout(120000)]
+#endif
+        [Test, HasTimeout]
+        public virtual void TestTonsOfUpdates()
+        {
+            // LUCENE-5248: make sure that when there are many updates, we don't use too much RAM
+            Directory dir = NewDirectory();
+            Random random = Random();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
+            conf.SetRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB);
+            conf.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH); // don't flush by doc
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            // test data: lots of documents (tens of thousands) and lots of update terms (a few hundred)
+            int numDocs = AtLeast(20000);
+            int numNumericFields = AtLeast(5);
+            int numTerms = TestUtil.NextInt(random, 10, 100); // terms should affect many docs
+            HashSet<string> updateTerms = new HashSet<string>();
+            while (updateTerms.Count < numTerms)
+            {
+                updateTerms.Add(TestUtil.RandomSimpleString(random));
+            }
+
+            //    System.out.println("numDocs=" + numDocs + " numNumericFields=" + numNumericFields + " numTerms=" + numTerms);
+
+            // build a large index with many NDV fields and update terms
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                int numUpdateTerms = TestUtil.NextInt(random, 1, numTerms / 10);
+                for (int j = 0; j < numUpdateTerms; j++)
+                {
+                    doc.Add(new StringField("upd", RandomInts.RandomFrom(random, updateTerms), Store.NO));
+                }
+                for (int j = 0; j < numNumericFields; j++)
+                {
+                    long val = random.Next();
+                    doc.Add(new NumericDocValuesField("f" + j, val));
+                    doc.Add(new NumericDocValuesField("cf" + j, val * 2));
+                }
+                writer.AddDocument(doc);
+            }
+
+            writer.Commit(); // commit so there's something to apply to
+
+            // set to flush every 2048 bytes (approximately every 12 updates), so we get
+            // many flushes during numeric updates
+            writer.Config.SetRAMBufferSizeMB(2048.0 / 1024 / 1024);
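+            // 2048.0 / 1024 / 1024 MB == 2048 bytes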
+            int numUpdates = AtLeast(100);
+            //    System.out.println("numUpdates=" + numUpdates);
+            for (int i = 0; i < numUpdates; i++)
+            {
+                int field = random.Next(numNumericFields);
+                Term updateTerm = new Term("upd", RandomInts.RandomFrom(random, updateTerms));
+                long value = random.Next();
+                writer.UpdateNumericDocValue(updateTerm, "f" + field, value);
+                writer.UpdateNumericDocValue(updateTerm, "cf" + field, value * 2);
+            }
+
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            foreach (AtomicReaderContext context in reader.Leaves)
+            {
+                for (int i = 0; i < numNumericFields; i++)
+                {
+                    AtomicReader r = context.AtomicReader;
+                    NumericDocValues f = r.GetNumericDocValues("f" + i);
+                    NumericDocValues cf = r.GetNumericDocValues("cf" + i);
+                    for (int j = 0; j < r.MaxDoc; j++)
+                    {
+                        Assert.AreEqual(cf.Get(j), f.Get(j) * 2, "reader=" + r + ", field=f" + i + ", doc=" + j);
+                    }
+                }
+            }
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdatesOrder()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("upd", "t1", Store.NO));
+            doc.Add(new StringField("upd", "t2", Store.NO));
+            doc.Add(new NumericDocValuesField("f1", 1L));
+            doc.Add(new NumericDocValuesField("f2", 1L));
+            writer.AddDocument(doc);
+            writer.UpdateNumericDocValue(new Term("upd", "t1"), "f1", 2L); // update f1 to 2
+            writer.UpdateNumericDocValue(new Term("upd", "t1"), "f2", 2L); // update f2 to 2
+            writer.UpdateNumericDocValue(new Term("upd", "t2"), "f1", 3L); // update f1 to 3
+            writer.UpdateNumericDocValue(new Term("upd", "t2"), "f2", 3L); // update f2 to 3
+            writer.UpdateNumericDocValue(new Term("upd", "t1"), "f1", 4L); // update f1 to 4 (but not f2)
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(4, ((AtomicReader)reader.Leaves[0].Reader).GetNumericDocValues("f1").Get(0));
+            Assert.AreEqual(3, ((AtomicReader)reader.Leaves[0].Reader).GetNumericDocValues("f2").Get(0));
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateAllDeletedSegment()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("id", "doc", Store.NO));
+            doc.Add(new NumericDocValuesField("f1", 1L));
+            writer.AddDocument(doc);
+            writer.AddDocument(doc);
+            writer.Commit();
+            writer.DeleteDocuments(new Term("id", "doc")); // delete all docs in the first segment
+            writer.AddDocument(doc);
+            writer.UpdateNumericDocValue(new Term("id", "doc"), "f1", 2L);
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(1, reader.Leaves.Count);
+            Assert.AreEqual(2L, ((AtomicReader)reader.Leaves[0].Reader).GetNumericDocValues("f1").Get(0));
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateTwoNonexistingTerms()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("id", "doc", Store.NO));
+            doc.Add(new NumericDocValuesField("f1", 1L));
+            writer.AddDocument(doc);
+            // update w/ multiple nonexisting terms in same field
+            writer.UpdateNumericDocValue(new Term("c", "foo"), "f1", 2L);
+            writer.UpdateNumericDocValue(new Term("c", "bar"), "f1", 2L);
+            writer.Dispose();
+
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(1, reader.Leaves.Count);
+            Assert.AreEqual(1L, ((AtomicReader)reader.Leaves[0].Reader).GetNumericDocValues("f1").Get(0));
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestOmitNorms.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestOmitNorms.cs b/src/Lucene.Net.Tests/Index/TestOmitNorms.cs
new file mode 100644
index 0000000..fe0697a
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestOmitNorms.cs
@@ -0,0 +1,331 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestOmitNorms : LuceneTestCase
+    {
+        // Tests whether the DocumentWriter correctly enables the
+        // omitNorms bit in the FieldInfo
+        [Test]
+        public virtual void TestOmitNorms_Mem()
+        {
+            Directory ram = NewDirectory();
+            Analyzer analyzer = new MockAnalyzer(Random());
+            IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
+            Document d = new Document();
+
+            // this field will have norms
+            Field f1 = NewTextField("f1", "this field has norms", Field.Store.NO);
+            d.Add(f1);
+
+            // this field will NOT have norms
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.OmitNorms = true;
+            Field f2 = NewField("f2", "this field has NO norms in all docs", customType);
+            d.Add(f2);
+
+            writer.AddDocument(d);
+            writer.ForceMerge(1);
+            // now we add another document which has norms for field f2 and not for f1 and verify that the SegmentMerger
+            // keeps things constant
+            d = new Document();
+
+            // Reverse
+            d.Add(NewField("f1", "this field has norms", customType));
+
+            d.Add(NewTextField("f2", "this field has NO norms in all docs", Field.Store.NO));
+
+            writer.AddDocument(d);
+
+            // force merge
+            writer.ForceMerge(1);
+            // flush
+            writer.Dispose();
+
+            SegmentReader reader = GetOnlySegmentReader(DirectoryReader.Open(ram));
+            FieldInfos fi = reader.FieldInfos;
+            Assert.IsTrue(fi.FieldInfo("f1").OmitsNorms, "OmitNorms field bit should be set.");
+            Assert.IsTrue(fi.FieldInfo("f2").OmitsNorms, "OmitNorms field bit should be set.");
+
+            reader.Dispose();
+            ram.Dispose();
+        }
+
+        // Tests whether merging of docs that have different
+        // omitNorms for the same field works
+        [Test]
+        public virtual void TestMixedMerge()
+        {
+            Directory ram = NewDirectory();
+            Analyzer analyzer = new MockAnalyzer(Random());
+            IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(3).SetMergePolicy(NewLogMergePolicy(2)));
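+            // MaxBufferedDocs(3) and a merge factor of 2 produce many small segments and intermediate merges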
+            Document d = new Document();
+
+            // this field will have norms
+            Field f1 = NewTextField("f1", "this field has norms", Field.Store.NO);
+            d.Add(f1);
+
+            // this field will NOT have norms
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.OmitNorms = true;
+            Field f2 = NewField("f2", "this field has NO norms in all docs", customType);
+            d.Add(f2);
+
+            for (int i = 0; i < 30; i++)
+            {
+                writer.AddDocument(d);
+            }
+
+            // now we add another document which has norms for field f2 and not for f1 and verify that the SegmentMerger
+            // keeps things constant
+            d = new Document();
+
+            // Reverse
+            d.Add(NewField("f1", "this field has norms", customType));
+
+            d.Add(NewTextField("f2", "this field has NO norms in all docs", Field.Store.NO));
+
+            for (int i = 0; i < 30; i++)
+            {
+                writer.AddDocument(d);
+            }
+
+            // force merge
+            writer.ForceMerge(1);
+            // flush
+            writer.Dispose();
+
+            SegmentReader reader = GetOnlySegmentReader(DirectoryReader.Open(ram));
+            FieldInfos fi = reader.FieldInfos;
+            Assert.IsTrue(fi.FieldInfo("f1").OmitsNorms, "OmitNorms field bit should be set.");
+            Assert.IsTrue(fi.FieldInfo("f2").OmitsNorms, "OmitNorms field bit should be set.");
+
+            reader.Dispose();
+            ram.Dispose();
+        }
+
+        // Make sure that docs where one field has norms and another
+        // field omits them can be buffered in RAM together, and that
+        // each field ends up with the correct omitNorms bit.
+        [Test]
+        public virtual void TestMixedRAM()
+        {
+            Directory ram = NewDirectory();
+            Analyzer analyzer = new MockAnalyzer(Random());
+            IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(2)));
+            Document d = new Document();
+
+            // this field will have norms
+            Field f1 = NewTextField("f1", "this field has norms", Field.Store.NO);
+            d.Add(f1);
+
+            // this field will NOT have norms
+
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.OmitNorms = true;
+            Field f2 = NewField("f2", "this field has NO norms in all docs", customType);
+            d.Add(f2);
+
+            for (int i = 0; i < 5; i++)
+            {
+                writer.AddDocument(d);
+            }
+
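+            // no commit between the batches, so docs from both loops share the RAM buffer and its flushed segments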
+            for (int i = 0; i < 20; i++)
+            {
+                writer.AddDocument(d);
+            }
+
+            // force merge
+            writer.ForceMerge(1);
+
+            // flush
+            writer.Dispose();
+
+            SegmentReader reader = GetOnlySegmentReader(DirectoryReader.Open(ram));
+            FieldInfos fi = reader.FieldInfos;
+            Assert.IsTrue(!fi.FieldInfo("f1").OmitsNorms, "OmitNorms field bit should not be set.");
+            Assert.IsTrue(fi.FieldInfo("f2").OmitsNorms, "OmitNorms field bit should be set.");
+
+            reader.Dispose();
+            ram.Dispose();
+        }
+
+        private void AssertNoNrm(Directory dir)
+        {
+            string[] files = dir.ListAll();
+            for (int i = 0; i < files.Length; i++)
+            {
+                // TODO: this relies upon filenames
+                Assert.IsFalse(files[i].EndsWith(".nrm") || files[i].EndsWith(".len"));
+            }
+        }
+
+        // Verifies no *.nrm exists when all fields omit norms:
+        [Test]
+        public virtual void TestNoNrmFile()
+        {
+            Directory ram = NewDirectory();
+            Analyzer analyzer = new MockAnalyzer(Random());
+            IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(3).SetMergePolicy(NewLogMergePolicy()));
+            LogMergePolicy lmp = (LogMergePolicy)writer.Config.MergePolicy;
+            lmp.MergeFactor = 2;
+            lmp.NoCFSRatio = 0.0;
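+            // NoCFSRatio = 0.0 disables compound files, so AssertNoNrm can check the raw file extensions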
+            Document d = new Document();
+
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.OmitNorms = true;
+            Field f1 = NewField("f1", "this field has no norms", customType);
+            d.Add(f1);
+
+            for (int i = 0; i < 30; i++)
+            {
+                writer.AddDocument(d);
+            }
+
+            writer.Commit();
+
+            AssertNoNrm(ram);
+
+            // force merge
+            writer.ForceMerge(1);
+            // flush
+            writer.Dispose();
+
+            AssertNoNrm(ram);
+            ram.Dispose();
+        }
+
+        /// <summary>
+        /// Tests various combinations of omitNorms=true/false and the field not existing at all,
+        /// ensuring that only omitNorms is 'viral'.
+        /// Internally checks that MultiNorms.norms() is consistent with (returns the same bytes as)
+        /// the fully merged equivalent.
+        /// </summary>
+        [Test]
+        public virtual void TestOmitNormsCombos()
+        {
+            // indexed with norms
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            Field norms = new Field("foo", "a", customType);
+            // indexed without norms
+            FieldType customType1 = new FieldType(TextField.TYPE_STORED);
+            customType1.OmitNorms = true;
+            Field noNorms = new Field("foo", "a", customType1);
+            // not indexed, but stored
+            FieldType customType2 = new FieldType();
+            customType2.IsStored = true;
+            Field noIndex = new Field("foo", "a", customType2);
+            // not indexed but stored, omitNorms is set
+            FieldType customType3 = new FieldType();
+            customType3.IsStored = true;
+            customType3.OmitNorms = true;
+            Field noNormsNoIndex = new Field("foo", "a", customType3);
+            // neither indexed nor stored (doesn't exist at all; we index a different field instead)
+            Field emptyNorms = new Field("bar", "a", customType);
+
+            Assert.IsNotNull(GetNorms("foo", norms, norms));
+            Assert.IsNull(GetNorms("foo", norms, noNorms));
+            Assert.IsNotNull(GetNorms("foo", norms, noIndex));
+            Assert.IsNotNull(GetNorms("foo", norms, noNormsNoIndex));
+            Assert.IsNotNull(GetNorms("foo", norms, emptyNorms));
+            Assert.IsNull(GetNorms("foo", noNorms, noNorms));
+            Assert.IsNull(GetNorms("foo", noNorms, noIndex));
+            Assert.IsNull(GetNorms("foo", noNorms, noNormsNoIndex));
+            Assert.IsNull(GetNorms("foo", noNorms, emptyNorms));
+            Assert.IsNull(GetNorms("foo", noIndex, noIndex));
+            Assert.IsNull(GetNorms("foo", noIndex, noNormsNoIndex));
+            Assert.IsNull(GetNorms("foo", noIndex, emptyNorms));
+            Assert.IsNull(GetNorms("foo", noNormsNoIndex, noNormsNoIndex));
+            Assert.IsNull(GetNorms("foo", noNormsNoIndex, emptyNorms));
+            Assert.IsNull(GetNorms("foo", emptyNorms, emptyNorms));
+        }
+
+        /// <summary>
+        /// Indexes at least 1 document with f1, and at least 1 document with f2.
+        /// Returns the norms for "field".
+        /// </summary>
+        internal virtual NumericDocValues GetNorms(string field, Field f1, Field f2)
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy());
+            RandomIndexWriter riw = new RandomIndexWriter(Random(), dir, iwc);
+
+            // add f1
+            Document d = new Document();
+            d.Add(f1);
+            riw.AddDocument(d);
+
+            // add f2
+            d = new Document();
+            d.Add(f2);
+            riw.AddDocument(d);
+
+            // add a mix of f1's and f2's
+            int numExtraDocs = TestUtil.NextInt(Random(), 1, 1000);
+            for (int i = 0; i < numExtraDocs; i++)
+            {
+                d = new Document();
+                d.Add(Random().NextBoolean() ? f1 : f2);
+                riw.AddDocument(d);
+            }
+
+            IndexReader ir1 = riw.Reader;
+            // todo: generalize
+            NumericDocValues norms1 = MultiDocValues.GetNormValues(ir1, field);
+
+            // fully merge and validate MultiNorms against single segment.
+            riw.ForceMerge(1);
+            DirectoryReader ir2 = riw.Reader;
+            NumericDocValues norms2 = GetOnlySegmentReader(ir2).GetNormValues(field);
+
+            if (norms1 == null)
+            {
+                Assert.IsNull(norms2);
+            }
+            else
+            {
+                for (int docID = 0; docID < ir1.MaxDoc; docID++)
+                {
+                    Assert.AreEqual(norms1.Get(docID), norms2.Get(docID));
+                }
+            }
+            ir1.Dispose();
+            ir2.Dispose();
+            riw.Dispose();
+            dir.Dispose();
+            return norms1;
+        }
+    }
+}
\ No newline at end of file


[61/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Renamed Codecs\lucene45\ to Codecs\Lucene45\

Posted by ni...@apache.org.
Lucene.Net.TestFramework: Renamed Codecs\lucene45\ to Codecs\Lucene45\


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/7c9f5727
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/7c9f5727
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/7c9f5727

Branch: refs/heads/api-work
Commit: 7c9f5727fbb024c553a6a9c99be65f9dc0f9081b
Parents: 9e2f4c5
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 26 03:20:41 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:17:58 2017 +0700

----------------------------------------------------------------------
 .../Codecs/Lucene45/Lucene45RWCodec.cs          | 88 ++++++++++++++++++++
 .../Codecs/lucene45/Lucene45RWCodec.cs          | 88 --------------------
 .../Lucene.Net.TestFramework.csproj             |  2 +-
 3 files changed, 89 insertions(+), 89 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7c9f5727/src/Lucene.Net.TestFramework/Codecs/Lucene45/Lucene45RWCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene45/Lucene45RWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene45/Lucene45RWCodec.cs
new file mode 100644
index 0000000..c610ca9
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene45/Lucene45RWCodec.cs
@@ -0,0 +1,88 @@
+namespace Lucene.Net.Codecs.Lucene45
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene42FieldInfosFormat = Lucene.Net.Codecs.Lucene42.Lucene42FieldInfosFormat;
+    using Lucene42FieldInfosWriter = Lucene.Net.Codecs.Lucene42.Lucene42FieldInfosWriter;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /// <summary>
+    /// Read-write version of <seealso cref="Lucene45Codec"/> for testing.
+    /// </summary>
+#pragma warning disable 612, 618
+    public class Lucene45RWCodec : Lucene45Codec
+    {
+        private readonly FieldInfosFormat fieldInfosFormat;
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Creates the codec with OldFormatImpersonationIsActive = true.
+        /// </summary>
+        /// <remarks>
+        /// Added so that SPIClassIterator can locate this Codec.  The iterator
+        /// only recognises classes that have empty constructors.
+        /// </remarks>
+        public Lucene45RWCodec()
+            : this(true)
+        { }
+
+        /// <param name="oldFormatImpersonationIsActive">
+        /// LUCENENET specific
+        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
+        /// </param>
+        public Lucene45RWCodec(bool oldFormatImpersonationIsActive) : base()
+        {
+            fieldInfosFormat = new Lucene42FieldInfosFormatAnonymousInnerClassHelper(oldFormatImpersonationIsActive);
+        }
+
+        private class Lucene42FieldInfosFormatAnonymousInnerClassHelper : Lucene42FieldInfosFormat
+        {
+            private readonly bool _oldFormatImpersonationIsActive;
+
+            /// <param name="oldFormatImpersonationIsActive">
+            /// LUCENENET specific
+            /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
+            /// </param>
+            public Lucene42FieldInfosFormatAnonymousInnerClassHelper(bool oldFormatImpersonationIsActive) : base()
+            {
+                _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
+            }
+
+            public override FieldInfosWriter FieldInfosWriter
+            {
+                get
+                {
+                    if (!_oldFormatImpersonationIsActive)
+                    {
+                        return base.FieldInfosWriter;
+                    }
+                    else
+                    {
+                        return new Lucene42FieldInfosWriter();
+                    }
+                }
+            }
+        }
+
+        public override FieldInfosFormat FieldInfosFormat
+        {
+            get { return fieldInfosFormat; }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7c9f5727/src/Lucene.Net.TestFramework/Codecs/lucene45/Lucene45RWCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene45/Lucene45RWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/lucene45/Lucene45RWCodec.cs
deleted file mode 100644
index c610ca9..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene45/Lucene45RWCodec.cs
+++ /dev/null
@@ -1,88 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene45
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using Lucene42FieldInfosFormat = Lucene.Net.Codecs.Lucene42.Lucene42FieldInfosFormat;
-    using Lucene42FieldInfosWriter = Lucene.Net.Codecs.Lucene42.Lucene42FieldInfosWriter;
-    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-
-    /// <summary>
-    /// Read-write version of <seealso cref="Lucene45Codec"/> for testing.
-    /// </summary>
-#pragma warning disable 612, 618
-    public class Lucene45RWCodec : Lucene45Codec
-    {
-        private readonly FieldInfosFormat fieldInfosFormat;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public Lucene45RWCodec()
-            : this(true)
-        { }
-
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-        /// </param>
-        public Lucene45RWCodec(bool oldFormatImpersonationIsActive) : base()
-        {
-             fieldInfosFormat = new Lucene42FieldInfosFormatAnonymousInnerClassHelper(oldFormatImpersonationIsActive);
-        }
-
-        private class Lucene42FieldInfosFormatAnonymousInnerClassHelper : Lucene42FieldInfosFormat
-        {
-            private readonly bool _oldFormatImpersonationIsActive;
-
-            /// <param name="oldFormatImpersonationIsActive">
-            /// LUCENENET specific
-            /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-            /// </param>
-            public Lucene42FieldInfosFormatAnonymousInnerClassHelper(bool oldFormatImpersonationIsActive) : base()
-            {
-                _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-            }
-
-            public override FieldInfosWriter FieldInfosWriter
-            {
-                get
-                {
-                    if (!_oldFormatImpersonationIsActive)
-                    {
-                        return base.FieldInfosWriter;
-                    }
-                    else
-                    {
-                        return new Lucene42FieldInfosWriter();
-                    }
-                }
-            }
-        }
-
-        public override FieldInfosFormat FieldInfosFormat
-        {
-            get { return fieldInfosFormat; }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7c9f5727/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
index c7ac221..2f32d94 100644
--- a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
+++ b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
@@ -244,7 +244,7 @@
     <Compile Include="Codecs\Lucene42\Lucene42RWDocValuesFormat.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene45\Lucene45RWCodec.cs">
+    <Compile Include="Codecs\Lucene45\Lucene45RWCodec.cs">
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Codecs\MissingOrdRemapper.cs">


[19/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestSegmentTermEnum.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestSegmentTermEnum.cs b/src/Lucene.Net.Tests/Index/TestSegmentTermEnum.cs
new file mode 100644
index 0000000..3ed504b
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestSegmentTermEnum.cs
@@ -0,0 +1,152 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Field = Field;
+    using Lucene41PostingsFormat = Lucene.Net.Codecs.Lucene41.Lucene41PostingsFormat;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestSegmentTermEnum : LuceneTestCase
+    {
+        internal Directory Dir;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Dir = NewDirectory();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        [Test]
+        public virtual void TestTermEnum()
+        {
+            IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            // Add 100 documents with term: aaa
+            // Add 100 documents with terms: aaa bbb
+            // Therefore, term 'aaa' has a document frequency of 200 and term 'bbb' has 100
+            for (int i = 0; i < 100; i++)
+            {
+                AddDoc(writer, "aaa");
+                AddDoc(writer, "aaa bbb");
+            }
+
+            writer.Dispose();
+
+            // verify document frequency of terms in a multi-segment index
+            VerifyDocFreq();
+
+            // merge segments
+            writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            // verify document frequency of terms in a single segment index
+            VerifyDocFreq();
+        }
+
+        [Test]
+        public virtual void TestPrevTermAtEnd()
+        {
+            IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat())));
+            AddDoc(writer, "aaa bbb");
+            writer.Dispose();
+            SegmentReader reader = GetOnlySegmentReader(DirectoryReader.Open(Dir));
+            TermsEnum terms = reader.Fields.GetTerms("content").GetIterator(null);
+            Assert.IsNotNull(terms.Next());
+            Assert.AreEqual("aaa", terms.Term.Utf8ToString());
+            Assert.IsNotNull(terms.Next());
+            long ordB;
+            try
+            {
+                ordB = terms.Ord;
+            }
+#pragma warning disable 168
+            catch (System.NotSupportedException uoe)
+#pragma warning restore 168
+            {
+                // ok -- codec is not required to support ord
+                reader.Dispose();
+                return;
+            }
+            Assert.AreEqual("bbb", terms.Term.Utf8ToString());
+            Assert.IsNull(terms.Next());
+
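+            // the enum is exhausted; seeking by the saved ord must reposition it onto the last term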
+            terms.SeekExact(ordB);
+            Assert.AreEqual("bbb", terms.Term.Utf8ToString());
+            reader.Dispose();
+        }
+
+        private void VerifyDocFreq()
+        {
+            IndexReader reader = DirectoryReader.Open(Dir);
+            TermsEnum termEnum = MultiFields.GetTerms(reader, "content").GetIterator(null);
+
+            // create enumeration of all terms
+            // go to the first term (aaa)
+            termEnum.Next();
+            // assert that term is 'aaa'
+            Assert.AreEqual("aaa", termEnum.Term.Utf8ToString());
+            Assert.AreEqual(200, termEnum.DocFreq);
+            // go to the second term (bbb)
+            termEnum.Next();
+            // assert that term is 'bbb'
+            Assert.AreEqual("bbb", termEnum.Term.Utf8ToString());
+            Assert.AreEqual(100, termEnum.DocFreq);
+
+            // create enumeration of terms after term 'aaa',
+            // including 'aaa'
+            termEnum.SeekCeil(new BytesRef("aaa"));
+            // assert that term is 'aaa'
+            Assert.AreEqual("aaa", termEnum.Term.Utf8ToString());
+            Assert.AreEqual(200, termEnum.DocFreq);
+            // go to term 'bbb'
+            termEnum.Next();
+            // assert that term is 'bbb'
+            Assert.AreEqual("bbb", termEnum.Term.Utf8ToString());
+            Assert.AreEqual(100, termEnum.DocFreq);
+            reader.Dispose();
+        }
+
+        private void AddDoc(IndexWriter writer, string value)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("content", value, Field.Store.NO));
+            writer.AddDocument(doc);
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestSizeBoundedForceMerge.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestSizeBoundedForceMerge.cs b/src/Lucene.Net.Tests/Index/TestSizeBoundedForceMerge.cs
new file mode 100644
index 0000000..812e759
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestSizeBoundedForceMerge.cs
@@ -0,0 +1,403 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using StringField = StringField;
+
+    [TestFixture]
+    public class TestSizeBoundedForceMerge : LuceneTestCase
+    {
+        private void AddDocs(IndexWriter writer, int numDocs)
+        {
+            AddDocs(writer, numDocs, false);
+        }
+
+        private void AddDocs(IndexWriter writer, int numDocs, bool withID)
+        {
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                if (withID)
+                {
+                    doc.Add(new StringField("id", "" + i, Field.Store.NO));
+                }
+                writer.AddDocument(doc);
+            }
+            writer.Commit();
+        }
+
+        private IndexWriterConfig NewWriterConfig()
+        {
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null);
+            conf.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+            conf.SetRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB);
+            // prevent any merges by default.
+            conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+            return conf;
+        }
+
+        [Test]
+        public virtual void TestByteSizeLimit()
+        {
+            // tests that the max merge size constraint is applied during forceMerge.
+            Directory dir = new RAMDirectory();
+
+            // Prepare an index w/ several small segments and a large one.
+            IndexWriterConfig conf = NewWriterConfig();
+            IndexWriter writer = new IndexWriter(dir, conf);
+            const int numSegments = 15;
+            for (int i = 0; i < numSegments; i++)
+            {
+                int numDocs = i == 7 ? 30 : 1;
+                AddDocs(writer, numDocs);
+            }
+            writer.Dispose();
+
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(dir);
+            double min = sis.Info(0).SizeInBytes();
+
+            conf = NewWriterConfig();
+            LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy();
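+            // set the forced-merge size cap just above the smallest segment, so the 30-doc segment is left out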
+            lmp.MaxMergeMBForForcedMerge = (min + 1) / (1 << 20);
+            conf.SetMergePolicy(lmp);
+
+            writer = new IndexWriter(dir, conf);
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            // Should only be 3 segments in the index, because one of them exceeds the size limit
+            sis = new SegmentInfos();
+            sis.Read(dir);
+            Assert.AreEqual(3, sis.Count);
+        }
+
+        [Test]
+        public virtual void TestNumDocsLimit()
+        {
+            // tests that the max merge docs constraint is applied during forceMerge.
+            Directory dir = new RAMDirectory();
+
+            // Prepare an index w/ several small segments and a large one.
+            IndexWriterConfig conf = NewWriterConfig();
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+            AddDocs(writer, 5);
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+
+            writer.Dispose();
+
+            conf = NewWriterConfig();
+            LogMergePolicy lmp = new LogDocMergePolicy();
+            lmp.MaxMergeDocs = 3;
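+            // the 5-doc segment exceeds MaxMergeDocs = 3, so the forced merge must leave it alone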
+            conf.SetMergePolicy(lmp);
+
+            writer = new IndexWriter(dir, conf);
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            // Should only be 3 segments in the index, because one of them exceeds the max merge docs limit
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(dir);
+            Assert.AreEqual(3, sis.Count);
+        }
+
+        [Test]
+        public virtual void TestLastSegmentTooLarge()
+        {
+            Directory dir = new RAMDirectory();
+
+            IndexWriterConfig conf = NewWriterConfig();
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+            AddDocs(writer, 5);
+
+            writer.Dispose();
+
+            conf = NewWriterConfig();
+            LogMergePolicy lmp = new LogDocMergePolicy();
+            lmp.MaxMergeDocs = 3;
+            conf.SetMergePolicy(lmp);
+
+            writer = new IndexWriter(dir, conf);
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(dir);
+            Assert.AreEqual(2, sis.Count);
+        }
+
+        [Test]
+        public virtual void TestFirstSegmentTooLarge()
+        {
+            Directory dir = new RAMDirectory();
+
+            IndexWriterConfig conf = NewWriterConfig();
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            AddDocs(writer, 5);
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+
+            writer.Dispose();
+
+            conf = NewWriterConfig();
+            LogMergePolicy lmp = new LogDocMergePolicy();
+            lmp.MaxMergeDocs = 3;
+            conf.SetMergePolicy(lmp);
+
+            writer = new IndexWriter(dir, conf);
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(dir);
+            Assert.AreEqual(2, sis.Count);
+        }
+
+        [Test]
+        public virtual void TestAllSegmentsSmall()
+        {
+            Directory dir = new RAMDirectory();
+
+            IndexWriterConfig conf = NewWriterConfig();
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+
+            writer.Dispose();
+
+            conf = NewWriterConfig();
+            LogMergePolicy lmp = new LogDocMergePolicy();
+            lmp.MaxMergeDocs = 3;
+            conf.SetMergePolicy(lmp);
+
+            writer = new IndexWriter(dir, conf);
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(dir);
+            Assert.AreEqual(1, sis.Count);
+        }
+
+        [Test]
+        public virtual void TestAllSegmentsLarge()
+        {
+            Directory dir = new RAMDirectory();
+
+            IndexWriterConfig conf = NewWriterConfig();
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+
+            writer.Dispose();
+
+            conf = NewWriterConfig();
+            LogMergePolicy lmp = new LogDocMergePolicy();
+            lmp.MaxMergeDocs = 2;
+            conf.SetMergePolicy(lmp);
+
+            writer = new IndexWriter(dir, conf);
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(dir);
+            Assert.AreEqual(3, sis.Count);
+        }
+
+        [Test]
+        public virtual void TestOneLargeOneSmall()
+        {
+            Directory dir = new RAMDirectory();
+
+            IndexWriterConfig conf = NewWriterConfig();
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            AddDocs(writer, 3);
+            AddDocs(writer, 5);
+            AddDocs(writer, 3);
+            AddDocs(writer, 5);
+
+            writer.Dispose();
+
+            conf = NewWriterConfig();
+            LogMergePolicy lmp = new LogDocMergePolicy();
+            lmp.MaxMergeDocs = 3;
+            conf.SetMergePolicy(lmp);
+
+            writer = new IndexWriter(dir, conf);
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(dir);
+            Assert.AreEqual(4, sis.Count);
+        }
+
+        [Test]
+        public virtual void TestMergeFactor()
+        {
+            Directory dir = new RAMDirectory();
+
+            IndexWriterConfig conf = NewWriterConfig();
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+            AddDocs(writer, 5);
+            AddDocs(writer, 3);
+            AddDocs(writer, 3);
+
+            writer.Dispose();
+
+            conf = NewWriterConfig();
+            LogMergePolicy lmp = new LogDocMergePolicy();
+            lmp.MaxMergeDocs = 3;
+            lmp.MergeFactor = 2;
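+            // with MergeFactor = 2, each forced-merge step combines at most two eligible segments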
+            conf.SetMergePolicy(lmp);
+
+            writer = new IndexWriter(dir, conf);
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            // Should only be 4 segments in the index, because of the merge factor and
+            // max merge docs settings.
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(dir);
+            Assert.AreEqual(4, sis.Count);
+        }
+
+        [Test]
+        public virtual void TestSingleMergeableSegment()
+        {
+            Directory dir = new RAMDirectory();
+
+            IndexWriterConfig conf = NewWriterConfig();
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            AddDocs(writer, 3);
+            AddDocs(writer, 5);
+            AddDocs(writer, 3);
+
+            // delete the last document, so that the last segment is merged.
+            writer.DeleteDocuments(new Term("id", "10"));
+            writer.Dispose();
+
+            conf = NewWriterConfig();
+            LogMergePolicy lmp = new LogDocMergePolicy();
+            lmp.MaxMergeDocs = 3;
+            conf.SetMergePolicy(lmp);
+
+            writer = new IndexWriter(dir, conf);
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            // Verify that the last segment does not have deletions.
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(dir);
+            Assert.AreEqual(3, sis.Count);
+            Assert.IsFalse(sis.Info(2).HasDeletions);
+        }
+
+        [Test]
+        public virtual void TestSingleNonMergeableSegment()
+        {
+            Directory dir = new RAMDirectory();
+
+            IndexWriterConfig conf = NewWriterConfig();
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            AddDocs(writer, 3, true);
+
+            writer.Dispose();
+
+            conf = NewWriterConfig();
+            LogMergePolicy lmp = new LogDocMergePolicy();
+            lmp.MaxMergeDocs = 3;
+            conf.SetMergePolicy(lmp);
+
+            writer = new IndexWriter(dir, conf);
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            // Verify that the single segment was left as-is.
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(dir);
+            Assert.AreEqual(1, sis.Count);
+        }
+
+        [Test]
+        public virtual void TestSingleMergeableTooLargeSegment()
+        {
+            Directory dir = new RAMDirectory();
+
+            IndexWriterConfig conf = NewWriterConfig();
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            AddDocs(writer, 5, true);
+
+            // delete the last document
+
+            writer.DeleteDocuments(new Term("id", "4"));
+            writer.Dispose();
+
+            conf = NewWriterConfig();
+            LogMergePolicy lmp = new LogDocMergePolicy();
+            lmp.MaxMergeDocs = 2;
+            conf.SetMergePolicy(lmp);
+
+            writer = new IndexWriter(dir, conf);
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            // Verify that the too-large segment was not merged and still has its deletion.
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(dir);
+            Assert.AreEqual(1, sis.Count);
+            Assert.IsTrue(sis.Info(0).HasDeletions);
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestSnapshotDeletionPolicy.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestSnapshotDeletionPolicy.cs b/src/Lucene.Net.Tests/Index/TestSnapshotDeletionPolicy.cs
new file mode 100644
index 0000000..3fb56f6
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestSnapshotDeletionPolicy.cs
@@ -0,0 +1,527 @@
+using System;
+using System.Collections.Generic;
+using System.Threading;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using FieldType = FieldType;
+    using IndexInput = Lucene.Net.Store.IndexInput;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TextField = TextField;
+
+    //
+    // This was developed for Lucene In Action,
+    // http://lucenebook.com
+    //
+    [TestFixture]
+    public class TestSnapshotDeletionPolicy : LuceneTestCase
+    {
+        public const string INDEX_PATH = "test.snapshots";
+
+        protected internal virtual IndexWriterConfig GetConfig(Random random, IndexDeletionPolicy dp)
+        {
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
+            if (dp != null)
+            {
+                conf.SetIndexDeletionPolicy(dp);
+            }
+            return conf;
+        }
+
+        protected internal virtual void CheckSnapshotExists(Directory dir, IndexCommit c)
+        {
+            string segFileName = c.SegmentsFileName;
+            Assert.IsTrue(SlowFileExists(dir, segFileName), "segments file not found in directory: " + segFileName);
+        }
+
+        protected internal virtual void CheckMaxDoc(IndexCommit commit, int expectedMaxDoc)
+        {
+            IndexReader reader = DirectoryReader.Open(commit);
+            try
+            {
+                Assert.AreEqual(expectedMaxDoc, reader.MaxDoc);
+            }
+            finally
+            {
+                reader.Dispose();
+            }
+        }
+
+        protected internal virtual void PrepareIndexAndSnapshots(SnapshotDeletionPolicy sdp, IndexWriter writer, int numSnapshots)
+        {
+            for (int i = 0; i < numSnapshots; i++)
+            {
+                // create dummy document to trigger commit.
+                writer.AddDocument(new Document());
+                writer.Commit();
+                Snapshots.Add(sdp.Snapshot());
+            }
+        }
+
+        protected internal virtual SnapshotDeletionPolicy DeletionPolicy
+        {
+            get
+            {
+                return new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
+            }
+        }
+
+        protected internal virtual void AssertSnapshotExists(Directory dir, SnapshotDeletionPolicy sdp, int numSnapshots, bool checkIndexCommitSame)
+        {
+            for (int i = 0; i < numSnapshots; i++)
+            {
+                IndexCommit snapshot = Snapshots[i];
+                CheckMaxDoc(snapshot, i + 1);
+                CheckSnapshotExists(dir, snapshot);
+                if (checkIndexCommitSame)
+                {
+                    Assert.AreSame(snapshot, sdp.GetIndexCommit(snapshot.Generation));
+                }
+                else
+                {
+                    Assert.AreEqual(snapshot.Generation, sdp.GetIndexCommit(snapshot.Generation).Generation);
+                }
+            }
+        }
+
+        protected internal IList<IndexCommit> Snapshots;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+
+            this.Snapshots = new List<IndexCommit>();
+        }
+
+        [Test]
+        public virtual void TestSnapshotDeletionPolicy_Mem()
+        {
+            Directory fsDir = NewDirectory();
+            RunTest(Random(), fsDir);
+            fsDir.Dispose();
+        }
+
+        private void RunTest(Random random, Directory dir)
+        {
+            // Run for ~1 second
+            long stopTime = Environment.TickCount + 1000;
+
+            SnapshotDeletionPolicy dp = DeletionPolicy;
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetIndexDeletionPolicy(dp).SetMaxBufferedDocs(2));
+
+            // Verify we catch misuse:
+            try
+            {
+                dp.Snapshot();
+                Assert.Fail("did not hit exception");
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException ise)
+#pragma warning restore 168
+            {
+                // expected
+            }
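+            // the writer clones its config, so fetch the deletion policy instance the writer actually uses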
+            dp = (SnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+            writer.Commit();
+
+            ThreadClass t = new ThreadAnonymousInnerClassHelper(stopTime, writer, NewField);
+
+            t.Start();
+
+            // While the above indexing thread is running, take many
+            // backups:
+            do
+            {
+                BackupIndex(dir, dp);
+                Thread.Sleep(20);
+            } while (t.IsAlive);
+
+            t.Join();
+
+            // Add one more document to force writer to commit a
+            // final segment, so deletion policy has a chance to
+            // delete again:
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+            doc.Add(NewField("content", "aaa", customType));
+            writer.AddDocument(doc);
+
+            // Make sure we don't have any leftover files in the
+            // directory:
+            writer.Dispose();
+            TestIndexWriter.AssertNoUnreferencedFiles(dir, "some files were not deleted but should have been");
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private long StopTime;
+            private IndexWriter Writer;
+            private readonly Func<string, string, FieldType, Field> _newFieldFunc;
+
+            /// <param name="newFieldFunc">
+            /// LUCENENET specific
+            /// Passed in because <see cref="LuceneTestCase.NewField(string, string, FieldType)"/>
+            /// is no longer static. 
+            /// </param>
+            public ThreadAnonymousInnerClassHelper(long stopTime, IndexWriter writer, Func<string, string, FieldType, Field> newFieldFunc)
+            {
+                this.StopTime = stopTime;
+                this.Writer = writer;
+                _newFieldFunc = newFieldFunc;
+            }
+
+            public override void Run()
+            {
+                Document doc = new Document();
+                FieldType customType = new FieldType(TextField.TYPE_STORED);
+                customType.StoreTermVectors = true;
+                customType.StoreTermVectorPositions = true;
+                customType.StoreTermVectorOffsets = true;
+                doc.Add(_newFieldFunc("content", "aaa", customType));
+                do
+                {
+                    for (int i = 0; i < 27; i++)
+                    {
+                        try
+                        {
+                            Writer.AddDocument(doc);
+                        }
+                        catch (Exception t)
+                        {
+                            Console.WriteLine(t.StackTrace);
+                            Assert.Fail("addDocument failed");
+                        }
+                        if (i % 2 == 0)
+                        {
+                            try
+                            {
+                                Writer.Commit();
+                            }
+                            catch (Exception e)
+                            {
+                                throw new Exception(e.Message, e);
+                            }
+                        }
+                    }
+#if !NETSTANDARD
+                    try
+                    {
+#endif 
+                        Thread.Sleep(1);
+#if !NETSTANDARD
+                    }
+                    catch (ThreadInterruptedException ie)
+                    {
+                        throw new ThreadInterruptedException("Thread Interrupted Exception", ie);
+                    }
+#endif
+                } while (Environment.TickCount < StopTime);
+            }
+        }
+
+        /// <summary>
+        /// Example showing how to use the SnapshotDeletionPolicy to take a backup.
+        /// This method does not really do a backup; instead, it reads every byte of
+        /// every file just to test that the files indeed exist and are readable even
+        /// while the index is changing.
+        /// </summary>
+        public virtual void BackupIndex(Directory dir, SnapshotDeletionPolicy dp)
+        {
+            // To backup an index we first take a snapshot:
+            IndexCommit snapshot = dp.Snapshot();
+            try
+            {
+                CopyFiles(dir, snapshot);
+            }
+            finally
+            {
+                // Make sure to release the snapshot, otherwise these
+                // files will never be deleted during this IndexWriter
+                // session:
+                dp.Release(snapshot);
+            }
+        }
+
+        private void CopyFiles(Directory dir, IndexCommit cp)
+        {
+            // While we hold the snapshot, and no matter how long
+            // we take to do the backup, the IndexWriter will
+            // never delete the files in the snapshot:
+            ICollection<string> files = cp.FileNames;
+            foreach (String fileName in files)
+            {
+                // NOTE: in a real backup you would not use
+                // readFile; you would need to use something else
+                // that copies the file to a backup location.  this
+                // could even be a spawned shell process (eg "tar",
+                // "zip") that takes the list of files and builds a
+                // backup.
+                ReadFile(dir, fileName);
+            }
+        }
+
+        internal byte[] Buffer = new byte[4096];
+
+        private void ReadFile(Directory dir, string name)
+        {
+            IndexInput input = dir.OpenInput(name, NewIOContext(Random()));
+            try
+            {
+                long size = dir.FileLength(name);
+                long bytesLeft = size;
+                while (bytesLeft > 0)
+                {
+                    int numToRead;
+                    if (bytesLeft < Buffer.Length)
+                    {
+                        numToRead = (int)bytesLeft;
+                    }
+                    else
+                    {
+                        numToRead = Buffer.Length;
+                    }
+                    input.ReadBytes(Buffer, 0, numToRead, false);
+                    bytesLeft -= numToRead;
+                }
+                // Don't do this in your real backups!  This is just
+                // to force a backup to take a somewhat long time, to
+                // make sure we are exercising the fact that the
+                // IndexWriter should not delete this file even when I
+                // take my time reading it.
+                Thread.Sleep(1);
+            }
+            finally
+            {
+                input.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestBasicSnapshots()
+        {
+            int numSnapshots = 3;
+
+            // Create 3 snapshots: snapshot0, snapshot1, snapshot2
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, GetConfig(Random(), DeletionPolicy));
+            SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+            PrepareIndexAndSnapshots(sdp, writer, numSnapshots);
+            writer.Dispose();
+
+            Assert.AreEqual(numSnapshots, sdp.GetSnapshots().Count);
+            Assert.AreEqual(numSnapshots, sdp.SnapshotCount);
+            AssertSnapshotExists(dir, sdp, numSnapshots, true);
+
+            // open a reader on a snapshot - should succeed.
+            DirectoryReader.Open(Snapshots[0]).Dispose();
+
+            // open a new IndexWriter w/ no snapshots to keep and assert that all snapshots are gone.
+            sdp = DeletionPolicy;
+            writer = new IndexWriter(dir, GetConfig(Random(), sdp));
+            writer.DeleteUnusedFiles();
+            writer.Dispose();
+            Assert.AreEqual(1, DirectoryReader.ListCommits(dir).Count, "no snapshots should exist");
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestMultiThreadedSnapshotting()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, GetConfig(Random(), DeletionPolicy));
+            SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+
+            ThreadClass[] threads = new ThreadClass[10];
+            IndexCommit[] snapshots = new IndexCommit[threads.Length];
+            for (int i = 0; i < threads.Length; i++)
+            {
+                int finalI = i;
+                threads[i] = new ThreadAnonymousInnerClassHelper2(this, writer, sdp, snapshots, finalI);
+                threads[i].Name = "t" + i;
+            }
+
+            foreach (ThreadClass t in threads)
+            {
+                t.Start();
+            }
+
+            foreach (ThreadClass t in threads)
+            {
+                t.Join();
+            }
+
+            // Do one last commit, so that after we release all snapshots, we stay w/ one commit
+            writer.AddDocument(new Document());
+            writer.Commit();
+
+            for (int i = 0; i < threads.Length; i++)
+            {
+                sdp.Release(snapshots[i]);
+                writer.DeleteUnusedFiles();
+            }
+            Assert.AreEqual(1, DirectoryReader.ListCommits(dir).Count);
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        private class ThreadAnonymousInnerClassHelper2 : ThreadClass
+        {
+            private readonly TestSnapshotDeletionPolicy OuterInstance;
+
+            private IndexWriter Writer;
+            private SnapshotDeletionPolicy Sdp;
+            private IndexCommit[] Snapshots;
+            private int FinalI;
+
+            public ThreadAnonymousInnerClassHelper2(TestSnapshotDeletionPolicy outerInstance, IndexWriter writer, SnapshotDeletionPolicy sdp, IndexCommit[] snapshots, int finalI)
+            {
+                this.OuterInstance = outerInstance;
+                this.Writer = writer;
+                this.Sdp = sdp;
+                this.Snapshots = snapshots;
+                this.FinalI = finalI;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    Writer.AddDocument(new Document());
+                    Writer.Commit();
+                    Snapshots[FinalI] = Sdp.Snapshot();
+                }
+                catch (Exception e)
+                {
+                    throw new Exception(e.Message, e);
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestRollbackToOldSnapshot()
+        {
+            int numSnapshots = 2;
+            Directory dir = NewDirectory();
+
+            SnapshotDeletionPolicy sdp = DeletionPolicy;
+            IndexWriter writer = new IndexWriter(dir, GetConfig(Random(), sdp));
+            PrepareIndexAndSnapshots(sdp, writer, numSnapshots);
+            writer.Dispose();
+
+            // now open the writer on "snapshot0" - make sure it succeeds
+            writer = new IndexWriter(dir, GetConfig(Random(), sdp).SetIndexCommit(Snapshots[0]));
+            // this does the actual rollback
+            writer.Commit();
+            writer.DeleteUnusedFiles();
+            AssertSnapshotExists(dir, sdp, numSnapshots - 1, false);
+            writer.Dispose();
+
+            // but 'snapshot1' files will still exist (need to release snapshot before they can be deleted).
+            string segFileName = Snapshots[1].SegmentsFileName;
+            Assert.IsTrue(SlowFileExists(dir, segFileName), "snapshot files should exist in the directory: " + segFileName);
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestReleaseSnapshot()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, GetConfig(Random(), DeletionPolicy));
+            SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+            PrepareIndexAndSnapshots(sdp, writer, 1);
+
+            // Create another commit - we must do that, because otherwise the "snapshot"
+            // files will still remain in the index, since it's the last commit.
+            writer.AddDocument(new Document());
+            writer.Commit();
+
+            // Release
+            string segFileName = Snapshots[0].SegmentsFileName;
+            sdp.Release(Snapshots[0]);
+            writer.DeleteUnusedFiles();
+            writer.Dispose();
+            Assert.IsFalse(SlowFileExists(dir, segFileName), "segments file should not be found in directory: " + segFileName);
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSnapshotLastCommitTwice()
+        {
+            Directory dir = NewDirectory();
+
+            IndexWriter writer = new IndexWriter(dir, GetConfig(Random(), DeletionPolicy));
+            SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+            writer.AddDocument(new Document());
+            writer.Commit();
+
+            IndexCommit s1 = sdp.Snapshot();
+            IndexCommit s2 = sdp.Snapshot();
+            Assert.AreSame(s1, s2); // should be the same instance
+
+            // create another commit
+            writer.AddDocument(new Document());
+            writer.Commit();
+
+            // release "s1" should not delete "s2"
+            sdp.Release(s1);
+            writer.DeleteUnusedFiles();
+            CheckSnapshotExists(dir, s2);
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestMissingCommits()
+        {
+            // Tests the behavior of SDP when commits that were given to the ctor are
+            // missing by the time OnInit() runs.
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, GetConfig(Random(), DeletionPolicy));
+            SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+            writer.AddDocument(new Document());
+            writer.Commit();
+            IndexCommit s1 = sdp.Snapshot();
+
+            // create another commit, not snapshotted.
+            writer.AddDocument(new Document());
+            writer.Dispose();
+
+            // open a new writer w/ KeepOnlyLastCommit policy, so it will delete "s1"
+            // commit.
+            (new IndexWriter(dir, GetConfig(Random(), null))).Dispose();
+
+            Assert.IsFalse(SlowFileExists(dir, s1.SegmentsFileName), "snapshotted commit should not exist");
+            dir.Dispose();
+        }
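+
+        // A minimal sketch (assumed wiring, not taken from this file) of how a
+        // SnapshotDeletionPolicy is typically handed to an IndexWriter config;
+        // MockAnalyzer stands in for whatever analyzer the surrounding tests use.
+        private IndexWriterConfig NewSnapshotConfigSketch()
+        {
+            var policy = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
+            return NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                .SetIndexDeletionPolicy(policy);
+        }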
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestStoredFieldsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestStoredFieldsFormat.cs b/src/Lucene.Net.Tests/Index/TestStoredFieldsFormat.cs
new file mode 100644
index 0000000..9c551f3
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestStoredFieldsFormat.cs
@@ -0,0 +1,141 @@
+using Lucene.Net.Attributes;
+using NUnit.Framework;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Lucene3xCodec = Lucene.Net.Codecs.Lucene3x.Lucene3xCodec;
+
+    /// <summary>
+    /// Tests with the default randomized codec. Not redundant with the other,
+    /// codec-specific instantiations, since we also want to exercise test-only
+    /// implementations like Asserting, as well as make it easy to write a codec
+    /// and pass -Dtests.codec.
+    /// </summary>
+    [TestFixture]
+    public class TestStoredFieldsFormat : BaseStoredFieldsFormatTestCase
+    {
+        protected override Codec Codec
+        {
+            get
+            {
+                return Codec.Default;
+            }
+        }
+
+        [Test]
+        public override void TestWriteReadMerge()
+        {
+#pragma warning disable 612, 618
+            AssumeFalse("impersonation isnt good enough", Codec is Lucene3xCodec);
+#pragma warning restore 612, 618
+            // this test switches back and forth between the codec under test and
+            // another codec. for 3.x we currently cannot take an index with
+            // existing 4.x segments and merge into newly formed 3.x segments.
+            base.TestWriteReadMerge();
+        }
+
+
+        #region BaseStoredFieldsFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestRandomStoredFields()
+        {
+            base.TestRandomStoredFields();
+        }
+
+        [Test]
+        // LUCENE-1727: make sure doc fields are stored in order
+        public override void TestStoredFieldsOrder()
+        {
+            base.TestStoredFieldsOrder();
+        }
+
+        [Test]
+        // LUCENE-1219
+        public override void TestBinaryFieldOffsetLength()
+        {
+            base.TestBinaryFieldOffsetLength();
+        }
+
+        [Test]
+        public override void TestNumericField()
+        {
+            base.TestNumericField();
+        }
+
+        [Test]
+        public override void TestIndexedBit()
+        {
+            base.TestIndexedBit();
+        }
+
+        [Test]
+        public override void TestReadSkip()
+        {
+            base.TestReadSkip();
+        }
+
+        [Test]
+        public override void TestEmptyDocs()
+        {
+            base.TestEmptyDocs();
+        }
+
+        [Test]
+        public override void TestConcurrentReads()
+        {
+            base.TestConcurrentReads();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit's Timeout attribute is not available on .NET Core.
+        [Timeout(120000)]
+#endif
+        [Test, HasTimeout]
+        public override void TestBigDocuments()
+        {
+            base.TestBigDocuments();
+        }
+
+        [Test]
+        public override void TestBulkMergeWithDeletes()
+        {
+            base.TestBulkMergeWithDeletes();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestStressAdvance.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestStressAdvance.cs b/src/Lucene.Net.Tests/Index/TestStressAdvance.cs
new file mode 100644
index 0000000..c6c4521
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestStressAdvance.cs
@@ -0,0 +1,173 @@
+using System;
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    
+    using Lucene.Net.Store;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Util;
+    using NUnit.Framework;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+
+    [TestFixture]
+    public class TestStressAdvance : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestStressAdvance_Mem()
+        {
+            for (int iter = 0; iter < 3; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: iter=" + iter);
+                }
+                Directory dir = NewDirectory();
+                RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+                HashSet<int> aDocs = new HashSet<int>();
+                Documents.Document doc = new Documents.Document();
+                Field f = NewStringField("field", "", Field.Store.NO);
+                doc.Add(f);
+                Field idField = NewStringField("id", "", Field.Store.YES);
+                doc.Add(idField);
+                int num = AtLeast(4097);
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: numDocs=" + num);
+                }
+                for (int id = 0; id < num; id++)
+                {
+                    if (Random().Next(4) == 3)
+                    {
+                        f.SetStringValue("a");
+                        aDocs.Add(id);
+                    }
+                    else
+                    {
+                        f.SetStringValue("b");
+                    }
+                    idField.SetStringValue("" + id);
+                    w.AddDocument(doc);
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("\nTEST: doc upto " + id);
+                    }
+                }
+
+                w.ForceMerge(1);
+
+                IList<int> aDocIDs = new List<int>();
+                IList<int> bDocIDs = new List<int>();
+
+                DirectoryReader r = w.Reader;
+                int[] idToDocID = new int[r.MaxDoc];
+                for (int docID = 0; docID < idToDocID.Length; docID++)
+                {
+                    int id = Convert.ToInt32(r.Document(docID).Get("id"));
+                    if (aDocs.Contains(id))
+                    {
+                        aDocIDs.Add(docID);
+                    }
+                    else
+                    {
+                        bDocIDs.Add(docID);
+                    }
+                }
+                TermsEnum te = GetOnlySegmentReader(r).Fields.GetTerms("field").GetIterator(null);
+
+                DocsEnum de = null;
+                for (int iter2 = 0; iter2 < 10; iter2++)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("\nTEST: iter=" + iter + " iter2=" + iter2);
+                    }
+                    Assert.AreEqual(TermsEnum.SeekStatus.FOUND, te.SeekCeil(new BytesRef("a")));
+                    de = TestUtil.Docs(Random(), te, null, de, DocsEnum.FLAG_NONE);
+                    TestOne(de, aDocIDs);
+
+                    Assert.AreEqual(TermsEnum.SeekStatus.FOUND, te.SeekCeil(new BytesRef("b")));
+                    de = TestUtil.Docs(Random(), te, null, de, DocsEnum.FLAG_NONE);
+                    TestOne(de, bDocIDs);
+                }
+
+                w.Dispose();
+                r.Dispose();
+                dir.Dispose();
+            }
+        }
+
+        private void TestOne(DocsEnum docs, IList<int> expected)
+        {
+            if (VERBOSE)
+            {
+                Console.WriteLine("test");
+            }
+            int upto = -1;
+            while (upto < expected.Count)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("  cycle upto=" + upto + " of " + expected.Count);
+                }
+                int docID;
+                if (Random().Next(4) == 1 || upto == expected.Count - 1)
+                {
+                    // test nextDoc()
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("    do nextDoc");
+                    }
+                    upto++;
+                    docID = docs.NextDoc();
+                }
+                else
+                {
+                    // test advance()
+                    int inc = TestUtil.NextInt(Random(), 1, expected.Count - 1 - upto);
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("    do advance inc=" + inc);
+                    }
+                    upto += inc;
+                    docID = docs.Advance(expected[upto]);
+                }
+                if (upto == expected.Count)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("  expect docID=" + DocIdSetIterator.NO_MORE_DOCS + " actual=" + docID);
+                    }
+                    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docID);
+                }
+                else
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("  expect docID=" + expected[upto] + " actual=" + docID);
+                    }
+                    Assert.IsTrue(docID != DocIdSetIterator.NO_MORE_DOCS);
+                    Assert.AreEqual((int)expected[upto], docID);
+                }
+            }
+        }
+    }
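+
+        // A minimal illustration (not part of the original test) of the iterator
+        // contract TestOne exercises: NextDoc() walks doc IDs in increasing order
+        // and returns NO_MORE_DOCS once the postings are exhausted; Advance(target)
+        // jumps to the first doc ID >= target under the same end-of-stream rule.
+        private static void ConsumeAll(DocsEnum docs)
+        {
+            int docID;
+            while ((docID = docs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                // each docID observed here is strictly greater than the previous one
+            }
+        }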
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestStressIndexing.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestStressIndexing.cs b/src/Lucene.Net.Tests/Index/TestStressIndexing.cs
new file mode 100644
index 0000000..428a2d0
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestStressIndexing.cs
@@ -0,0 +1,237 @@
+using System;
+using System.Threading;
+using Lucene.Net.Attributes;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    
+    using Lucene.Net.Search;
+    using Lucene.Net.Store;
+    using Lucene.Net.Support;
+
+    /*
+     * Copyright 2004 The Apache Software Foundation
+     *
+     * Licensed under the Apache License, Version 2.0 (the "License");
+     * you may not use this file except in compliance with the License.
+     * You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Util;
+    using NUnit.Framework;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+
+    [TestFixture]
+    public class TestStressIndexing : LuceneTestCase
+    {
+        private abstract class TimedThread : ThreadClass
+        {
+            internal volatile bool Failed;
+            internal int Count;
+            internal static int RUN_TIME_MSEC = AtLeast(1000);
+            internal TimedThread[] AllThreads;
+
+            public abstract void DoWork();
+
+            internal TimedThread(TimedThread[] threads)
+            {
+                this.AllThreads = threads;
+            }
+
+            public override void Run()
+            {
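+                // NOTE: Environment.TickCount is a signed 32-bit millisecond counter;
+                // with a run time of ~1 second, wraparound (after ~24.9 days of
+                // machine uptime) is ignored here.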
+                long stopTime = Environment.TickCount + RUN_TIME_MSEC;
+
+                Count = 0;
+
+                try
+                {
+                    do
+                    {
+                        if (AnyErrors())
+                        {
+                            break;
+                        }
+                        DoWork();
+                        Count++;
+                    } while (Environment.TickCount < stopTime);
+                }
+                catch (Exception e)
+                {
+                    Console.WriteLine(Thread.CurrentThread + ": exc");
+                    Console.WriteLine(e.StackTrace);
+                    Failed = true;
+                }
+            }
+
+            internal virtual bool AnyErrors()
+            {
+                for (int i = 0; i < AllThreads.Length; i++)
+                {
+                    if (AllThreads[i] != null && AllThreads[i].Failed)
+                    {
+                        return true;
+                    }
+                }
+                return false;
+            }
+        }
+
+        private class IndexerThread : TimedThread
+        {
+            private readonly Func<string, string, Field.Store, Field> NewStringFieldFunc;
+            private readonly Func<string, string, Field.Store, Field> NewTextFieldFunc;
+
+            internal IndexWriter Writer;
+            internal int NextID;
+
+            /// <param name="newStringField">
+            /// LUCENENET specific
+            /// Passed in because <see cref="LuceneTestCase.NewStringField(string, string, Field.Store)"/>
+            /// is no longer static.
+            /// </param>
+            /// <param name="newTextField">
+            /// LUCENENET specific
+            /// Passed in because <see cref="LuceneTestCase.NewTextField(string, string, Field.Store)"/>
+            /// is no longer static.
+            /// </param>
+            public IndexerThread(IndexWriter writer, TimedThread[] threads,
+                Func<string, string, Field.Store, Field> newStringField,
+                Func<string, string, Field.Store, Field> newTextField)
+                : base(threads)
+            {
+                this.Writer = writer;
+                NewStringFieldFunc = newStringField;
+                NewTextFieldFunc = newTextField;
+            }
+
+            public override void DoWork()
+            {
+                // Add 10 docs:
+                for (int j = 0; j < 10; j++)
+                {
+                    Documents.Document d = new Documents.Document();
+                    int n = Random().Next();
+                    d.Add(NewStringFieldFunc("id", Convert.ToString(NextID++), Field.Store.YES));
+                    d.Add(NewTextFieldFunc("contents", English.IntToEnglish(n), Field.Store.NO));
+                    Writer.AddDocument(d);
+                }
+
+                // Delete 5 docs:
+                int deleteID = NextID - 1;
+                for (int j = 0; j < 5; j++)
+                {
+                    Writer.DeleteDocuments(new Term("id", "" + deleteID));
+                    deleteID -= 2;
+                }
+            }
+        }
+
+        private class SearcherThread : TimedThread
+        {
+            internal Directory Directory;
+            private readonly LuceneTestCase OuterInstance;
+
+            /// <param name="outerInstance">
+            /// LUCENENET specific
+            /// Passed in because <see cref="LuceneTestCase.NewSearcher(IndexReader)"/>
+            /// is no longer static.
+            /// </param>
+            public SearcherThread(Directory directory, TimedThread[] threads, LuceneTestCase outerInstance)
+                : base(threads)
+            {
+                OuterInstance = outerInstance;
+                this.Directory = directory;
+            }
+
+            public override void DoWork()
+            {
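+                // Re-open a fresh reader and searcher 100 times; the point is
+                // open/close churn against the live index, so the searcher is
+                // intentionally discarded without running a query.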
+                for (int i = 0; i < 100; i++)
+                {
+                    IndexReader ir = DirectoryReader.Open(Directory);
+                    IndexSearcher @is = OuterInstance.NewSearcher(ir);
+                    ir.Dispose();
+                }
+                Count += 100;
+            }
+        }
+
+        /*
+          Run one indexer and two searchers against a single index as
+          a stress test.
+        */
+
+        public virtual void RunStressTest(Directory directory, IConcurrentMergeScheduler mergeScheduler)
+        {
+            IndexWriter modifier = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(10).SetMergeScheduler(mergeScheduler));
+            modifier.Commit();
+
+            TimedThread[] threads = new TimedThread[4];
+            int numThread = 0;
+
+            // One modifier that writes 10 docs then removes 5, over
+            // and over:
+            IndexerThread indexerThread = new IndexerThread(modifier, threads, NewStringField, NewTextField);
+            threads[numThread++] = indexerThread;
+            indexerThread.Start();
+
+            IndexerThread indexerThread2 = new IndexerThread(modifier, threads, NewStringField, NewTextField);
+            threads[numThread++] = indexerThread2;
+            indexerThread2.Start();
+
+            // Two searchers that constantly just re-instantiate the
+            // searcher:
+            SearcherThread searcherThread1 = new SearcherThread(directory, threads, this);
+            threads[numThread++] = searcherThread1;
+            searcherThread1.Start();
+
+            SearcherThread searcherThread2 = new SearcherThread(directory, threads, this);
+            threads[numThread++] = searcherThread2;
+            searcherThread2.Start();
+
+            for (int i = 0; i < numThread; i++)
+            {
+                threads[i].Join();
+            }
+
+            modifier.Dispose();
+
+            for (int i = 0; i < numThread; i++)
+            {
+                Assert.IsFalse(threads[i].Failed);
+            }
+
+            //System.out.println("    Writer: " + indexerThread.count + " iterations");
+            //System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
+            //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
+        }
+
+        /*
+          Run the above stress test against a newly created random
+          Directory implementation.
+        */
+
+        [Test]
+        public virtual void TestStressIndexAndSearching([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            Directory directory = NewDirectory();
+            MockDirectoryWrapper wrapper = directory as MockDirectoryWrapper;
+            if (wrapper != null)
+            {
+                wrapper.AssertNoUnrefencedFilesOnClose = true;
+            }
+
+            RunStressTest(directory, scheduler);
+            directory.Dispose();
+        }
+    }
+}
\ No newline at end of file


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Document/TestDocument.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Document/TestDocument.cs b/src/Lucene.Net.Tests/Document/TestDocument.cs
new file mode 100644
index 0000000..2a73911
--- /dev/null
+++ b/src/Lucene.Net.Tests/Document/TestDocument.cs
@@ -0,0 +1,454 @@
+using Lucene.Net.Support;
+using NUnit.Framework;
+using System.IO;
+using System.Text;
+
+namespace Lucene.Net.Documents
+{
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum;
+    using Fields = Lucene.Net.Index.Fields;
+    using IIndexableField = Lucene.Net.Index.IIndexableField;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using PhraseQuery = Lucene.Net.Search.PhraseQuery;
+    using Query = Lucene.Net.Search.Query;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using ScoreDoc = Lucene.Net.Search.ScoreDoc;
+    using Term = Lucene.Net.Index.Term;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using Terms = Lucene.Net.Index.Terms;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+
+    /// <summary>
+    /// Tests <seealso cref="Document"/> class.
+    /// </summary>
+    [TestFixture]
+    public class TestDocument : LuceneTestCase
+    {
+        internal string BinaryVal = "this text will be stored as a byte array in the index";
+        internal string BinaryVal2 = "this text will also be stored as a byte array in the index";
+
+        [Test]
+        public virtual void TestBinaryField()
+        {
+            Documents.Document doc = new Documents.Document();
+
+            FieldType ft = new FieldType();
+            ft.IsStored = true;
+            IIndexableField stringFld = new Field("string", BinaryVal, ft);
+            IIndexableField binaryFld = new StoredField("binary", BinaryVal.GetBytes(Encoding.UTF8));
+            IIndexableField binaryFld2 = new StoredField("binary", BinaryVal2.GetBytes(Encoding.UTF8));
+
+            doc.Add(stringFld);
+            doc.Add(binaryFld);
+
+            Assert.AreEqual(2, doc.Fields.Count);
+
+            Assert.IsTrue(binaryFld.GetBinaryValue() != null);
+            Assert.IsTrue(binaryFld.FieldType.IsStored);
+            Assert.IsFalse(binaryFld.FieldType.IsIndexed);
+
+            string binaryTest = doc.GetBinaryValue("binary").Utf8ToString();
+            Assert.IsTrue(binaryTest.Equals(BinaryVal));
+
+            string stringTest = doc.Get("string");
+            Assert.IsTrue(binaryTest.Equals(stringTest));
+
+            doc.Add(binaryFld2);
+
+            Assert.AreEqual(3, doc.Fields.Count);
+
+            BytesRef[] binaryTests = doc.GetBinaryValues("binary");
+
+            Assert.AreEqual(2, binaryTests.Length);
+
+            binaryTest = binaryTests[0].Utf8ToString();
+            string binaryTest2 = binaryTests[1].Utf8ToString();
+
+            Assert.IsFalse(binaryTest.Equals(binaryTest2));
+
+            Assert.IsTrue(binaryTest.Equals(BinaryVal));
+            Assert.IsTrue(binaryTest2.Equals(BinaryVal2));
+
+            doc.RemoveField("string");
+            Assert.AreEqual(2, doc.Fields.Count);
+
+            doc.RemoveFields("binary");
+            Assert.AreEqual(0, doc.Fields.Count);
+        }
+
+        /// <summary>
+        /// Tests <seealso cref="Document#removeField(String)"/> method for a brand new Document
+        /// that has not been indexed yet.
+        /// </summary>
+        /// <exception cref="Exception"> on error </exception>
+
+        [Test]
+        public virtual void TestRemoveForNewDocument()
+        {
+            Documents.Document doc = MakeDocumentWithFields();
+            Assert.AreEqual(10, doc.Fields.Count);
+            doc.RemoveFields("keyword");
+            Assert.AreEqual(8, doc.Fields.Count);
+            doc.RemoveFields("doesnotexists"); // removing non-existing fields is
+            // silently ignored
+            doc.RemoveFields("keyword"); // removing a field more than once
+            Assert.AreEqual(8, doc.Fields.Count);
+            doc.RemoveFields("text");
+            Assert.AreEqual(6, doc.Fields.Count);
+            doc.RemoveFields("text");
+            Assert.AreEqual(6, doc.Fields.Count);
+            doc.RemoveFields("text");
+            Assert.AreEqual(6, doc.Fields.Count);
+            doc.RemoveFields("doesnotexists"); // removing non-existing fields is
+            // silently ignored
+            Assert.AreEqual(6, doc.Fields.Count);
+            doc.RemoveFields("unindexed");
+            Assert.AreEqual(4, doc.Fields.Count);
+            doc.RemoveFields("unstored");
+            Assert.AreEqual(2, doc.Fields.Count);
+            doc.RemoveFields("doesnotexists"); // removing non-existing fields is
+            // silently ignored
+            Assert.AreEqual(2, doc.Fields.Count);
+
+            doc.RemoveFields("indexed_not_tokenized");
+            Assert.AreEqual(0, doc.Fields.Count);
+        }
+
+        [Test]
+        public virtual void TestConstructorExceptions()
+        {
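+            // a Field must be stored and/or indexed: a default FieldType is
+            // neither, and term vectors require indexing, so the two constructions
+            // wrapped in try/catch below are expected to throw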
+            FieldType ft = new FieldType();
+            ft.IsStored = true;
+            new Field("name", "value", ft); // okay
+            new StringField("name", "value", Field.Store.NO); // okay
+            try
+            {
+                new Field("name", "value", new FieldType());
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+            new Field("name", "value", ft); // okay
+            try
+            {
+                FieldType ft2 = new FieldType();
+                ft2.IsStored = true;
+                ft2.StoreTermVectors = true;
+                new Field("name", "value", ft2);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+        }
+
+        /// <summary>
+        /// Tests <seealso cref="Document#getValues(String)"/> method for a brand new Document
+        /// that has not been indexed yet.
+        /// </summary>
+        /// <exception cref="Exception"> on error </exception>
+        [Test]
+        public virtual void TestGetValuesForNewDocument()
+        {
+            DoAssert(MakeDocumentWithFields(), false);
+        }
+
+        /// <summary>
+        /// Tests <seealso cref="Document#getValues(String)"/> method for a Document retrieved
+        /// from an index.
+        /// </summary>
+        /// <exception cref="Exception"> on error </exception>
+        [Test]
+        public virtual void TestGetValuesForIndexedDocument()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            writer.AddDocument(MakeDocumentWithFields());
+            IndexReader reader = writer.Reader;
+
+            IndexSearcher searcher = NewSearcher(reader);
+
+            // search for something that does exist
+            Query query = new TermQuery(new Term("keyword", "test1"));
+
+            // ensure that the query returns the expected results
+            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+
+            DoAssert(searcher.Doc(hits[0].Doc), true);
+            writer.Dispose();
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestGetValues()
+        {
+            Documents.Document doc = MakeDocumentWithFields();
+            Assert.AreEqual(new string[] { "test1", "test2" }, doc.GetValues("keyword"));
+            Assert.AreEqual(new string[] { "test1", "test2" }, doc.GetValues("text"));
+            Assert.AreEqual(new string[] { "test1", "test2" }, doc.GetValues("unindexed"));
+            Assert.AreEqual(new string[0], doc.GetValues("nope"));
+        }
+
+        [Test]
+        public virtual void TestPositionIncrementMultiFields()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            writer.AddDocument(MakeDocumentWithFields());
+            IndexReader reader = writer.Reader;
+
+            IndexSearcher searcher = NewSearcher(reader);
+            PhraseQuery query = new PhraseQuery();
+            query.Add(new Term("indexed_not_tokenized", "test1"));
+            query.Add(new Term("indexed_not_tokenized", "test2"));
+
+            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+
+            DoAssert(searcher.Doc(hits[0].Doc), true);
+            writer.Dispose();
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        private Documents.Document MakeDocumentWithFields()
+        {
+            Documents.Document doc = new Documents.Document();
+            FieldType stored = new FieldType();
+            stored.IsStored = true;
+            FieldType indexedNotTokenized = new FieldType();
+            indexedNotTokenized.IsIndexed = true;
+            indexedNotTokenized.IsTokenized = false;
+            doc.Add(new StringField("keyword", "test1", Field.Store.YES));
+            doc.Add(new StringField("keyword", "test2", Field.Store.YES));
+            doc.Add(new TextField("text", "test1", Field.Store.YES));
+            doc.Add(new TextField("text", "test2", Field.Store.YES));
+            doc.Add(new Field("unindexed", "test1", stored));
+            doc.Add(new Field("unindexed", "test2", stored));
+            doc.Add(new TextField("unstored", "test1", Field.Store.NO));
+            doc.Add(new TextField("unstored", "test2", Field.Store.NO));
+            doc.Add(new Field("indexed_not_tokenized", "test1", indexedNotTokenized));
+            doc.Add(new Field("indexed_not_tokenized", "test2", indexedNotTokenized));
+            return doc;
+        }
+
+        private void DoAssert(Documents.Document doc, bool fromIndex)
+        {
+            IIndexableField[] keywordFieldValues = doc.GetFields("keyword");
+            IIndexableField[] textFieldValues = doc.GetFields("text");
+            IIndexableField[] unindexedFieldValues = doc.GetFields("unindexed");
+            IIndexableField[] unstoredFieldValues = doc.GetFields("unstored");
+
+            Assert.IsTrue(keywordFieldValues.Length == 2);
+            Assert.IsTrue(textFieldValues.Length == 2);
+            Assert.IsTrue(unindexedFieldValues.Length == 2);
+            // this test cannot work for documents retrieved from the index
+            // since unstored fields will obviously not be returned
+            if (!fromIndex)
+            {
+                Assert.IsTrue(unstoredFieldValues.Length == 2);
+            }
+
+            Assert.IsTrue(keywordFieldValues[0].GetStringValue().Equals("test1"));
+            Assert.IsTrue(keywordFieldValues[1].GetStringValue().Equals("test2"));
+            Assert.IsTrue(textFieldValues[0].GetStringValue().Equals("test1"));
+            Assert.IsTrue(textFieldValues[1].GetStringValue().Equals("test2"));
+            Assert.IsTrue(unindexedFieldValues[0].GetStringValue().Equals("test1"));
+            Assert.IsTrue(unindexedFieldValues[1].GetStringValue().Equals("test2"));
+            // this test cannot work for documents retrieved from the index
+            // since unstored fields will obviously not be returned
+            if (!fromIndex)
+            {
+                Assert.IsTrue(unstoredFieldValues[0].GetStringValue().Equals("test1"));
+                Assert.IsTrue(unstoredFieldValues[1].GetStringValue().Equals("test2"));
+            }
+        }
+
+        [Test]
+        public virtual void TestFieldSetValue()
+        {
+            Field field = new StringField("id", "id1", Field.Store.YES);
+            Documents.Document doc = new Documents.Document();
+            doc.Add(field);
+            doc.Add(new StringField("keyword", "test", Field.Store.YES));
+
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            writer.AddDocument(doc);
+            field.SetStringValue("id2");
+            writer.AddDocument(doc);
+            field.SetStringValue("id3");
+            writer.AddDocument(doc);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+
+            Query query = new TermQuery(new Term("keyword", "test"));
+
+            // ensure that the query returns the expected results
+            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            int result = 0;
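+            // each id maps to a distinct bit (1, 2, 4); with exactly three hits,
+            // result == 7 below means each id was seen exactly once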
+            for (int i = 0; i < 3; i++)
+            {
+                Documents.Document doc2 = searcher.Doc(hits[i].Doc);
+                Field f = (Field)doc2.GetField("id");
+                if (f.GetStringValue().Equals("id1"))
+                {
+                    result |= 1;
+                }
+                else if (f.GetStringValue().Equals("id2"))
+                {
+                    result |= 2;
+                }
+                else if (f.GetStringValue().Equals("id3"))
+                {
+                    result |= 4;
+                }
+                else
+                {
+                    Assert.Fail("unexpected id field");
+                }
+            }
+            writer.Dispose();
+            reader.Dispose();
+            dir.Dispose();
+            Assert.AreEqual(7, result, "did not see all IDs");
+        }
+
+        // LUCENE-3616
+        [Test]
+        public virtual void TestInvalidFields()
+        {
+            Assert.Throws<System.ArgumentException>(() => { new Field("foo", new MockTokenizer(new StreamReader(File.Open("", FileMode.Open))), StringField.TYPE_STORED); });
+        }
+
+        // LUCENE-3682
+        [Test]
+        public virtual void TestTransitionAPI()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+
+            Documents.Document doc = new Documents.Document();
+#pragma warning disable 612, 618
+            doc.Add(new Field("stored", "abc", Field.Store.YES, Field.Index.NO));
+            doc.Add(new Field("stored_indexed", "abc xyz", Field.Store.YES, Field.Index.NOT_ANALYZED));
+            doc.Add(new Field("stored_tokenized", "abc xyz", Field.Store.YES, Field.Index.ANALYZED));
+            doc.Add(new Field("indexed", "abc xyz", Field.Store.NO, Field.Index.NOT_ANALYZED));
+            doc.Add(new Field("tokenized", "abc xyz", Field.Store.NO, Field.Index.ANALYZED));
+            doc.Add(new Field("tokenized_reader", new StringReader("abc xyz")));
+            doc.Add(new Field("tokenized_tokenstream", w.w.Analyzer.TokenStream("tokenized_tokenstream", new StringReader("abc xyz"))));
+            doc.Add(new Field("binary", new byte[10]));
+            doc.Add(new Field("tv", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
+            doc.Add(new Field("tv_pos", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
+            doc.Add(new Field("tv_off", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_OFFSETS));
+            doc.Add(new Field("tv_pos_off", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+#pragma warning restore 612, 618
+            w.AddDocument(doc);
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            doc = r.Document(0);
+            // 4 stored fields
+            Assert.AreEqual(4, doc.Fields.Count);
+            Assert.AreEqual("abc", doc.Get("stored"));
+            Assert.AreEqual("abc xyz", doc.Get("stored_indexed"));
+            Assert.AreEqual("abc xyz", doc.Get("stored_tokenized"));
+            BytesRef br = doc.GetBinaryValue("binary");
+            Assert.IsNotNull(br);
+            Assert.AreEqual(10, br.Length);
+
+            IndexSearcher s = new IndexSearcher(r);
+            Assert.AreEqual(1, s.Search(new TermQuery(new Term("stored_indexed", "abc xyz")), 1).TotalHits);
+            Assert.AreEqual(1, s.Search(new TermQuery(new Term("stored_tokenized", "abc")), 1).TotalHits);
+            Assert.AreEqual(1, s.Search(new TermQuery(new Term("stored_tokenized", "xyz")), 1).TotalHits);
+            Assert.AreEqual(1, s.Search(new TermQuery(new Term("indexed", "abc xyz")), 1).TotalHits);
+            Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized", "abc")), 1).TotalHits);
+            Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized", "xyz")), 1).TotalHits);
+            Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized_reader", "abc")), 1).TotalHits);
+            Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized_reader", "xyz")), 1).TotalHits);
+            Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized_tokenstream", "abc")), 1).TotalHits);
+            Assert.AreEqual(1, s.Search(new TermQuery(new Term("tokenized_tokenstream", "xyz")), 1).TotalHits);
+
+            foreach (string field in new string[] { "tv", "tv_pos", "tv_off", "tv_pos_off" })
+            {
+                Fields tvFields = r.GetTermVectors(0);
+                Terms tvs = tvFields.GetTerms(field);
+                Assert.IsNotNull(tvs);
+                Assert.AreEqual(2, tvs.Count);
+                TermsEnum tvsEnum = tvs.GetIterator(null);
+                Assert.AreEqual(new BytesRef("abc"), tvsEnum.Next());
+                DocsAndPositionsEnum dpEnum = tvsEnum.DocsAndPositions(null, null);
+                if (field.Equals("tv"))
+                {
+                    Assert.IsNull(dpEnum);
+                }
+                else
+                {
+                    Assert.IsNotNull(dpEnum);
+                }
+                Assert.AreEqual(new BytesRef("xyz"), tvsEnum.Next());
+                Assert.IsNull(tvsEnum.Next());
+            }
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNumericFieldAsString()
+        {
+            Documents.Document doc = new Documents.Document();
+            doc.Add(new Int32Field("int", 5, Field.Store.YES));
+            Assert.AreEqual("5", doc.Get("int"));
+            Assert.IsNull(doc.Get("somethingElse"));
+            doc.Add(new Int32Field("int", 4, Field.Store.YES));
+            Assert.AreEqual(new string[] { "5", "4" }, doc.GetValues("int"));
+
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            iw.AddDocument(doc);
+            DirectoryReader ir = iw.Reader;
+            Documents.Document sdoc = ir.Document(0);
+            Assert.AreEqual("5", sdoc.Get("int"));
+            Assert.IsNull(sdoc.Get("somethingElse"));
+            Assert.AreEqual(new string[] { "5", "4" }, sdoc.GetValues("int"));
+            ir.Dispose();
+            iw.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Document/TestField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Document/TestField.cs b/src/Lucene.Net.Tests/Document/TestField.cs
new file mode 100644
index 0000000..f436364
--- /dev/null
+++ b/src/Lucene.Net.Tests/Document/TestField.cs
@@ -0,0 +1,617 @@
+using Lucene.Net.Support;
+using NUnit.Framework;
+using System;
+using System.IO;
+using System.Text;
+
+namespace Lucene.Net.Documents
+{
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CannedTokenStream = Lucene.Net.Analysis.CannedTokenStream;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using Token = Lucene.Net.Analysis.Token;
+
+    // sanity check some basics of fields
+    [TestFixture]
+    public class TestField : LuceneTestCase
+    {
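+        // Each TrySetXxx helper used below (defined later in this file) asserts
+        // that setting a value of a mismatched type throws; each test then sets
+        // the one legal value type and verifies that it round-trips.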
+        [Test]
+        public virtual void TestDoubleField()
+        {
+            Field[] fields = new Field[] { new DoubleField("foo", 5d, Field.Store.NO), new DoubleField("foo", 5d, Field.Store.YES) };
+
+            foreach (Field field in fields)
+            {
+                TrySetBoost(field);
+                TrySetByteValue(field);
+                TrySetBytesValue(field);
+                TrySetBytesRefValue(field);
+                field.SetDoubleValue(6d); // ok
+                TrySetIntValue(field);
+                TrySetFloatValue(field);
+                TrySetLongValue(field);
+                TrySetReaderValue(field);
+                TrySetShortValue(field);
+                TrySetStringValue(field);
+                TrySetTokenStreamValue(field);
+
+                Assert.AreEqual(6d, (double)field.GetNumericValue(), 0.0d);
+            }
+        }
+
+        [Test]
+        public virtual void TestDoubleDocValuesField()
+        {
+            DoubleDocValuesField field = new DoubleDocValuesField("foo", 5d);
+
+            TrySetBoost(field);
+            TrySetByteValue(field);
+            TrySetBytesValue(field);
+            TrySetBytesRefValue(field);
+            field.SetDoubleValue(6d); // ok
+            TrySetIntValue(field);
+            TrySetFloatValue(field);
+            TrySetLongValue(field);
+            TrySetReaderValue(field);
+            TrySetShortValue(field);
+            TrySetStringValue(field);
+            TrySetTokenStreamValue(field);
+
+            Assert.AreEqual(6d, BitConverter.Int64BitsToDouble((long)field.GetNumericValue()), 0.0d);
+        }
+
+        [Test]
+        public virtual void TestFloatDocValuesField()
+        {
+            SingleDocValuesField field = new SingleDocValuesField("foo", 5f);
+
+            TrySetBoost(field);
+            TrySetByteValue(field);
+            TrySetBytesValue(field);
+            TrySetBytesRefValue(field);
+            TrySetDoubleValue(field);
+            TrySetIntValue(field);
+            field.SetSingleValue(6f); // ok
+            TrySetLongValue(field);
+            TrySetReaderValue(field);
+            TrySetShortValue(field);
+            TrySetStringValue(field);
+            TrySetTokenStreamValue(field);
+
+            Assert.AreEqual(6f, Number.Int32BitsToSingle(Convert.ToInt32(field.GetNumericValue())), 0.0f);
+        }
+
+        [Test]
+        public virtual void TestFloatField()
+        {
+            Field[] fields = new Field[] { new SingleField("foo", 5f, Field.Store.NO), new SingleField("foo", 5f, Field.Store.YES) };
+
+            foreach (Field field in fields)
+            {
+                TrySetBoost(field);
+                TrySetByteValue(field);
+                TrySetBytesValue(field);
+                TrySetBytesRefValue(field);
+                TrySetDoubleValue(field);
+                TrySetIntValue(field);
+                field.SetSingleValue(6f); // ok
+                TrySetLongValue(field);
+                TrySetReaderValue(field);
+                TrySetShortValue(field);
+                TrySetStringValue(field);
+                TrySetTokenStreamValue(field);
+
+                Assert.AreEqual(6f, (float)field.GetNumericValue(), 0.0f);
+            }
+        }
+
+        [Test]
+        public virtual void TestIntField()
+        {
+            Field[] fields = new Field[] { new Int32Field("foo", 5, Field.Store.NO), new Int32Field("foo", 5, Field.Store.YES) };
+
+            foreach (Field field in fields)
+            {
+                TrySetBoost(field);
+                TrySetByteValue(field);
+                TrySetBytesValue(field);
+                TrySetBytesRefValue(field);
+                TrySetDoubleValue(field);
+                field.SetInt32Value(6); // ok
+                TrySetFloatValue(field);
+                TrySetLongValue(field);
+                TrySetReaderValue(field);
+                TrySetShortValue(field);
+                TrySetStringValue(field);
+                TrySetTokenStreamValue(field);
+
+                Assert.AreEqual(6, (int)field.GetNumericValue());
+            }
+        }
+
+        [Test]
+        public virtual void TestNumericDocValuesField()
+        {
+            NumericDocValuesField field = new NumericDocValuesField("foo", 5L);
+
+            TrySetBoost(field);
+            TrySetByteValue(field);
+            TrySetBytesValue(field);
+            TrySetBytesRefValue(field);
+            TrySetDoubleValue(field);
+            TrySetIntValue(field);
+            TrySetFloatValue(field);
+            field.SetInt64Value(6); // ok
+            TrySetReaderValue(field);
+            TrySetShortValue(field);
+            TrySetStringValue(field);
+            TrySetTokenStreamValue(field);
+
+            Assert.AreEqual(6L, (long)field.GetNumericValue());
+        }
+
+        [Test]
+        public virtual void TestLongField()
+        {
+            Field[] fields = new Field[] { new Int64Field("foo", 5L, Field.Store.NO), new Int64Field("foo", 5L, Field.Store.YES) };
+
+            foreach (Field field in fields)
+            {
+                TrySetBoost(field);
+                TrySetByteValue(field);
+                TrySetBytesValue(field);
+                TrySetBytesRefValue(field);
+                TrySetDoubleValue(field);
+                TrySetIntValue(field);
+                TrySetFloatValue(field);
+                field.SetInt64Value(6); // ok
+                TrySetReaderValue(field);
+                TrySetShortValue(field);
+                TrySetStringValue(field);
+                TrySetTokenStreamValue(field);
+
+                Assert.AreEqual(6L, (long)field.GetNumericValue());
+            }
+        }
+
+        [Test]
+        public virtual void TestSortedBytesDocValuesField()
+        {
+            SortedDocValuesField field = new SortedDocValuesField("foo", new BytesRef("bar"));
+
+            TrySetBoost(field);
+            TrySetByteValue(field);
+            field.SetBytesValue("fubar".ToBytesRefArray(Encoding.UTF8));
+            field.SetBytesValue(new BytesRef("baz"));
+            TrySetDoubleValue(field);
+            TrySetIntValue(field);
+            TrySetFloatValue(field);
+            TrySetLongValue(field);
+            TrySetReaderValue(field);
+            TrySetShortValue(field);
+            TrySetStringValue(field);
+            TrySetTokenStreamValue(field);
+
+            Assert.AreEqual(new BytesRef("baz"), field.GetBinaryValue());
+        }
+
+        [Test]
+        public virtual void TestBinaryDocValuesField()
+        {
+            BinaryDocValuesField field = new BinaryDocValuesField("foo", new BytesRef("bar"));
+
+            TrySetBoost(field);
+            TrySetByteValue(field);
+            field.SetBytesValue("fubar".ToBytesRefArray(Encoding.UTF8));
+            field.SetBytesValue(new BytesRef("baz"));
+            TrySetDoubleValue(field);
+            TrySetIntValue(field);
+            TrySetFloatValue(field);
+            TrySetLongValue(field);
+            TrySetReaderValue(field);
+            TrySetShortValue(field);
+            TrySetStringValue(field);
+            TrySetTokenStreamValue(field);
+
+            Assert.AreEqual(new BytesRef("baz"), field.GetBinaryValue());
+        }
+
+        [Test]
+        public virtual void TestStringField()
+        {
+            Field[] fields = new Field[] { new StringField("foo", "bar", Field.Store.NO), new StringField("foo", "bar", Field.Store.YES) };
+
+            foreach (Field field in fields)
+            {
+                TrySetBoost(field);
+                TrySetByteValue(field);
+                TrySetBytesValue(field);
+                TrySetBytesRefValue(field);
+                TrySetDoubleValue(field);
+                TrySetIntValue(field);
+                TrySetFloatValue(field);
+                TrySetLongValue(field);
+                TrySetReaderValue(field);
+                TrySetShortValue(field);
+                field.SetStringValue("baz");
+                TrySetTokenStreamValue(field);
+
+                Assert.AreEqual("baz", field.GetStringValue());
+            }
+        }
+
+        [Test]
+        public virtual void TestTextFieldString()
+        {
+            Field[] fields = new Field[] { new TextField("foo", "bar", Field.Store.NO), new TextField("foo", "bar", Field.Store.YES) };
+
+            foreach (Field field in fields)
+            {
+                field.Boost = 5f;
+                TrySetByteValue(field);
+                TrySetBytesValue(field);
+                TrySetBytesRefValue(field);
+                TrySetDoubleValue(field);
+                TrySetIntValue(field);
+                TrySetFloatValue(field);
+                TrySetLongValue(field);
+                TrySetReaderValue(field);
+                TrySetShortValue(field);
+                field.SetStringValue("baz");
+                field.SetTokenStream(new CannedTokenStream(new Token("foo", 0, 3)));
+
+                Assert.AreEqual("baz", field.GetStringValue());
+                Assert.AreEqual(5f, field.Boost, 0f);
+            }
+        }
+
+        [Test]
+        public virtual void TestTextFieldReader()
+        {
+            Field field = new TextField("foo", new StringReader("bar"));
+
+            field.Boost = 5f;
+            TrySetByteValue(field);
+            TrySetBytesValue(field);
+            TrySetBytesRefValue(field);
+            TrySetDoubleValue(field);
+            TrySetIntValue(field);
+            TrySetFloatValue(field);
+            TrySetLongValue(field);
+            field.SetReaderValue(new StringReader("foobar"));
+            TrySetShortValue(field);
+            TrySetStringValue(field);
+            field.SetTokenStream(new CannedTokenStream(new Token("foo", 0, 3)));
+
+            Assert.IsNotNull(field.GetReaderValue());
+            Assert.AreEqual(5f, field.Boost, 0f);
+        }
+
+        /* TODO: this is pretty expert and crazy
+         * see if we can fix it up later
+        public void testTextFieldTokenStream() throws Exception {
+        }
+        */
+
+        [Test]
+        public virtual void TestStoredFieldBytes()
+        {
+            Field[] fields = new Field[] { new StoredField("foo", "bar".GetBytes(Encoding.UTF8)), new StoredField("foo", "bar".GetBytes(Encoding.UTF8), 0, 3), new StoredField("foo", new BytesRef("bar")) };
+
+            foreach (Field field in fields)
+            {
+                TrySetBoost(field);
+                TrySetByteValue(field);
+                field.SetBytesValue("baz".ToBytesRefArray(Encoding.UTF8));
+                field.SetBytesValue(new BytesRef("baz"));
+                TrySetDoubleValue(field);
+                TrySetIntValue(field);
+                TrySetFloatValue(field);
+                TrySetLongValue(field);
+                TrySetReaderValue(field);
+                TrySetShortValue(field);
+                TrySetStringValue(field);
+                TrySetTokenStreamValue(field);
+
+                Assert.AreEqual(new BytesRef("baz"), field.GetBinaryValue());
+            }
+        }
+
+        [Test]
+        public virtual void TestStoredFieldString()
+        {
+            Field field = new StoredField("foo", "bar");
+            TrySetBoost(field);
+            TrySetByteValue(field);
+            TrySetBytesValue(field);
+            TrySetBytesRefValue(field);
+            TrySetDoubleValue(field);
+            TrySetIntValue(field);
+            TrySetFloatValue(field);
+            TrySetLongValue(field);
+            TrySetReaderValue(field);
+            TrySetShortValue(field);
+            field.SetStringValue("baz");
+            TrySetTokenStreamValue(field);
+
+            Assert.AreEqual("baz", field.GetStringValue());
+        }
+
+        [Test]
+        public virtual void TestStoredFieldInt()
+        {
+            Field field = new StoredField("foo", 1);
+            TrySetBoost(field);
+            TrySetByteValue(field);
+            TrySetBytesValue(field);
+            TrySetBytesRefValue(field);
+            TrySetDoubleValue(field);
+            field.SetInt32Value(5);
+            TrySetFloatValue(field);
+            TrySetLongValue(field);
+            TrySetReaderValue(field);
+            TrySetShortValue(field);
+            TrySetStringValue(field);
+            TrySetTokenStreamValue(field);
+
+            Assert.AreEqual(5, (int)field.GetNumericValue());
+        }
+
+        [Test]
+        public virtual void TestStoredFieldDouble()
+        {
+            Field field = new StoredField("foo", 1D);
+            TrySetBoost(field);
+            TrySetByteValue(field);
+            TrySetBytesValue(field);
+            TrySetBytesRefValue(field);
+            field.SetDoubleValue(5D);
+            TrySetIntValue(field);
+            TrySetFloatValue(field);
+            TrySetLongValue(field);
+            TrySetReaderValue(field);
+            TrySetShortValue(field);
+            TrySetStringValue(field);
+            TrySetTokenStreamValue(field);
+
+            Assert.AreEqual(5D, (double)field.GetNumericValue(), 0.0D);
+        }
+
+        [Test]
+        public virtual void TestStoredFieldFloat()
+        {
+            Field field = new StoredField("foo", 1F);
+            TrySetBoost(field);
+            TrySetByteValue(field);
+            TrySetBytesValue(field);
+            TrySetBytesRefValue(field);
+            TrySetDoubleValue(field);
+            TrySetIntValue(field);
+            field.SetSingleValue(5f);
+            TrySetLongValue(field);
+            TrySetReaderValue(field);
+            TrySetShortValue(field);
+            TrySetStringValue(field);
+            TrySetTokenStreamValue(field);
+
+            Assert.AreEqual(5f, (float)field.GetNumericValue(), 0.0f);
+        }
+
+        [Test]
+        public virtual void TestStoredFieldLong()
+        {
+            Field field = new StoredField("foo", 1L);
+            TrySetBoost(field);
+            TrySetByteValue(field);
+            TrySetBytesValue(field);
+            TrySetBytesRefValue(field);
+            TrySetDoubleValue(field);
+            TrySetIntValue(field);
+            TrySetFloatValue(field);
+            field.SetInt64Value(5);
+            TrySetReaderValue(field);
+            TrySetShortValue(field);
+            TrySetStringValue(field);
+            TrySetTokenStreamValue(field);
+
+            Assert.AreEqual(5L, (long)field.GetNumericValue());
+        }
+
+        private void TrySetByteValue(Field f)
+        {
+            try
+            {
+                f.SetByteValue((byte)10);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+        }
+
+        private void TrySetBytesValue(Field f)
+        {
+            try
+            {
+                f.SetBytesValue(new byte[] { 5, 5 });
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+        }
+
+        private void TrySetBytesRefValue(Field f)
+        {
+            try
+            {
+                f.SetBytesValue(new BytesRef("bogus"));
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+        }
+
+        private void TrySetDoubleValue(Field f)
+        {
+            try
+            {
+                f.SetDoubleValue(double.MaxValue);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+        }
+
+        private void TrySetIntValue(Field f)
+        {
+            try
+            {
+                f.SetInt32Value(int.MaxValue);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+        }
+
+        private void TrySetLongValue(Field f)
+        {
+            try
+            {
+                f.SetInt64Value(long.MaxValue);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+        }
+
+        private void TrySetFloatValue(Field f)
+        {
+            try
+            {
+                f.SetSingleValue(float.MaxValue);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+        }
+
+        private void TrySetReaderValue(Field f)
+        {
+            try
+            {
+                f.SetReaderValue(new StringReader("BOO!"));
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+        }
+
+        private void TrySetShortValue(Field f)
+        {
+            try
+            {
+                f.SetInt16Value(short.MaxValue);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+        }
+
+        private void TrySetStringValue(Field f)
+        {
+            try
+            {
+                f.SetStringValue("BOO!");
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+        }
+
+        private void TrySetTokenStreamValue(Field f)
+        {
+            try
+            {
+                f.SetTokenStream(new CannedTokenStream(new Token("foo", 0, 3)));
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+        }
+
+        private void TrySetBoost(Field f)
+        {
+            try
+            {
+                f.Boost = 5.0f;
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+        }
+    }
+}
\ No newline at end of file
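
The TrySetXxx helpers above all assert one contract: a Field accepts only the
value kind it was constructed with and throws ArgumentException for everything
else. A minimal sketch of that contract, assuming only the Field APIs visible
in the diff above, with NUnit's Assert.Throws standing in for the
try/fail/catch pattern:

    [Test]
    public void Int32FieldRejectsMismatchedValues()
    {
        // Sketch only; assumes the same usings as the test file above.
        Field field = new Int32Field("foo", 5, Field.Store.NO);
        field.SetInt32Value(6); // matching type: accepted
        Assert.Throws<ArgumentException>(() => field.SetStringValue("bogus"));
        Assert.Throws<ArgumentException>(() => field.SetInt64Value(7L));
    }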

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/BinaryTokenStream.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/BinaryTokenStream.cs b/src/Lucene.Net.Tests/Index/BinaryTokenStream.cs
new file mode 100644
index 0000000..cc1ead2
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/BinaryTokenStream.cs
@@ -0,0 +1,101 @@
+using Lucene.Net.Analysis.TokenAttributes;
+
+namespace Lucene.Net.Index
+{
+    using Attribute = Lucene.Net.Util.Attribute;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using IAttribute = Lucene.Net.Util.IAttribute;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using TokenStream = Lucene.Net.Analysis.TokenStream;
+
+    // javadocs
+
+    /// <summary>
+    /// A binary tokenstream that lets you index a single
+    /// binary token (BytesRef value).
+    /// </summary>
+    /// <seealso cref="CannedBinaryTokenStream"/>
+    public sealed class BinaryTokenStream : TokenStream
+    {
+        private readonly IByteTermAttribute BytesAtt;// = addAttribute(typeof(ByteTermAttribute));
+        private readonly BytesRef Bytes;
+        private bool Available = true;
+
+        public BinaryTokenStream(BytesRef bytes)
+        {
+            this.Bytes = bytes;
+            BytesAtt = AddAttribute<IByteTermAttribute>();
+        }
+
+        public override bool IncrementToken()
+        {
+            if (Available)
+            {
+                ClearAttributes();
+                Available = false;
+                BytesAtt.BytesRef = Bytes;
+                return true;
+            }
+            return false;
+        }
+
+        public override void Reset()
+        {
+            Available = true;
+        }
+
+        public interface IByteTermAttribute : ITermToBytesRefAttribute
+        {
+            new BytesRef BytesRef { get; set; }
+        }
+
+        public class ByteTermAttribute : Attribute, IByteTermAttribute
+        {
+            internal BytesRef Bytes;
+
+            public void FillBytesRef()
+            {
+                // no-op: the BytesRef was already filled by our owner's IncrementToken
+            }
+
+            public BytesRef BytesRef
+            {
+                get
+                {
+                    return Bytes;
+                }
+                set
+                {
+                    this.Bytes = value;
+                }
+            }
+
+            public override void Clear()
+            {
+            }
+
+            public override void CopyTo(IAttribute target)
+            {
+                ByteTermAttribute other = (ByteTermAttribute)target;
+                other.Bytes = Bytes;
+            }
+        }
+    }
+}
\ No newline at end of file
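
BinaryTokenStream emits exactly one token and passes the caller's BytesRef to
the indexing chain unchanged through the custom IByteTermAttribute, so a field
built on it indexes a single opaque binary term. A hypothetical usage sketch,
not part of this commit and assuming the TextField(string, TokenStream)
constructor exists in this port:

    var bytes = new BytesRef(new byte[] { 0xCA, 0xFE, 0xBA, 0xBE });
    var doc = new Document();
    doc.Add(new TextField("bytes", new BinaryTokenStream(bytes)));
    // IncrementToken() returns true exactly once, exposing `bytes` through
    // IByteTermAttribute, then false; Reset() re-arms the stream for reuse.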

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/SynchronizedList.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/SynchronizedList.cs b/src/Lucene.Net.Tests/Index/SynchronizedList.cs
new file mode 100644
index 0000000..000620d
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/SynchronizedList.cs
@@ -0,0 +1,168 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace Lucene.Net.Index
+{
+
+    internal class SynchronizedList<T> : IList<T>
+    {
+        private readonly List<T> _list = new List<T>();
+
+        private readonly ReaderWriterLockSlim _lock = new ReaderWriterLockSlim();
+
+        public T this[int index]
+        {
+            // Lock here as well so the indexer is consistent with the
+            // other members of this synchronized list.
+            get { _lock.EnterReadLock(); try { return _list[index]; } finally { _lock.ExitReadLock(); } }
+            set { _lock.EnterWriteLock(); try { _list[index] = value; } finally { _lock.ExitWriteLock(); } }
+        }
+
+        public int Count
+        {
+            get { return _list.Count; }
+        }
+
+        public bool IsReadOnly
+        {
+            get { return false; }
+        }
+
+        public void Add(T item)
+        {
+            _lock.EnterWriteLock();
+
+            try
+            {
+                _list.Add(item);
+            }
+            finally
+            {
+                _lock.ExitWriteLock();
+            }
+        }
+
+        public void Clear()
+        {
+            _lock.EnterWriteLock();
+
+            try
+            {
+                _list.Clear();
+            }
+            finally
+            {
+                _lock.ExitWriteLock();
+            }
+        }
+
+        public bool Contains(T item)
+        {
+            _lock.EnterReadLock();
+
+            try
+            {
+                return _list.Contains(item);
+            }
+            finally
+            {
+                _lock.ExitReadLock();
+            }
+        }
+
+        public void CopyTo(T[] array, int arrayIndex)
+        {
+            _lock.EnterWriteLock();
+
+            try
+            {
+                _list.CopyTo(array, arrayIndex);
+            }
+            finally
+            {
+                _lock.ExitWriteLock();
+            }
+        }
+
+        public IEnumerator<T> GetEnumerator()
+        {
+            _lock.EnterReadLock();
+
+            try
+            {
+                // Enumerate a snapshot: the lock is released before the caller
+                // iterates, so handing out the live list's enumerator would not
+                // be thread safe.
+                return _list.ToList().GetEnumerator();
+            }
+            finally
+            {
+                _lock.ExitReadLock();
+            }
+        }
+
+        public int IndexOf(T item)
+        {
+            _lock.EnterReadLock();
+
+            try
+            {
+                return _list.IndexOf(item);
+            }
+            finally
+            {
+                _lock.ExitReadLock();
+            }
+        }
+
+        public void Insert(int index, T item)
+        {
+            _lock.EnterWriteLock();
+
+            try
+            {
+                _list.Insert(index, item);
+            }
+            finally
+            {
+                _lock.ExitWriteLock();
+            }
+        }
+
+        public bool Remove(T item)
+        {
+            _lock.EnterWriteLock();
+
+            try
+            {
+                return _list.Remove(item);
+            }
+            finally
+            {
+                _lock.ExitWriteLock();
+            }
+        }
+
+        public void RemoveAt(int index)
+        {
+            _lock.EnterWriteLock();
+
+            try
+            {
+                _list.RemoveAt(index);
+            }
+            finally
+            {
+                _lock.ExitWriteLock();
+            }
+        }
+
+        IEnumerator IEnumerable.GetEnumerator()
+        {
+            return GetEnumerator();
+        }
+    }
+}
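
SynchronizedList guards its mutators (Add, Clear, Insert, Remove, RemoveAt)
with an exclusive write lock and its queries (Contains, IndexOf, GetEnumerator)
with a shared read lock, so readers run in parallel while writers serialize.
An illustrative use, not part of this commit (the class is internal to
Lucene.Net.Index):

    var list = new SynchronizedList<int>();
    Parallel.For(0, 1000, i => list.Add(i)); // each Add takes the write lock
    bool found = list.Contains(500);         // Contains takes the read lock
    // found == true; Count == 1000 regardless of interleaving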

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs b/src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs
new file mode 100644
index 0000000..e7bc021
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs
@@ -0,0 +1,171 @@
+using Lucene.Net.Documents;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.Index
+{
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+
+    using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper;
+    using BinaryDocValuesField = BinaryDocValuesField;
+    using ByteArrayDataInput = Lucene.Net.Store.ByteArrayDataInput;
+    using ByteArrayDataOutput = Lucene.Net.Store.ByteArrayDataOutput;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Document = Documents.Document;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+
+    [SuppressCodecs("Lucene3x")]
+    [Ignore("takes ~ 45 minutes")]
+    [TestFixture]
+    public class Test2BBinaryDocValues : LuceneTestCase
+    {
+        // indexes Integer.MAX_VALUE docs with a fixed binary field
+        [Test]
+        public virtual void TestFixedBinary([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BFixedBinary"));
+            if (dir is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
+            }
+            var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                            .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+                            .SetRAMBufferSizeMB(256.0)
+                            .SetMergeScheduler(scheduler)
+                            .SetMergePolicy(NewLogMergePolicy(false, 10))
+                            .SetOpenMode(OpenMode.CREATE);
+            IndexWriter w = new IndexWriter(dir, config);
+
+            Document doc = new Document();
+            var bytes = new byte[4];
+            BytesRef data = new BytesRef(bytes);
+            BinaryDocValuesField dvField = new BinaryDocValuesField("dv", data);
+            doc.Add(dvField);
+
+            for (int i = 0; i < int.MaxValue; i++)
+            {
+                bytes[0] = (byte)(i >> 24);
+                bytes[1] = (byte)(i >> 16);
+                bytes[2] = (byte)(i >> 8);
+                bytes[3] = (byte)i;
+                w.AddDocument(doc);
+                if (i % 100000 == 0)
+                {
+                    Console.WriteLine("indexed: " + i);
+                    Console.Out.Flush();
+                }
+            }
+
+            w.ForceMerge(1);
+            w.Dispose();
+
+            Console.WriteLine("verifying...");
+            Console.Out.Flush();
+
+            DirectoryReader r = DirectoryReader.Open(dir);
+            int expectedValue = 0;
+            foreach (AtomicReaderContext context in r.Leaves)
+            {
+                AtomicReader reader = context.AtomicReader;
+                BytesRef scratch = new BytesRef();
+                BinaryDocValues dv = reader.GetBinaryDocValues("dv");
+                for (int i = 0; i < reader.MaxDoc; i++)
+                {
+                    bytes[0] = (byte)(expectedValue >> 24);
+                    bytes[1] = (byte)(expectedValue >> 16);
+                    bytes[2] = (byte)(expectedValue >> 8);
+                    bytes[3] = (byte)expectedValue;
+                    dv.Get(i, scratch);
+                    Assert.AreEqual(data, scratch);
+                    expectedValue++;
+                }
+            }
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        // indexes Integer.MAX_VALUE docs with a variable binary field
+        [Test]
+        public virtual void TestVariableBinary([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BVariableBinary"));
+            if (dir is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
+            }
+
+            var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                            .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+                            .SetRAMBufferSizeMB(256.0)
+                            .SetMergeScheduler(scheduler)
+                            .SetMergePolicy(NewLogMergePolicy(false, 10))
+                            .SetOpenMode(OpenMode.CREATE);
+            IndexWriter w = new IndexWriter(dir, config);
+
+            Document doc = new Document();
+            var bytes = new byte[4];
+            ByteArrayDataOutput encoder = new ByteArrayDataOutput(bytes);
+            BytesRef data = new BytesRef(bytes);
+            BinaryDocValuesField dvField = new BinaryDocValuesField("dv", data);
+            doc.Add(dvField);
+
+            for (int i = 0; i < int.MaxValue; i++)
+            {
+                encoder.Reset(bytes);
+                encoder.WriteVInt32(i % 65535); // 1, 2, or 3 bytes
+                data.Length = encoder.Position;
+                w.AddDocument(doc);
+                if (i % 100000 == 0)
+                {
+                    Console.WriteLine("indexed: " + i);
+                    Console.Out.Flush();
+                }
+            }
+
+            w.ForceMerge(1);
+            w.Dispose();
+
+            Console.WriteLine("verifying...");
+            Console.Out.Flush();
+
+            DirectoryReader r = DirectoryReader.Open(dir);
+            int expectedValue = 0;
+            ByteArrayDataInput input = new ByteArrayDataInput();
+            foreach (AtomicReaderContext context in r.Leaves)
+            {
+                AtomicReader reader = context.AtomicReader;
+                BytesRef scratch = new BytesRef(bytes);
+                BinaryDocValues dv = reader.GetBinaryDocValues("dv");
+                for (int i = 0; i < reader.MaxDoc; i++)
+                {
+                    dv.Get(i, scratch);
+                    input.Reset((byte[])(Array)scratch.Bytes, scratch.Offset, scratch.Length);
+                    Assert.AreEqual(expectedValue % 65535, input.ReadVInt32());
+                    Assert.IsTrue(input.Eof);
+                    expectedValue++;
+                }
+            }
+
+            r.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file
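
TestVariableBinary leans on Lucene's VInt encoding: WriteVInt32 stores 7 value
bits per byte and uses the high bit as a continuation flag, so i % 65535 (at
most 65534, i.e. 17 significant bits) always fits in 1 to 3 bytes of the fixed
4-byte buffer. A minimal round trip, not part of this commit, using the same
Lucene.Net.Store types the test uses:

    var buffer = new byte[4];
    var output = new ByteArrayDataOutput(buffer);
    output.WriteVInt32(65534);       // 17 significant bits -> 3 bytes
    int written = output.Position;   // 3

    var input = new ByteArrayDataInput();
    input.Reset(buffer, 0, written);
    int value = input.ReadVInt32();  // 65534 again; input.Eof is now true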

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/Test2BDocs.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/Test2BDocs.cs b/src/Lucene.Net.Tests/Index/Test2BDocs.cs
new file mode 100644
index 0000000..35963e5
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/Test2BDocs.cs
@@ -0,0 +1,105 @@
+using Lucene.Net.Support;
+using NUnit.Framework;
+
+namespace Lucene.Net.Index
+{
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    [TestFixture]
+    public class Test2BDocs : LuceneTestCase
+    {
+        internal static Directory Dir;
+
+        [OneTimeSetUp]
+        public static void BeforeClass()
+        {
+            Dir = NewFSDirectory(CreateTempDir("2Bdocs"));
+            IndexWriter iw = new IndexWriter(Dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
+            Document doc = new Document();
+            for (int i = 0; i < 262144; i++)
+            {
+                iw.AddDocument(doc);
+            }
+            iw.ForceMerge(1);
+            iw.Dispose();
+        }
+
+        [OneTimeTearDown]
+        public void AfterClass()
+        {
+            Dir.Dispose();
+            Dir = null;
+            base.TearDown();
+        }
+
+        public override void TearDown()
+        {
+            // LUCENENET: We don't want our temp directory deleted until after
+            // all of the tests in the class have run, so we override this with
+            // a no-op and call base.TearDown() manually from the
+            // [OneTimeTearDown] method (AfterClass) above.
+        }
+
+        [Test]
+        public virtual void TestOverflow()
+        {
+            DirectoryReader ir = DirectoryReader.Open(Dir);
+            IndexReader[] subReaders = new IndexReader[8192];
+            Arrays.Fill(subReaders, ir);
+            try
+            {
+                new MultiReader(subReaders);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            ir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestExactlyAtLimit()
+        {
+            Directory dir2 = NewFSDirectory(CreateTempDir("2BDocs2"));
+            IndexWriter iw = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
+            Document doc = new Document();
+            for (int i = 0; i < 262143; i++)
+            {
+                iw.AddDocument(doc);
+            }
+            iw.Dispose();
+            DirectoryReader ir = DirectoryReader.Open(Dir);
+            DirectoryReader ir2 = DirectoryReader.Open(dir2);
+            IndexReader[] subReaders = new IndexReader[8192];
+            Arrays.Fill(subReaders, ir);
+            subReaders[subReaders.Length - 1] = ir2;
+            MultiReader mr = new MultiReader(subReaders);
+            Assert.AreEqual(int.MaxValue, mr.MaxDoc);
+            Assert.AreEqual(int.MaxValue, mr.NumDocs);
+            ir.Dispose();
+            ir2.Dispose();
+            dir2.Dispose();
+        }
+    }
+}
\ No newline at end of file
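
The doc-count arithmetic behind these two tests: TestOverflow stacks 8192
copies of the 262,144-doc reader, one document more than an int can count,
while TestExactlyAtLimit swaps the final copy for a 262,143-doc reader to land
exactly on the limit:

    8192 * 262144          = 2^13 * 2^18 = 2,147,483,648  // int.MaxValue + 1: MultiReader must throw
    8191 * 262144 + 262143 = 2^31 - 1    = 2,147,483,647  // exactly int.MaxValue: MultiReader must succeed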

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/Test2BNumericDocValues.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/Test2BNumericDocValues.cs b/src/Lucene.Net.Tests/Index/Test2BNumericDocValues.cs
new file mode 100644
index 0000000..7c37423
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/Test2BNumericDocValues.cs
@@ -0,0 +1,89 @@
+using Lucene.Net.Attributes;
+using Lucene.Net.Documents;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.Index
+{
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+
+    using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper;
+    using Document = Documents.Document;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using NumericDocValuesField = NumericDocValuesField;
+
+    //@TimeoutSuite(millis = 80 * TimeUnits.HOUR) @Ignore("takes ~ 30 minutes") @SuppressCodecs("Lucene3x") public class Test2BNumericDocValues extends Lucene.Net.Util.LuceneTestCase
+    [SuppressCodecs("Lucene3x")]
+    [Ignore("takes ~ 30 minutes")]
+    [TestFixture]
+    public class Test2BNumericDocValues : LuceneTestCase
+    {
+        // indexes Integer.MAX_VALUE docs with an increasing dv field
+        [Test, LongRunningTest]
+        public virtual void TestNumerics([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BNumerics"));
+            if (dir is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
+            }
+
+            var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                            .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+                            .SetRAMBufferSizeMB(256.0)
+                            .SetMergeScheduler(scheduler)
+                            .SetMergePolicy(NewLogMergePolicy(false, 10))
+                            .SetOpenMode(OpenMode.CREATE);
+            IndexWriter w = new IndexWriter(dir, config);
+
+            Document doc = new Document();
+            NumericDocValuesField dvField = new NumericDocValuesField("dv", 0);
+            doc.Add(dvField);
+
+            for (int i = 0; i < int.MaxValue; i++)
+            {
+                dvField.SetInt64Value(i);
+                w.AddDocument(doc);
+                if (i % 100000 == 0)
+                {
+                    Console.WriteLine("indexed: " + i);
+                    Console.Out.Flush();
+                }
+            }
+
+            w.ForceMerge(1);
+            w.Dispose();
+
+            Console.WriteLine("verifying...");
+            Console.Out.Flush();
+
+            DirectoryReader r = DirectoryReader.Open(dir);
+            long expectedValue = 0;
+            foreach (AtomicReaderContext context in r.Leaves)
+            {
+                AtomicReader reader = context.AtomicReader;
+                NumericDocValues dv = reader.GetNumericDocValues("dv");
+                for (int i = 0; i < reader.MaxDoc; i++)
+                {
+                    Assert.AreEqual(expectedValue, dv.Get(i));
+                    expectedValue++;
+                }
+            }
+
+            r.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file
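
Like the other 2B tests, this one allocates nothing per document: a single
Document and a single NumericDocValuesField are built up front and only the
field's value is mutated before each AddDocument call. A trimmed sketch of the
reuse pattern, not part of this commit (the writer setup is elided):

    var dvField = new NumericDocValuesField("dv", 0);
    var doc = new Document();
    doc.Add(dvField);
    for (int i = 0; i < 3; i++)   // the real test runs to int.MaxValue
    {
        dvField.SetInt64Value(i); // mutate the shared field in place
        // w.AddDocument(doc);    // the writer reads the field's current value here
    }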

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/Test2BPositions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/Test2BPositions.cs b/src/Lucene.Net.Tests/Index/Test2BPositions.cs
new file mode 100644
index 0000000..d004779
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/Test2BPositions.cs
@@ -0,0 +1,123 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Documents;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.Index
+{
+    using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper;
+    using CharTermAttribute = Lucene.Net.Analysis.TokenAttributes.CharTermAttribute;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using PositionIncrementAttribute = Lucene.Net.Analysis.TokenAttributes.PositionIncrementAttribute;
+    using TextField = TextField;
+    using TokenStream = Lucene.Net.Analysis.TokenStream;
+
+    /// <summary>
+    /// Test indexes ~82M docs with 52 positions each, so you get > Integer.MAX_VALUE positions
+    /// @lucene.experimental
+    /// </summary>
+    [SuppressCodecs("SimpleText", "Memory", "Direct")]
+    [TestFixture]
+    public class Test2BPositions : LuceneTestCase
+    {
+        // uses lots of space and takes a few minutes
+        [Ignore("Very slow. Enable manually by removing Ignore.")]
+        [Test]
+        public virtual void Test([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BPositions"));
+            if (dir is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
+            }
+
+            var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                            .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+                            .SetRAMBufferSizeMB(256.0)
+                            .SetMergeScheduler(scheduler)
+                            .SetMergePolicy(NewLogMergePolicy(false, 10))
+                            .SetOpenMode(OpenMode.CREATE);
+            IndexWriter w = new IndexWriter(dir, config);
+
+            MergePolicy mp = w.Config.MergePolicy;
+            if (mp is LogByteSizeMergePolicy)
+            {
+                // 1 petabyte:
+                ((LogByteSizeMergePolicy)mp).MaxMergeMB = 1024 * 1024 * 1024;
+            }
+
+            Document doc = new Document();
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.OmitNorms = true;
+            Field field = new Field("field", new MyTokenStream(), ft);
+            doc.Add(field);
+
+            int numDocs = (int.MaxValue / 26) + 1;
+            for (int i = 0; i < numDocs; i++)
+            {
+                w.AddDocument(doc);
+                if (VERBOSE && i % 100000 == 0)
+                {
+                    Console.WriteLine(i + " of " + numDocs + "...");
+                }
+            }
+            w.ForceMerge(1);
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        public sealed class MyTokenStream : TokenStream
+        {
+            internal readonly ICharTermAttribute TermAtt;
+            internal readonly IPositionIncrementAttribute PosIncAtt;
+            internal int Index;
+
+            public MyTokenStream()
+            {
+                TermAtt = AddAttribute<ICharTermAttribute>();
+                PosIncAtt = AddAttribute<IPositionIncrementAttribute>();
+            }
+
+            public override bool IncrementToken()
+            {
+                if (Index < 52)
+                {
+                    ClearAttributes();
+                    TermAtt.Length = 1;
+                    TermAtt.Buffer[0] = 'a';
+                    PosIncAtt.PositionIncrement = 1 + Index;
+                    Index++;
+                    return true;
+                }
+                return false;
+            }
+
+            public override void Reset()
+            {
+                Index = 0;
+            }
+        }
+    }
+}
\ No newline at end of file
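
The arithmetic behind the summary above, spelled out: with 52 tokens per
document,

    numDocs         = int.MaxValue / 26 + 1 = 82,595,525
    total positions = 82,595,525 * 52       = 4,294,967,300  // roughly 2x int.MaxValue

and because MyTokenStream sets PositionIncrement to 1 + Index, the last token
of every document lands at position 1 + 2 + ... + 52 = 1,378, so large
in-document position gaps are exercised as well.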

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/Test2BPostings.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/Test2BPostings.cs b/src/Lucene.Net.Tests/Index/Test2BPostings.cs
new file mode 100644
index 0000000..2b79afd
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/Test2BPostings.cs
@@ -0,0 +1,125 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Attributes;
+using Lucene.Net.Documents;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.Index
+{
+    using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper;
+    using CharTermAttribute = Lucene.Net.Analysis.TokenAttributes.CharTermAttribute;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using TextField = TextField;
+    using TokenStream = Lucene.Net.Analysis.TokenStream;
+
+    /// <summary>
+    /// Test indexes ~82M docs with 26 terms each, so you get > Integer.MAX_VALUE terms/docs pairs
+    /// @lucene.experimental
+    /// </summary>
+    [SuppressCodecs("SimpleText", "Memory", "Direct", "Compressing")]
+    [TestFixture]
+    public class Test2BPostings : LuceneTestCase
+    {
+        [Ignore("Very slow. Enable manually by removing Ignore.")]
+#if !NETSTANDARD
+        // LUCENENET: NUnit's Timeout attribute is not available on .NET Core.
+        [Timeout(int.MaxValue)]
+#endif
+        [Test, LongRunningTest, HasTimeout]
+        public virtual void Test([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BPostings"));
+            if (dir is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
+            }
+
+            var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                            .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+                            .SetRAMBufferSizeMB(256.0)
+                            .SetMergeScheduler(scheduler)
+                            .SetMergePolicy(NewLogMergePolicy(false, 10))
+                            .SetOpenMode(OpenMode.CREATE);
+
+            IndexWriter w = new IndexWriter(dir, config);
+
+            MergePolicy mp = w.Config.MergePolicy;
+            if (mp is LogByteSizeMergePolicy)
+            {
+                // 1 petabyte:
+                ((LogByteSizeMergePolicy)mp).MaxMergeMB = 1024 * 1024 * 1024;
+            }
+
+            Document doc = new Document();
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.OmitNorms = true;
+            ft.IndexOptions = IndexOptions.DOCS_ONLY;
+            Field field = new Field("field", new MyTokenStream(), ft);
+            doc.Add(field);
+
+            int numDocs = (int.MaxValue / 26) + 1;
+            for (int i = 0; i < numDocs; i++)
+            {
+                w.AddDocument(doc);
+                if (VERBOSE && i % 100000 == 0)
+                {
+                    Console.WriteLine(i + " of " + numDocs + "...");
+                }
+            }
+            w.ForceMerge(1);
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        public sealed class MyTokenStream : TokenStream
+        {
+            internal readonly ICharTermAttribute TermAtt;
+            internal int Index;
+
+            public MyTokenStream()
+            {
+                TermAtt = AddAttribute<ICharTermAttribute>();
+            }
+
+            public override bool IncrementToken()
+            {
+                if (Index <= 'z')
+                {
+                    ClearAttributes();
+                    TermAtt.Length = 1;
+                    TermAtt.Buffer[0] = (char)Index++;
+                    return true;
+                }
+                return false;
+            }
+
+            public override void Reset()
+            {
+                Index = 'a';
+            }
+        }
+    }
+}
\ No newline at end of file
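
Here the overflow target is term/doc pairs rather than positions: each
document emits the 26 single-letter terms 'a' through 'z', so

    (int.MaxValue / 26 + 1) * 26 = 82,595,525 * 26 = 2,147,483,650

which is three past int.MaxValue, just enough to push the postings over the
32-bit line.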

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs b/src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs
new file mode 100644
index 0000000..42f9329
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs
@@ -0,0 +1,167 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Documents;
+using NUnit.Framework;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper;
+    using CharTermAttribute = Lucene.Net.Analysis.TokenAttributes.CharTermAttribute;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using TextField = TextField;
+    using TokenStream = Lucene.Net.Analysis.TokenStream;
+
+    /// <summary>
+    /// Test indexes 2B docs with 65k freqs each,
+    /// so you get > Integer.MAX_VALUE postings data for the term
+    /// @lucene.experimental
+    /// </summary>
+    [SuppressCodecs("SimpleText", "Memory", "Direct", "Lucene3x")]
+    [TestFixture]
+    public class Test2BPostingsBytes : LuceneTestCase
+    {
+        // disable Lucene3x: older Lucene formats always had this issue.
+        // @Absurd @Ignore: takes ~20GB-30GB of space and 10 minutes;
+        // some codecs need more heap space as well.
+        [Ignore("Very slow. Enable manually by removing Ignore.")]
+        [Test]
+        public virtual void Test([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BPostingsBytes1"));
+            if (dir is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
+            }
+
+            var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                            .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+                            .SetRAMBufferSizeMB(256.0)
+                            .SetMergeScheduler(scheduler)
+                            .SetMergePolicy(NewLogMergePolicy(false, 10))
+                            .SetOpenMode(OpenMode.CREATE);
+            IndexWriter w = new IndexWriter(dir, config);
+
+            MergePolicy mp = w.Config.MergePolicy;
+            if (mp is LogByteSizeMergePolicy)
+            {
+                // 1 petabyte:
+                ((LogByteSizeMergePolicy)mp).MaxMergeMB = 1024 * 1024 * 1024;
+            }
+
+            Document doc = new Document();
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.IndexOptions = IndexOptions.DOCS_AND_FREQS;
+            ft.OmitNorms = true;
+            MyTokenStream tokenStream = new MyTokenStream();
+            Field field = new Field("field", tokenStream, ft);
+            doc.Add(field);
+
+            const int numDocs = 1000;
+            for (int i = 0; i < numDocs; i++)
+            {
+                if (i % 2 == 1) // trick blockPF's little optimization
+                {
+                    tokenStream.n = 65536;
+                }
+                else
+                {
+                    tokenStream.n = 65537;
+                }
+                w.AddDocument(doc);
+            }
+            w.ForceMerge(1);
+            w.Dispose();
+
+            DirectoryReader oneThousand = DirectoryReader.Open(dir);
+            IndexReader[] subReaders = new IndexReader[1000];
+            Arrays.Fill(subReaders, oneThousand);
+            MultiReader mr = new MultiReader(subReaders);
+            BaseDirectoryWrapper dir2 = NewFSDirectory(CreateTempDir("2BPostingsBytes2"));
+            if (dir2 is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)dir2).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
+            }
+            IndexWriter w2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
+            w2.AddIndexes(mr);
+            w2.ForceMerge(1);
+            w2.Dispose();
+            oneThousand.Dispose();
+
+            DirectoryReader oneMillion = DirectoryReader.Open(dir2);
+            subReaders = new IndexReader[2000];
+            Arrays.Fill(subReaders, oneMillion);
+            mr = new MultiReader(subReaders);
+            BaseDirectoryWrapper dir3 = NewFSDirectory(CreateTempDir("2BPostingsBytes3"));
+            if (dir3 is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)dir3).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
+            }
+            IndexWriter w3 = new IndexWriter(dir3, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
+            w3.AddIndexes(mr);
+            w3.ForceMerge(1);
+            w3.Dispose();
+            oneMillion.Dispose();
+
+            dir.Dispose();
+            dir2.Dispose();
+            dir3.Dispose();
+        }
+
+        public sealed class MyTokenStream : TokenStream
+        {
+            internal readonly ICharTermAttribute TermAtt;
+            internal int Index;
+            internal int n;
+
+            public MyTokenStream()
+            {
+                TermAtt = AddAttribute<ICharTermAttribute>();
+            }
+
+            public override bool IncrementToken()
+            {
+                if (Index < n)
+                {
+                    ClearAttributes();
+                    TermAtt.Buffer[0] = 'a';
+                    TermAtt.Length = 1;
+                    Index++;
+                    return true;
+                }
+                return false;
+            }
+
+            public override void Reset()
+            {
+                Index = 0;
+            }
+        }
+    }
+}
\ No newline at end of file
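
Rather than adding two billion documents one at a time, the test multiplies a
small index through AddIndexes:

    pass 1: write 1,000 docs, each with ~65.5k occurrences of the term 'a'
    pass 2: w2.AddIndexes(new MultiReader(1,000 copies)) -> 1,000,000 docs
    pass 3: w3.AddIndexes(new MultiReader(2,000 copies)) -> 2,000,000,000 docs

Two billion documents at ~65.5k freqs apiece yields well over int.MaxValue
bytes of postings for that single term, while only pass 1 pays the cost of
tokenizing.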


[08/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestCachingWrapperFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestCachingWrapperFilter.cs b/src/Lucene.Net.Tests/Search/TestCachingWrapperFilter.cs
new file mode 100644
index 0000000..2bbb21c
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestCachingWrapperFilter.cs
@@ -0,0 +1,513 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Index;
+    using NUnit.Framework;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using IBits = Lucene.Net.Util.IBits;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FixedBitSet = Lucene.Net.Util.FixedBitSet;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using SerialMergeScheduler = Lucene.Net.Index.SerialMergeScheduler;
+    using SlowCompositeReaderWrapper = Lucene.Net.Index.SlowCompositeReaderWrapper;
+    using StringField = StringField;
+    using Term = Lucene.Net.Index.Term;
+
+    [TestFixture]
+    public class TestCachingWrapperFilter : LuceneTestCase
+    {
+        internal Directory Dir;
+        internal DirectoryReader Ir;
+        internal IndexSearcher @is;
+        internal RandomIndexWriter Iw;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Dir = NewDirectory();
+            Iw = new RandomIndexWriter(Random(), Dir, Similarity, TimeZone);
+            Document doc = new Document();
+            Field idField = new StringField("id", "", Field.Store.NO);
+            doc.Add(idField);
+            // add 500 docs with id 0..499
+            for (int i = 0; i < 500; i++)
+            {
+                idField.SetStringValue(Convert.ToString(i));
+                Iw.AddDocument(doc);
+            }
+            // delete up to 20 of them (the random ids may repeat)
+            for (int i = 0; i < 20; i++)
+            {
+                Iw.DeleteDocuments(new Term("id", Convert.ToString(Random().Next(Iw.MaxDoc))));
+            }
+            Ir = Iw.Reader;
+            @is = NewSearcher(Ir);
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            IOUtils.Close(Iw, Ir, Dir);
+            base.TearDown();
+        }
+
+        private void AssertFilterEquals(Filter f1, Filter f2)
+        {
+            Query query = new MatchAllDocsQuery();
+            TopDocs hits1 = @is.Search(query, f1, Ir.MaxDoc);
+            TopDocs hits2 = @is.Search(query, f2, Ir.MaxDoc);
+            Assert.AreEqual(hits1.TotalHits, hits2.TotalHits);
+            CheckHits.CheckEqual(query, hits1.ScoreDocs, hits2.ScoreDocs);
+            // now do it again to confirm caching works
+            TopDocs hits3 = @is.Search(query, f1, Ir.MaxDoc);
+            TopDocs hits4 = @is.Search(query, f2, Ir.MaxDoc);
+            Assert.AreEqual(hits3.TotalHits, hits4.TotalHits);
+            CheckHits.CheckEqual(query, hits3.ScoreDocs, hits4.ScoreDocs);
+        }
+
+        /// <summary>
+        /// test null iterator </summary>
+        [Test]
+        public virtual void TestEmpty()
+        {
+            Query query = new BooleanQuery();
+            Filter expected = new QueryWrapperFilter(query);
+            Filter actual = new CachingWrapperFilter(expected);
+            AssertFilterEquals(expected, actual);
+        }
+
+        /// <summary>
+        /// test iterator returns NO_MORE_DOCS </summary>
+        [Test]
+        public virtual void TestEmpty2()
+        {
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term("id", "0")), Occur.MUST);
+            query.Add(new TermQuery(new Term("id", "0")), Occur.MUST_NOT);
+            Filter expected = new QueryWrapperFilter(query);
+            Filter actual = new CachingWrapperFilter(expected);
+            AssertFilterEquals(expected, actual);
+        }
+
+        /// <summary>
+        /// test null docidset </summary>
+        [Test]
+        public virtual void TestEmpty3()
+        {
+            Filter expected = new PrefixFilter(new Term("bogusField", "bogusVal"));
+            Filter actual = new CachingWrapperFilter(expected);
+            AssertFilterEquals(expected, actual);
+        }
+
+        /// <summary>
+        /// test iterator returns single document </summary>
+        [Test]
+        public virtual void TestSingle()
+        {
+            for (int i = 0; i < 10; i++)
+            {
+                int id = Random().Next(Ir.MaxDoc);
+                Query query = new TermQuery(new Term("id", Convert.ToString(id)));
+                Filter expected = new QueryWrapperFilter(query);
+                Filter actual = new CachingWrapperFilter(expected);
+                AssertFilterEquals(expected, actual);
+            }
+        }
+
+        /// <summary>
+        /// test sparse filters (match only a couple of documents) </summary>
+        [Test]
+        public virtual void TestSparse()
+        {
+            for (int i = 0; i < 10; i++)
+            {
+                int id_start = Random().Next(Ir.MaxDoc - 1);
+                int id_end = id_start + 1;
+                Query query = TermRangeQuery.NewStringRange("id", Convert.ToString(id_start), Convert.ToString(id_end), true, true);
+                Filter expected = new QueryWrapperFilter(query);
+                Filter actual = new CachingWrapperFilter(expected);
+                AssertFilterEquals(expected, actual);
+            }
+        }
+
+        /// <summary>
+        /// test dense filters (match entire index) </summary>
+        [Test]
+        public virtual void TestDense()
+        {
+            Query query = new MatchAllDocsQuery();
+            Filter expected = new QueryWrapperFilter(query);
+            Filter actual = new CachingWrapperFilter(expected);
+            AssertFilterEquals(expected, actual);
+        }
+
+        [Test]
+        public virtual void TestCachingWorks()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            writer.Dispose();
+
+            IndexReader reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
+            AtomicReaderContext context = (AtomicReaderContext)reader.Context;
+            MockFilter filter = new MockFilter();
+            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
+
+            // first time, nested filter is called
+            DocIdSet strongRef = cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs);
+            Assert.IsTrue(filter.WasCalled(), "first time");
+
+            // make sure no exception if cache is holding the wrong docIdSet
+            cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs);
+
+            // second time, nested filter should not be called
+            filter.Clear();
+            cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs);
+            Assert.IsFalse(filter.WasCalled(), "second time");
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNullDocIdSet()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            writer.Dispose();
+
+            IndexReader reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
+            AtomicReaderContext context = (AtomicReaderContext)reader.Context;
+
+            Filter filter = new FilterAnonymousInnerClassHelper(this, context);
+            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
+
+            // the caching filter caches an empty-set sentinel for this case, but returns null to the caller
+            Assert.IsNull(cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs));
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        private class FilterAnonymousInnerClassHelper : Filter
+        {
+            private readonly TestCachingWrapperFilter OuterInstance;
+
+            private AtomicReaderContext Context;
+
+            public FilterAnonymousInnerClassHelper(TestCachingWrapperFilter outerInstance, AtomicReaderContext context)
+            {
+                this.OuterInstance = outerInstance;
+                this.Context = context;
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                return null;
+            }
+        }
+
+        [Test]
+        public virtual void TestNullDocIdSetIterator()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            writer.Dispose();
+
+            IndexReader reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
+            AtomicReaderContext context = (AtomicReaderContext)reader.Context;
+
+            Filter filter = new FilterAnonymousInnerClassHelper2(this, context);
+            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
+
+            // the caching filter caches an empty-set sentinel for this case, but returns null to the caller
+            Assert.IsNull(cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs));
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        private class FilterAnonymousInnerClassHelper2 : Filter
+        {
+            private readonly TestCachingWrapperFilter OuterInstance;
+
+            private AtomicReaderContext Context;
+
+            public FilterAnonymousInnerClassHelper2(TestCachingWrapperFilter outerInstance, AtomicReaderContext context)
+            {
+                this.OuterInstance = outerInstance;
+                this.Context = context;
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                return new DocIdSetAnonymousInnerClassHelper(this);
+            }
+
+            private class DocIdSetAnonymousInnerClassHelper : DocIdSet
+            {
+                private readonly FilterAnonymousInnerClassHelper2 OuterInstance;
+
+                public DocIdSetAnonymousInnerClassHelper(FilterAnonymousInnerClassHelper2 outerInstance)
+                {
+                    this.OuterInstance = outerInstance;
+                }
+
+                public override DocIdSetIterator GetIterator()
+                {
+                    return null;
+                }
+            }
+        }
+
+        private static void AssertDocIdSetCacheable(IndexReader reader, Filter filter, bool shouldCacheable)
+        {
+            Assert.IsTrue(reader.Context is AtomicReaderContext);
+            AtomicReaderContext context = (AtomicReaderContext)reader.Context;
+            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
+            DocIdSet originalSet = filter.GetDocIdSet(context, (context.AtomicReader).LiveDocs);
+            DocIdSet cachedSet = cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs);
+            if (originalSet == null)
+            {
+                Assert.IsNull(cachedSet);
+            }
+            if (cachedSet == null)
+            {
+                Assert.IsTrue(originalSet == null || originalSet.GetIterator() == null);
+            }
+            else
+            {
+                Assert.IsTrue(cachedSet.IsCacheable);
+                Assert.AreEqual(shouldCacheable, originalSet.IsCacheable);
+                //System.out.println("Original: "+originalSet.getClass().getName()+" -- cached: "+cachedSet.getClass().getName());
+                if (originalSet.IsCacheable)
+                {
+                    Assert.AreEqual(originalSet.GetType(), cachedSet.GetType(), "Cached DocIdSet must be of the same class as the uncached one, if cacheable");
+                }
+                else
+                {
+                    Assert.IsTrue(cachedSet is FixedBitSet, "Cached DocIdSet must be a FixedBitSet if the original one was not cacheable");
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestIsCacheAble()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            writer.AddDocument(new Document());
+            writer.Dispose();
+
+            IndexReader reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
+
+            // not cacheable:
+            AssertDocIdSetCacheable(reader, new QueryWrapperFilter(new TermQuery(new Term("test", "value"))), false);
+            // returns default empty docidset, always cacheable:
+            AssertDocIdSetCacheable(reader, NumericRangeFilter.NewInt32Range("test", Convert.ToInt32(10000), Convert.ToInt32(-10000), true, true), true);
+            // is cacheable:
+            AssertDocIdSetCacheable(reader, FieldCacheRangeFilter.NewInt32Range("test", Convert.ToInt32(10), Convert.ToInt32(20), true, true), true);
+            // a fixedbitset filter is always cacheable
+            AssertDocIdSetCacheable(reader, new FilterAnonymousInnerClassHelper3(this), true);
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        private class FilterAnonymousInnerClassHelper3 : Filter
+        {
+            private readonly TestCachingWrapperFilter OuterInstance;
+
+            public FilterAnonymousInnerClassHelper3(TestCachingWrapperFilter outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                return new FixedBitSet(context.Reader.MaxDoc);
+            }
+        }
+
+        [Test]
+        public virtual void TestEnforceDeletions()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(NewLogMergePolicy(10)));
+            // the asserts below require that no unexpected merges occur:
+
+            // NOTE: we cannot use writer.Reader because RIW (on
+            // flipping a coin) may give us a newly opened reader,
+            // but we call OpenIfChanged on this reader below and
+            // must get an NRT reader:
+            DirectoryReader reader = DirectoryReader.Open(writer.w, true);
+            // same reason we don't wrap?
+            IndexSearcher searcher = NewSearcher(reader, false, Similarity);
+
+            // add a doc, refresh the reader, and check that it's there
+            Document doc = new Document();
+            doc.Add(NewStringField("id", "1", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            reader = RefreshReader(reader);
+            searcher = NewSearcher(reader, false, Similarity);
+
+            TopDocs docs = searcher.Search(new MatchAllDocsQuery(), 1);
+            Assert.AreEqual(1, docs.TotalHits, "Should find a hit...");
+
+            Filter startFilter = new QueryWrapperFilter(new TermQuery(new Term("id", "1")));
+
+            CachingWrapperFilter filter = new CachingWrapperFilter(startFilter);
+
+            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
+            Assert.IsTrue(filter.SizeInBytes() > 0);
+
+            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
+
+            Query constantScore = new ConstantScoreQuery(filter);
+            docs = searcher.Search(constantScore, 1);
+            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
+
+            // make sure we get a cache hit when we reopen reader
+            // that had no change to deletions
+
+            // fake delete (deletes nothing):
+            writer.DeleteDocuments(new Term("foo", "bar"));
+
+            IndexReader oldReader = reader;
+            reader = RefreshReader(reader);
+            Assert.IsTrue(reader == oldReader);
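+            // missCount counts how many times the wrapped filter had to be re-evaluated (i.e. cache misses)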
+            int missCount = filter.missCount;
+            docs = searcher.Search(constantScore, 1);
+            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
+
+            // cache hit:
+            Assert.AreEqual(missCount, filter.missCount);
+
+            // now delete the doc, refresh the reader, and see that it's not there
+            writer.DeleteDocuments(new Term("id", "1"));
+
+            // NOTE: important to hold ref here so GC doesn't clear
+            // the cache entry!  Else the assert below may sometimes
+            // fail:
+            oldReader = reader;
+            reader = RefreshReader(reader);
+
+            searcher = NewSearcher(reader, false, Similarity);
+
+            missCount = filter.missCount;
+            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
+            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");
+
+            // cache hit
+            Assert.AreEqual(missCount, filter.missCount);
+            docs = searcher.Search(constantScore, 1);
+            Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");
+
+            // apply deletes dynamically:
+            filter = new CachingWrapperFilter(startFilter);
+            writer.AddDocument(doc);
+            reader = RefreshReader(reader);
+            searcher = NewSearcher(reader, false, Similarity);
+
+            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
+            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
+            missCount = filter.missCount;
+            Assert.IsTrue(missCount > 0);
+            constantScore = new ConstantScoreQuery(filter);
+            docs = searcher.Search(constantScore, 1);
+            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
+            Assert.AreEqual(missCount, filter.missCount);
+
+            writer.AddDocument(doc);
+
+            // NOTE: important to hold ref here so GC doesn't clear
+            // the cache entry!  Else the assert below may sometimes
+            // fail:
+            oldReader = reader;
+
+            reader = RefreshReader(reader);
+            searcher = NewSearcher(reader, false, Similarity);
+
+            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
+            Assert.AreEqual(2, docs.TotalHits, "[query + filter] Should find 2 hits...");
+            Assert.IsTrue(filter.missCount > missCount);
+            missCount = filter.missCount;
+
+            constantScore = new ConstantScoreQuery(filter);
+            docs = searcher.Search(constantScore, 1);
+            Assert.AreEqual(2, docs.TotalHits, "[just filter] Should find 2 hits...");
+            Assert.AreEqual(missCount, filter.missCount);
+
+            // now delete the doc, refresh the reader, and see that it's not there
+            writer.DeleteDocuments(new Term("id", "1"));
+
+            reader = RefreshReader(reader);
+            searcher = NewSearcher(reader, false, Similarity);
+
+            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
+            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");
+            // CWF reused the same entry (it dynamically applied the deletes):
+            Assert.AreEqual(missCount, filter.missCount);
+
+            docs = searcher.Search(constantScore, 1);
+            Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");
+            // CWF reused the same entry (it dynamically applied the deletes):
+            Assert.AreEqual(missCount, filter.missCount);
+
+            // NOTE: this looks pointless, but it keeps the runtime from
+            // optimizing away our reference to oldReader, which would let
+            // CachingWrapperFilter's WeakHashMap drop the cache
+            // entry:
+            Assert.IsTrue(oldReader != null);
+
+            reader.Dispose();
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        private static DirectoryReader RefreshReader(DirectoryReader reader)
+        {
+            DirectoryReader oldReader = reader;
+            reader = DirectoryReader.OpenIfChanged(reader);
+            if (reader != null)
+            {
+                oldReader.Dispose();
+                return reader;
+            }
+            else
+            {
+                return oldReader;
+            }
+        }
+    }
+}
\ No newline at end of file

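The tests above all revolve around one behavior: CachingWrapperFilter computes
the wrapped filter's DocIdSet once per segment and serves the cached set on
subsequent searches. A hedged usage sketch (the field name and searcher are
assumed for illustration, not taken from the commit):

    using Lucene.Net.Index;
    using Lucene.Net.Search;

    // Illustrative only: cache an expensive filter so its per-segment
    // DocIdSet is computed once and reused by later searches.
    public static class CachingFilterSketch
    {
        public static TopDocs SearchTwice(IndexSearcher searcher, int maxDoc)
        {
            Filter inner = new QueryWrapperFilter(new TermQuery(new Term("id", "42")));
            Filter cached = new CachingWrapperFilter(inner);

            Query all = new MatchAllDocsQuery();
            searcher.Search(all, cached, maxDoc);        // first call fills the cache
            return searcher.Search(all, cached, maxDoc); // second call is a cache hit
        }
    }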
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestComplexExplanations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestComplexExplanations.cs b/src/Lucene.Net.Tests/Search/TestComplexExplanations.cs
new file mode 100644
index 0000000..4408fa9
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestComplexExplanations.cs
@@ -0,0 +1,389 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Search.Spans;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// TestExplanations subclass that builds up super crazy complex queries
+    /// on the assumption that if the explanations work out right for them,
+    /// they should work for anything.
+    /// </summary>
+    [TestFixture]
+    public class TestComplexExplanations : TestExplanations
+    {
+        /// <summary>
+        /// Override the Similarity used in our searcher with one that plays
+        /// nice with boosts of 0.0
+        /// </summary>
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Searcher.Similarity = CreateQnorm1Similarity();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Searcher.Similarity = IndexSearcher.DefaultSimilarity;
+            base.TearDown();
+        }
+
+        // must be static for weight serialization tests
+        private static DefaultSimilarity CreateQnorm1Similarity()
+        {
+            return new DefaultSimilarityAnonymousInnerClassHelper();
+        }
+
+        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        {
+            public DefaultSimilarityAnonymousInnerClassHelper()
+            {
+            }
+
+            public override float QueryNorm(float sumOfSquaredWeights)
+            {
+                return 1.0f; // / (float) Math.sqrt(1.0f + sumOfSquaredWeights);
+            }
+        }
+
+        [Test]
+        public virtual void Test1()
+        {
+            BooleanQuery q = new BooleanQuery();
+
+            PhraseQuery phraseQuery = new PhraseQuery();
+            phraseQuery.Slop = 1;
+            phraseQuery.Add(new Term(FIELD, "w1"));
+            phraseQuery.Add(new Term(FIELD, "w2"));
+            q.Add(phraseQuery, Occur.MUST);
+            q.Add(Snear(St("w2"), Sor("w5", "zz"), 4, true), Occur.SHOULD);
+            q.Add(Snear(Sf("w3", 2), St("w2"), St("w3"), 5, true), Occur.SHOULD);
+
+            Query t = new FilteredQuery(new TermQuery(new Term(FIELD, "xx")), new ItemizedFilter(new int[] { 1, 3 }));
+            t.Boost = 1000;
+            q.Add(t, Occur.SHOULD);
+
+            t = new ConstantScoreQuery(new ItemizedFilter(new int[] { 0, 2 }));
+            t.Boost = 30;
+            q.Add(t, Occur.SHOULD);
+
+            DisjunctionMaxQuery dm = new DisjunctionMaxQuery(0.2f);
+            dm.Add(Snear(St("w2"), Sor("w5", "zz"), 4, true));
+            dm.Add(new TermQuery(new Term(FIELD, "QQ")));
+
+            BooleanQuery xxYYZZ = new BooleanQuery();
+            xxYYZZ.Add(new TermQuery(new Term(FIELD, "xx")), Occur.SHOULD);
+            xxYYZZ.Add(new TermQuery(new Term(FIELD, "yy")), Occur.SHOULD);
+            xxYYZZ.Add(new TermQuery(new Term(FIELD, "zz")), Occur.MUST_NOT);
+
+            dm.Add(xxYYZZ);
+
+            BooleanQuery xxW1 = new BooleanQuery();
+            xxW1.Add(new TermQuery(new Term(FIELD, "xx")), Occur.MUST_NOT);
+            xxW1.Add(new TermQuery(new Term(FIELD, "w1")), Occur.MUST_NOT);
+
+            dm.Add(xxW1);
+
+            DisjunctionMaxQuery dm2 = new DisjunctionMaxQuery(0.5f);
+            dm2.Add(new TermQuery(new Term(FIELD, "w1")));
+            dm2.Add(new TermQuery(new Term(FIELD, "w2")));
+            dm2.Add(new TermQuery(new Term(FIELD, "w3")));
+            dm.Add(dm2);
+
+            q.Add(dm, Occur.SHOULD);
+
+            BooleanQuery b = new BooleanQuery();
+            b.MinimumNumberShouldMatch = 2;
+            b.Add(Snear("w1", "w2", 1, true), Occur.SHOULD);
+            b.Add(Snear("w2", "w3", 1, true), Occur.SHOULD);
+            b.Add(Snear("w1", "w3", 3, true), Occur.SHOULD);
+
+            q.Add(b, Occur.SHOULD);
+
+            Qtest(q, new int[] { 0, 1, 2 });
+        }
+
+        [Test]
+        public virtual void Test2()
+        {
+            BooleanQuery q = new BooleanQuery();
+
+            PhraseQuery phraseQuery = new PhraseQuery();
+            phraseQuery.Slop = 1;
+            phraseQuery.Add(new Term(FIELD, "w1"));
+            phraseQuery.Add(new Term(FIELD, "w2"));
+            q.Add(phraseQuery, Occur.MUST);
+            q.Add(Snear(St("w2"), Sor("w5", "zz"), 4, true), Occur.SHOULD);
+            q.Add(Snear(Sf("w3", 2), St("w2"), St("w3"), 5, true), Occur.SHOULD);
+
+            Query t = new FilteredQuery(new TermQuery(new Term(FIELD, "xx")), new ItemizedFilter(new int[] { 1, 3 }));
+            t.Boost = 1000;
+            q.Add(t, Occur.SHOULD);
+
+            t = new ConstantScoreQuery(new ItemizedFilter(new int[] { 0, 2 }));
+            t.Boost = -20.0f;
+            q.Add(t, Occur.SHOULD);
+
+            DisjunctionMaxQuery dm = new DisjunctionMaxQuery(0.2f);
+            dm.Add(Snear(St("w2"), Sor("w5", "zz"), 4, true));
+            dm.Add(new TermQuery(new Term(FIELD, "QQ")));
+
+            BooleanQuery xxYYZZ = new BooleanQuery();
+            xxYYZZ.Add(new TermQuery(new Term(FIELD, "xx")), Occur.SHOULD);
+            xxYYZZ.Add(new TermQuery(new Term(FIELD, "yy")), Occur.SHOULD);
+            xxYYZZ.Add(new TermQuery(new Term(FIELD, "zz")), Occur.MUST_NOT);
+
+            dm.Add(xxYYZZ);
+
+            BooleanQuery xxW1 = new BooleanQuery();
+            xxW1.Add(new TermQuery(new Term(FIELD, "xx")), Occur.MUST_NOT);
+            xxW1.Add(new TermQuery(new Term(FIELD, "w1")), Occur.MUST_NOT);
+
+            dm.Add(xxW1);
+
+            DisjunctionMaxQuery dm2 = new DisjunctionMaxQuery(0.5f);
+            dm2.Add(new TermQuery(new Term(FIELD, "w1")));
+            dm2.Add(new TermQuery(new Term(FIELD, "w2")));
+            dm2.Add(new TermQuery(new Term(FIELD, "w3")));
+            dm.Add(dm2);
+
+            q.Add(dm, Occur.SHOULD);
+
+            BooleanQuery b = new BooleanQuery();
+            b.MinimumNumberShouldMatch = 2;
+            b.Add(Snear("w1", "w2", 1, true), Occur.SHOULD);
+            b.Add(Snear("w2", "w3", 1, true), Occur.SHOULD);
+            b.Add(Snear("w1", "w3", 3, true), Occur.SHOULD);
+            b.Boost = 0.0f;
+
+            q.Add(b, Occur.SHOULD);
+
+            Qtest(q, new int[] { 0, 1, 2 });
+        }
+
+        // :TODO: we really need more crazy complex cases.
+
+        // //////////////////////////////////////////////////////////////////
+
+        // The rest of these aren't that complex, but they are <i>somewhat</i>
+        // complex, and they expose weaknesses in handling queries that match
+        // with scores of 0 when wrapped in other queries
+
+        [Test]
+        public virtual void TestT3()
+        {
+            TermQuery query = new TermQuery(new Term(FIELD, "w1"));
+            query.Boost = 0;
+            Bqtest(query, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestMA3()
+        {
+            Query q = new MatchAllDocsQuery();
+            q.Boost = 0;
+            Bqtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestFQ5()
+        {
+            TermQuery query = new TermQuery(new Term(FIELD, "xx"));
+            query.Boost = 0;
+            Bqtest(new FilteredQuery(query, new ItemizedFilter(new int[] { 1, 3 })), new int[] { 3 });
+        }
+
+        [Test]
+        public virtual void TestCSQ4()
+        {
+            Query q = new ConstantScoreQuery(new ItemizedFilter(new int[] { 3 }));
+            q.Boost = 0;
+            Bqtest(q, new int[] { 3 });
+        }
+
+        [Test]
+        public virtual void TestDMQ10()
+        {
+            DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
+
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(FIELD, "yy")), Occur.SHOULD);
+            TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w5"));
+            boostedQuery.Boost = 100;
+            query.Add(boostedQuery, Occur.SHOULD);
+
+            q.Add(query);
+
+            TermQuery xxBoostedQuery = new TermQuery(new Term(FIELD, "xx"));
+            xxBoostedQuery.Boost = 0;
+
+            q.Add(xxBoostedQuery);
+            q.Boost = 0.0f;
+            Bqtest(q, new int[] { 0, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestMPQ7()
+        {
+            MultiPhraseQuery q = new MultiPhraseQuery();
+            q.Add(Ta(new string[] { "w1" }));
+            q.Add(Ta(new string[] { "w2" }));
+            q.Slop = 1;
+            q.Boost = 0.0f;
+            Bqtest(q, new int[] { 0, 1, 2 });
+        }
+
+        [Test]
+        public virtual void TestBQ12()
+        {
+            // NOTE: using qtest not bqtest
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(FIELD, "w1")), Occur.SHOULD);
+            TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w2"));
+            boostedQuery.Boost = 0;
+            query.Add(boostedQuery, Occur.SHOULD);
+
+            Qtest(query, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestBQ13()
+        {
+            // NOTE: using qtest not bqtest
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(FIELD, "w1")), Occur.SHOULD);
+            TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w5"));
+            boostedQuery.Boost = 0;
+            query.Add(boostedQuery, Occur.MUST_NOT);
+
+            Qtest(query, new int[] { 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestBQ18()
+        {
+            // NOTE: using qtest not bqtest
+            BooleanQuery query = new BooleanQuery();
+            TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w1"));
+            boostedQuery.Boost = 0;
+            query.Add(boostedQuery, Occur.MUST);
+            query.Add(new TermQuery(new Term(FIELD, "w2")), Occur.SHOULD);
+
+            Qtest(query, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestBQ21()
+        {
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(FIELD, "w1")), Occur.MUST);
+            query.Add(new TermQuery(new Term(FIELD, "w2")), Occur.SHOULD);
+            query.Boost = 0;
+
+            Bqtest(query, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestBQ22()
+        {
+            BooleanQuery query = new BooleanQuery();
+            TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w1"));
+            boostedQuery.Boost = 0;
+            query.Add(boostedQuery, Occur.MUST);
+            query.Add(new TermQuery(new Term(FIELD, "w2")), Occur.SHOULD);
+            query.Boost = 0;
+
+            Bqtest(query, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestST3()
+        {
+            SpanQuery q = St("w1");
+            q.Boost = 0;
+            Bqtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestST6()
+        {
+            SpanQuery q = St("xx");
+            q.Boost = 0;
+            Qtest(q, new int[] { 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSF3()
+        {
+            SpanQuery q = Sf(("w1"), 1);
+            q.Boost = 0;
+            Bqtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSF7()
+        {
+            SpanQuery q = Sf(("xx"), 3);
+            q.Boost = 0;
+            Bqtest(q, new int[] { 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNot3()
+        {
+            SpanQuery q = Snot(Sf("w1", 10), St("QQ"));
+            q.Boost = 0;
+            Bqtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNot6()
+        {
+            SpanQuery q = Snot(Sf("w1", 10), St("xx"));
+            q.Boost = 0;
+            Bqtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNot8()
+        {
+            // NOTE: using qtest not bqtest
+            SpanQuery f = Snear("w1", "w3", 10, true);
+            f.Boost = 0;
+            SpanQuery q = Snot(f, St("xx"));
+            Qtest(q, new int[] { 0, 1, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNot9()
+        {
+            // NOTE: using qtest not bqtest
+            SpanQuery t = St("xx");
+            t.Boost = 0;
+            SpanQuery q = Snot(Snear("w1", "w3", 10, true), t);
+            Qtest(q, new int[] { 0, 1, 3 });
+        }
+    }
+}
\ No newline at end of file

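Test1 and Test2 above lean on two query-construction idioms worth spelling
out: DisjunctionMaxQuery scores a document by its best-matching clause plus a
tie-breaker fraction of the remaining clause scores, and
BooleanQuery.MinimumNumberShouldMatch requires at least k of the SHOULD
clauses to match. A hedged sketch (field and term names are illustrative):

    using Lucene.Net.Index;
    using Lucene.Net.Search;

    // Illustrative only: require 2 of 3 SHOULD clauses, then feed the result
    // into a max-based disjunction with a 0.2 tie-breaker.
    public static Query BuildSketchQuery()
    {
        BooleanQuery b = new BooleanQuery();
        b.MinimumNumberShouldMatch = 2;   // at least 2 of the 3 SHOULD clauses must match
        b.Add(new TermQuery(new Term("f", "w1")), Occur.SHOULD);
        b.Add(new TermQuery(new Term("f", "w2")), Occur.SHOULD);
        b.Add(new TermQuery(new Term("f", "w3")), Occur.SHOULD);

        // score = max(clause scores) + 0.2 * sum(scores of the other clauses)
        DisjunctionMaxQuery dm = new DisjunctionMaxQuery(0.2f);
        dm.Add(new TermQuery(new Term("f", "xx")));
        dm.Add(b);
        return dm;
    }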
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestComplexExplanationsOfNonMatches.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestComplexExplanationsOfNonMatches.cs b/src/Lucene.Net.Tests/Search/TestComplexExplanationsOfNonMatches.cs
new file mode 100644
index 0000000..18a2759
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestComplexExplanationsOfNonMatches.cs
@@ -0,0 +1,197 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Search
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Subclass of TestComplexExplanations that verifies non-matches.
+    /// </summary>
+    [TestFixture]
+    public class TestComplexExplanationsOfNonMatches : TestComplexExplanations
+    {
+        /// <summary>
+        /// Overrides superclass to ignore matches and focus on non-matches
+        /// </summary>
+        /// <seealso cref="CheckHits.CheckNoMatchExplanations"/>
+        public override void Qtest(Query q, int[] expDocNrs)
+        {
+            CheckHits.CheckNoMatchExplanations(q, FIELD, Searcher, expDocNrs);
+        }
+
+
+        #region TestComplexExplanations
+        // LUCENENET NOTE: Tests in a base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void Test1()
+        {
+            base.Test1();
+        }
+
+        [Test]
+        public override void Test2()
+        {
+            base.Test2();
+        }
+
+        // :TODO: we really need more crazy complex cases.
+
+        // //////////////////////////////////////////////////////////////////
+
+        // The rest of these aren't that complex, but they are <i>somewhat</i>
+        // complex, and they expose weaknesses in handling queries that match
+        // with scores of 0 when wrapped in other queries
+
+        [Test]
+        public override void TestT3()
+        {
+            base.TestT3();
+        }
+
+        [Test]
+        public override void TestMA3()
+        {
+            base.TestMA3();
+        }
+
+        [Test]
+        public override void TestFQ5()
+        {
+            base.TestFQ5();
+        }
+
+        [Test]
+        public override void TestCSQ4()
+        {
+            base.TestCSQ4();
+        }
+
+        [Test]
+        public override void TestDMQ10()
+        {
+            base.TestDMQ10();
+        }
+
+        [Test]
+        public override void TestMPQ7()
+        {
+            base.TestMPQ7();
+        }
+
+        [Test]
+        public override void TestBQ12()
+        {
+            base.TestBQ12();
+        }
+
+        [Test]
+        public override void TestBQ13()
+        {
+            base.TestBQ13();
+        }
+
+        [Test]
+        public override void TestBQ18()
+        {
+            base.TestBQ18();
+        }
+
+        [Test]
+        public override void TestBQ21()
+        {
+            base.TestBQ21();
+        }
+
+        [Test]
+        public override void TestBQ22()
+        {
+            base.TestBQ22();
+        }
+
+        [Test]
+        public override void TestST3()
+        {
+            base.TestST3();
+        }
+
+        [Test]
+        public override void TestST6()
+        {
+            base.TestST6();
+        }
+
+        [Test]
+        public override void TestSF3()
+        {
+            base.TestSF3();
+        }
+
+        [Test]
+        public override void TestSF7()
+        {
+            base.TestSF7();
+        }
+
+        [Test]
+        public override void TestSNot3()
+        {
+            base.TestSNot3();
+        }
+
+        [Test]
+        public override void TestSNot6()
+        {
+            base.TestSNot6();
+        }
+
+        [Test]
+        public override void TestSNot8()
+        {
+            base.TestSNot8();
+        }
+
+        [Test]
+        public override void TestSNot9()
+        {
+            base.TestSNot9();
+        }
+
+        #endregion
+
+        #region TestExplanations
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+
+        /// <summary>
+        /// Placeholder: the test framework freaks out if you don't have at
+        /// least one test; making the class abstract doesn't help.
+        /// </summary>
+        [Test]
+        public override void TestNoop()
+        {
+            base.TestNoop();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

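The mechanical overrides above implement a single pattern: the derived fixture
re-declares every inherited test and delegates to the base implementation, so
the test runner discovers and executes each test in the subclass's context
(here, with the overridden Qtest). A minimal sketch of the pattern (class
names invented for illustration):

    using NUnit.Framework;

    [TestFixture]
    public class BaseExplanationTests
    {
        [Test]
        public virtual void TestSomething()
        {
            // shared assertions live here
        }
    }

    [TestFixture]
    public class DerivedExplanationTests : BaseExplanationTests
    {
        // Re-declared so the test runs against this fixture's overrides.
        [Test]
        public override void TestSomething()
        {
            base.TestSomething();
        }
    }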
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestConjunctions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestConjunctions.cs b/src/Lucene.Net.Tests/Search/TestConjunctions.cs
new file mode 100644
index 0000000..2f5cdc2
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestConjunctions.cs
@@ -0,0 +1,161 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using FieldInvertState = Lucene.Net.Index.FieldInvertState;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Similarity = Lucene.Net.Search.Similarities.Similarity;
+    using Store = Field.Store;
+    using StringField = StringField;
+    using Term = Lucene.Net.Index.Term;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestConjunctions : LuceneTestCase
+    {
+        internal Analyzer Analyzer;
+        internal Directory Dir;
+        internal IndexReader Reader;
+        internal IndexSearcher Searcher;
+
+        internal const string F1 = "title";
+        internal const string F2 = "body";
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Analyzer = new MockAnalyzer(Random());
+            Dir = NewDirectory();
+            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, Analyzer);
+            config.SetMergePolicy(NewLogMergePolicy()); // we will use docids to validate
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Dir, config);
+            writer.AddDocument(Doc("lucene", "lucene is a very popular search engine library"));
+            writer.AddDocument(Doc("solr", "solr is a very popular search server and is using lucene"));
+            writer.AddDocument(Doc("nutch", "nutch is an internet search engine with web crawler and is using lucene and hadoop"));
+            Reader = writer.Reader;
+            writer.Dispose();
+            Searcher = NewSearcher(Reader);
+            Searcher.Similarity = new TFSimilarity();
+        }
+
+        internal static Document Doc(string v1, string v2)
+        {
+            Document doc = new Document();
+            doc.Add(new StringField(F1, v1, Store.YES));
+            doc.Add(new TextField(F2, v2, Store.YES));
+            return doc;
+        }
+
+        [Test]
+        public virtual void TestTermConjunctionsWithOmitTF()
+        {
+            BooleanQuery bq = new BooleanQuery();
+            bq.Add(new TermQuery(new Term(F1, "nutch")), Occur.MUST);
+            bq.Add(new TermQuery(new Term(F2, "is")), Occur.MUST);
+            TopDocs td = Searcher.Search(bq, 3);
+            Assert.AreEqual(1, td.TotalHits);
+            Assert.AreEqual(3F, td.ScoreDocs[0].Score, 0.001F); // f1:nutch + f2:is + f2:is
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        // Similarity that returns the TF as score
+        private class TFSimilarity : Similarity
+        {
+            public override long ComputeNorm(FieldInvertState state)
+            {
+                return 1; // we don't care
+            }
+
+            public override SimWeight ComputeWeight(float queryBoost, CollectionStatistics collectionStats, params TermStatistics[] termStats)
+            {
+                return new SimWeightAnonymousInnerClassHelper(this);
+            }
+
+            private class SimWeightAnonymousInnerClassHelper : SimWeight
+            {
+                private readonly TFSimilarity OuterInstance;
+
+                public SimWeightAnonymousInnerClassHelper(TFSimilarity outerInstance)
+                {
+                    this.OuterInstance = outerInstance;
+                }
+
+                public override float GetValueForNormalization()
+                {
+                    return 1; // we don't care
+                }
+
+                public override void Normalize(float queryNorm, float topLevelBoost)
+                {
+                    // we don't care
+                }
+            }
+
+            public override SimScorer GetSimScorer(SimWeight weight, AtomicReaderContext context)
+            {
+                return new SimScorerAnonymousInnerClassHelper(this);
+            }
+
+            private class SimScorerAnonymousInnerClassHelper : SimScorer
+            {
+                private readonly TFSimilarity OuterInstance;
+
+                public SimScorerAnonymousInnerClassHelper(TFSimilarity outerInstance)
+                {
+                    this.OuterInstance = outerInstance;
+                }
+
+                public override float Score(int doc, float freq)
+                {
+                    return freq;
+                }
+
+                public override float ComputeSlopFactor(int distance)
+                {
+                    return 1F;
+                }
+
+                public override float ComputePayloadFactor(int doc, int start, int end, BytesRef payload)
+                {
+                    return 1F;
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

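The TFSimilarity above makes scores easy to predict: each clause contributes
exactly its term frequency, so the MUST conjunction in
TestTermConjunctionsWithOmitTF scores 1 ("nutch" once in the title) plus 2
("is" twice in the body), i.e. 3. A hedged sketch of wiring it up (assumes an
already-open IndexReader named reader, and that a TFSimilarity like the one
defined above is accessible):

    // Illustrative only: plug a custom Similarity into a searcher and score
    // a two-clause conjunction; with TF-as-score, the score is the TF sum.
    IndexSearcher searcher = new IndexSearcher(reader);
    searcher.Similarity = new TFSimilarity();

    BooleanQuery bq = new BooleanQuery();
    bq.Add(new TermQuery(new Term("title", "nutch")), Occur.MUST);
    bq.Add(new TermQuery(new Term("body", "is")), Occur.MUST);

    float score = searcher.Search(bq, 1).ScoreDocs[0].Score;  // expected: 3.0f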
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs b/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs
new file mode 100644
index 0000000..e458c63
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs
@@ -0,0 +1,241 @@
+using System.Diagnostics;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// This class only tests some basic functionality of CSQ; the main parts are mostly
+    /// covered by the MultiTermQuery tests, and explanations are tested in TestExplanations.
+    /// </summary>
+    [TestFixture]
+    public class TestConstantScoreQuery : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestCSQ()
+        {
+            Query q1 = new ConstantScoreQuery(new TermQuery(new Term("a", "b")));
+            Query q2 = new ConstantScoreQuery(new TermQuery(new Term("a", "c")));
+            Query q3 = new ConstantScoreQuery(TermRangeFilter.NewStringRange("a", "b", "c", true, true));
+            QueryUtils.Check(q1);
+            QueryUtils.Check(q2);
+            QueryUtils.CheckEqual(q1, q1);
+            QueryUtils.CheckEqual(q2, q2);
+            QueryUtils.CheckEqual(q3, q3);
+            QueryUtils.CheckUnequal(q1, q2);
+            QueryUtils.CheckUnequal(q2, q3);
+            QueryUtils.CheckUnequal(q1, q3);
+            QueryUtils.CheckUnequal(q1, new TermQuery(new Term("a", "b")));
+        }
+
+        private void CheckHits(IndexSearcher searcher, Query q, float expectedScore, string scorerClassName, string innerScorerClassName)
+        {
+            int[] count = new int[1];
+            searcher.Search(q, new CollectorAnonymousInnerClassHelper(this, expectedScore, scorerClassName, innerScorerClassName, count));
+            Assert.AreEqual(1, count[0], "invalid number of results");
+        }
+
+        private class CollectorAnonymousInnerClassHelper : ICollector
+        {
+            private readonly TestConstantScoreQuery OuterInstance;
+
+            private float ExpectedScore;
+            private string ScorerClassName;
+            private string InnerScorerClassName;
+            private int[] Count;
+
+            public CollectorAnonymousInnerClassHelper(TestConstantScoreQuery outerInstance, float expectedScore, string scorerClassName, string innerScorerClassName, int[] count)
+            {
+                this.OuterInstance = outerInstance;
+                this.ExpectedScore = expectedScore;
+                this.ScorerClassName = scorerClassName;
+                this.InnerScorerClassName = innerScorerClassName;
+                this.Count = count;
+            }
+
+            private Scorer scorer;
+
+            public virtual void SetScorer(Scorer scorer)
+            {
+                this.scorer = scorer;
+                Assert.AreEqual(ScorerClassName, scorer.GetType().Name, "Scorer is implemented by wrong class");
+                if (InnerScorerClassName != null && scorer is ConstantScoreQuery.ConstantScorer)
+                {
+                    ConstantScoreQuery.ConstantScorer innerScorer = (ConstantScoreQuery.ConstantScorer)scorer;
+                    Assert.AreEqual(InnerScorerClassName, innerScorer.docIdSetIterator.GetType().Name, "inner Scorer is implemented by wrong class");
+                }
+            }
+
+            public virtual void Collect(int doc)
+            {
+                Assert.AreEqual(ExpectedScore, this.scorer.GetScore(), 0, "Score differs from expected");
+                Count[0]++;
+            }
+
+            public virtual void SetNextReader(AtomicReaderContext context)
+            {
+            }
+
+            public virtual bool AcceptsDocsOutOfOrder
+            {
+                get { return true; }
+            }
+        }
+
+        [Test]
+        public virtual void TestWrapped2Times()
+        {
+            Directory directory = null;
+            IndexReader reader = null;
+            IndexSearcher searcher = null;
+            try
+            {
+                directory = NewDirectory();
+                RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);
+
+                Document doc = new Document();
+                doc.Add(NewStringField("field", "term", Field.Store.NO));
+                writer.AddDocument(doc);
+
+                reader = writer.Reader;
+                writer.Dispose();
+                // we don't wrap with AssertingIndexSearcher so that SetScorer receives the original scorer.
+                searcher = NewSearcher(reader, true, false);
+
+                // set a similarity that does not normalize our boost away
+                searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper(this);
+
+                Query csq1 = new ConstantScoreQuery(new TermQuery(new Term("field", "term")));
+                csq1.Boost = 2.0f;
+                Query csq2 = new ConstantScoreQuery(csq1);
+                csq2.Boost = 5.0f;
+
+                BooleanQuery bq = new BooleanQuery();
+                bq.Add(csq1, Occur.SHOULD);
+                bq.Add(csq2, Occur.SHOULD);
+
+                Query csqbq = new ConstantScoreQuery(bq);
+                csqbq.Boost = 17.0f;
+
+                CheckHits(searcher, csq1, csq1.Boost, typeof(ConstantScoreQuery.ConstantScorer).Name, null);
+                CheckHits(searcher, csq2, csq2.Boost, typeof(ConstantScoreQuery.ConstantScorer).Name, typeof(ConstantScoreQuery.ConstantScorer).Name);
+
+                // for the combined BQ, the scorer should always be BooleanScorer's BucketScorer, because our scorer supports out-of-order collection!
+                string bucketScorerClass = typeof(FakeScorer).Name;
+                CheckHits(searcher, bq, csq1.Boost + csq2.Boost, bucketScorerClass, null);
+                CheckHits(searcher, csqbq, csqbq.Boost, typeof(ConstantScoreQuery.ConstantScorer).Name, bucketScorerClass);
+            }
+            finally
+            {
+                if (reader != null)
+                {
+                    reader.Dispose();
+                }
+                if (directory != null)
+                {
+                    directory.Dispose();
+                }
+            }
+        }
+
+        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        {
+            private readonly TestConstantScoreQuery OuterInstance;
+
+            public DefaultSimilarityAnonymousInnerClassHelper(TestConstantScoreQuery outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override float QueryNorm(float sumOfSquaredWeights)
+            {
+                return 1.0f;
+            }
+        }
+
+        [Test]
+        public virtual void TestConstantScoreQueryAndFilter()
+        {
+            Directory d = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewStringField("field", "a", Field.Store.NO));
+            w.AddDocument(doc);
+            doc = new Document();
+            doc.Add(NewStringField("field", "b", Field.Store.NO));
+            w.AddDocument(doc);
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            Filter filterB = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "b"))));
+            Query query = new ConstantScoreQuery(filterB);
+
+            IndexSearcher s = NewSearcher(r);
+            Assert.AreEqual(1, s.Search(query, filterB, 1).TotalHits); // Query for field:b, Filter field:b
+
+            Filter filterA = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "a"))));
+            query = new ConstantScoreQuery(filterA);
+
+            Assert.AreEqual(0, s.Search(query, filterB, 1).TotalHits); // Query field:b, Filter field:a
+
+            r.Dispose();
+            d.Dispose();
+        }
+
+        // LUCENE-5307
+        // don't reuse the scorers of filters, since they were created with bulkScorer=false
+        [Test]
+        public virtual void TestQueryWrapperFilter()
+        {
+            Directory d = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewStringField("field", "a", Field.Store.NO));
+            w.AddDocument(doc);
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            Filter filter = new QueryWrapperFilter(AssertingQuery.Wrap(Random(), new TermQuery(new Term("field", "a"))));
+            IndexSearcher s = NewSearcher(r);
+            Debug.Assert(s is AssertingIndexSearcher);
+            // this used to fail
+            s.Search(new ConstantScoreQuery(filter), new TotalHitCountCollector());
+
+            // check the rewrite
+            Query rewritten = (new ConstantScoreQuery(filter)).Rewrite(r);
+            Assert.IsTrue(rewritten is ConstantScoreQuery);
+            Assert.IsTrue(((ConstantScoreQuery)rewritten).Query is AssertingQuery);
+
+            r.Dispose();
+            d.Dispose();
+        }
+    }
+}
\ No newline at end of file

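The common thread in the tests above: ConstantScoreQuery gives every matching
document the same score, namely the query's boost, provided the Similarity
does not normalize the boost away (which is why the tests install a similarity
whose QueryNorm is 1). A hedged sketch (assumes an IndexSearcher named
searcher configured with such a similarity):

    // Illustrative only: every hit of a ConstantScoreQuery carries the boost
    // as its score, independent of term statistics.
    Query csq = new ConstantScoreQuery(new TermQuery(new Term("field", "term")));
    csq.Boost = 2.0f;  // each matching doc scores 2.0

    TopDocs hits = searcher.Search(csq, 10);
    // hits.ScoreDocs[i].Score == 2.0f for every match.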
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs b/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs
new file mode 100644
index 0000000..d447b9e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs
@@ -0,0 +1,731 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+using Lucene.Net.Randomized.Generators;
+using Lucene.Net.Support;
+using NUnit.Framework;
+
+namespace Lucene.Net.Search
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using Document = Lucene.Net.Documents.Document;
+    using Field = Lucene.Net.Documents.Field;
+    using TextField = Lucene.Net.Documents.TextField;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using IndexCommit = Lucene.Net.Index.IndexCommit;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using IIndexableField = Lucene.Net.Index.IIndexableField;
+    using KeepOnlyLastCommitDeletionPolicy = Lucene.Net.Index.KeepOnlyLastCommitDeletionPolicy;
+    using NoMergePolicy = Lucene.Net.Index.NoMergePolicy;
+    using OpenMode = Lucene.Net.Index.OpenMode;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using SnapshotDeletionPolicy = Lucene.Net.Index.SnapshotDeletionPolicy;
+    using Term = Lucene.Net.Index.Term;
+    using ThreadedIndexingAndSearchingTestCase = Lucene.Net.Index.ThreadedIndexingAndSearchingTestCase;
+    using TrackingIndexWriter = Lucene.Net.Index.TrackingIndexWriter;
+    using Directory = Lucene.Net.Store.Directory;
+    using NRTCachingDirectory = Lucene.Net.Store.NRTCachingDirectory;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using SuppressCodecs = Lucene.Net.Util.LuceneTestCase.SuppressCodecs;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    //using ThreadInterruptedException = Lucene.Net.Util.ThreadInterruptedException;
+    using Version = Lucene.Net.Util.LuceneVersion;
+
+    [SuppressCodecs("SimpleText", "Memory", "Direct")]
+    [TestFixture]
+    public class TestControlledRealTimeReopenThread : ThreadedIndexingAndSearchingTestCase
+    {
+
+        // Not guaranteed to reflect deletes:
+        private SearcherManager NrtNoDeletes;
+
+        // Is guaranteed to reflect deletes:
+        private SearcherManager NrtDeletes;
+
+        private TrackingIndexWriter GenWriter;
+
+        private ControlledRealTimeReopenThread<IndexSearcher> NrtDeletesThread;
+        private ControlledRealTimeReopenThread<IndexSearcher> NrtNoDeletesThread;
+
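+        // Each indexing thread records the generation of its last change here;
+        // DoAfterIndexingThreadDone folds it into MaxGen so FinalSearcher can
+        // wait until every thread's changes are searchable.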
+        private readonly ThreadLocal<long?> LastGens = new ThreadLocal<long?>();
+        private bool WarmCalled;
+
+        [Test]
+        public virtual void TestControlledRealTimeReopenThread_Mem()
+        {
+            RunTest("TestControlledRealTimeReopenThread");
+        }
+
+        protected internal override IndexSearcher FinalSearcher
+        {
+            get
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: finalSearcher maxGen=" + MaxGen);
+                }
+                NrtDeletesThread.WaitForGeneration(MaxGen);
+                return NrtDeletes.Acquire();
+            }
+        }
+
+        protected internal override Directory GetDirectory(Directory @in)
+        {
+            // Randomly swap in NRTCachingDir
+            if (Random().NextBoolean())
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: wrap NRTCachingDir");
+                }
+
+                return new NRTCachingDirectory(@in, 5.0, 60.0);
+            }
+            else
+            {
+                return @in;
+            }
+        }
+
+        protected internal override void UpdateDocuments(Term id, IEnumerable<IEnumerable<IIndexableField>> docs)
+        {
+            long gen = GenWriter.UpdateDocuments(id, docs);
+
+            // Randomly verify the update "took":
+            if (Random().Next(20) == 2)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: verify " + id);
+                }
+                NrtDeletesThread.WaitForGeneration(gen);
+                IndexSearcher s = NrtDeletes.Acquire();
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: got searcher=" + s);
+                }
+                try
+                {
+                    Assert.AreEqual(docs.Count(), s.Search(new TermQuery(id), 10).TotalHits);
+                }
+                finally
+                {
+                    NrtDeletes.Release(s);
+                }
+            }
+
+            LastGens.Value = gen;
+
+        }
+
+        protected internal override void AddDocuments(Term id, IEnumerable<IEnumerable<IIndexableField>> docs)
+        {
+            long gen = GenWriter.AddDocuments(docs);
+            // Randomly verify the add "took":
+            if (Random().Next(20) == 2)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: verify " + id);
+                }
+                NrtNoDeletesThread.WaitForGeneration(gen);
+                IndexSearcher s = NrtNoDeletes.Acquire();
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: got searcher=" + s);
+                }
+                try
+                {
+                    Assert.AreEqual(docs.Count(), s.Search(new TermQuery(id), 10).TotalHits);
+                }
+                finally
+                {
+                    NrtNoDeletes.Release(s);
+                }
+            }
+            LastGens.Value = gen;
+        }
+
+        protected internal override void AddDocument(Term id, IEnumerable<IIndexableField> doc)
+        {
+            long gen = GenWriter.AddDocument(doc);
+
+            // Randomly verify the add "took":
+            if (Random().Next(20) == 2)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: verify " + id);
+                }
+                NrtNoDeletesThread.WaitForGeneration(gen);
+                IndexSearcher s = NrtNoDeletes.Acquire();
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: got searcher=" + s);
+                }
+                try
+                {
+                    Assert.AreEqual(1, s.Search(new TermQuery(id), 10).TotalHits);
+                }
+                finally
+                {
+                    NrtNoDeletes.Release(s);
+                }
+            }
+            LastGens.Value = gen;
+        }
+
+        protected internal override void UpdateDocument(Term id, IEnumerable<IIndexableField> doc)
+        {
+            long gen = GenWriter.UpdateDocument(id, doc);
+            // Randomly verify the update "took":
+            if (Random().Next(20) == 2)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: verify " + id);
+                }
+                NrtDeletesThread.WaitForGeneration(gen);
+                IndexSearcher s = NrtDeletes.Acquire();
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: got searcher=" + s);
+                }
+                try
+                {
+                    Assert.AreEqual(1, s.Search(new TermQuery(id), 10).TotalHits);
+                }
+                finally
+                {
+                    NrtDeletes.Release(s);
+                }
+            }
+            LastGens.Value = gen;
+        }
+
+        protected internal override void DeleteDocuments(Term id)
+        {
+            long gen = GenWriter.DeleteDocuments(id);
+            // randomly verify the delete "took":
+            if (Random().Next(20) == 7)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: verify del " + id);
+                }
+                NrtDeletesThread.WaitForGeneration(gen);
+                IndexSearcher s = NrtDeletes.Acquire();
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: got searcher=" + s);
+                }
+                try
+                {
+                    Assert.AreEqual(0, s.Search(new TermQuery(id), 10).TotalHits);
+                }
+                finally
+                {
+                    NrtDeletes.Release(s);
+                }
+            }
+            LastGens.Value = gen;
+        }
+
+        protected internal override void DoAfterWriter(TaskScheduler es)
+        {
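+            // Randomize the reopen cadence: minReopenSec lands in [0.01, 0.06] sec
+            // and maxReopenSec anywhere from 1x to 11x that.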
+            double minReopenSec = 0.01 + 0.05 * Random().NextDouble();
+            double maxReopenSec = minReopenSec * (1.0 + 10 * Random().NextDouble());
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: make SearcherManager maxReopenSec=" + maxReopenSec + " minReopenSec=" + minReopenSec);
+            }
+
+            GenWriter = new TrackingIndexWriter(Writer);
+
+            SearcherFactory sf = new SearcherFactoryAnonymousInnerClassHelper(this, es);
+
+            NrtNoDeletes = new SearcherManager(Writer, false, sf);
+            NrtDeletes = new SearcherManager(Writer, true, sf);
+
+            NrtDeletesThread = new ControlledRealTimeReopenThread<IndexSearcher>(GenWriter, NrtDeletes, maxReopenSec, minReopenSec);
+            NrtDeletesThread.Name = "NRTDeletes Reopen Thread";
+            NrtDeletesThread.Priority = (ThreadPriority)Math.Min((int)Thread.CurrentThread.Priority + 2, (int)ThreadPriority.Highest);
+            NrtDeletesThread.SetDaemon(true);
+            NrtDeletesThread.Start();
+
+            NrtNoDeletesThread = new ControlledRealTimeReopenThread<IndexSearcher>(GenWriter, NrtNoDeletes, maxReopenSec, minReopenSec);
+            NrtNoDeletesThread.Name = "NRTNoDeletes Reopen Thread";
+            NrtNoDeletesThread.Priority = (ThreadPriority)Math.Min((int)Thread.CurrentThread.Priority + 2, (int)ThreadPriority.Highest);
+            NrtNoDeletesThread.SetDaemon(true);
+            NrtNoDeletesThread.Start();
+        }
+
+        private class SearcherFactoryAnonymousInnerClassHelper : SearcherFactory
+        {
+            private readonly TestControlledRealTimeReopenThread OuterInstance;
+
+            private TaskScheduler Es;
+
+            public SearcherFactoryAnonymousInnerClassHelper(TestControlledRealTimeReopenThread outerInstance, TaskScheduler es)
+            {
+                this.OuterInstance = outerInstance;
+                this.Es = es;
+            }
+
+            public override IndexSearcher NewSearcher(IndexReader r)
+            {
+                OuterInstance.WarmCalled = true;
+                IndexSearcher s = new IndexSearcher(r, Es);
+                s.Search(new TermQuery(new Term("body", "united")), 10);
+                return s;
+            }
+        }
+
+        protected internal override void DoAfterIndexingThreadDone()
+        {
+            long? gen = LastGens.Value;
+            if (gen != null)
+            {
+                AddMaxGen((long)gen);
+            }
+        }
+
+        private long MaxGen = -1;
+
+        private void AddMaxGen(long gen)
+        {
+            lock (this)
+            {
+                MaxGen = Math.Max(gen, MaxGen);
+            }
+        }
+
+        protected internal override void DoSearching(TaskScheduler es, DateTime stopTime)
+        {
+            RunSearchThreads(stopTime);
+        }
+
+        protected internal override IndexSearcher CurrentSearcher
+        {
+            get
+            {
+                // Test doesn't assert deletions until the end, so we
+                // can randomize whether dels must be applied
+                SearcherManager nrt;
+                if (Random().NextBoolean())
+                {
+                    nrt = NrtDeletes;
+                }
+                else
+                {
+                    nrt = NrtNoDeletes;
+                }
+
+                return nrt.Acquire();
+            }
+        }
+
+        protected internal override void ReleaseSearcher(IndexSearcher s)
+        {
+            // NOTE: a bit iffy... technically you should release
+            // against the same SearcherManager you acquired from... but
+            // both impls just decRef the underlying reader so we
+            // can get away w/ cheating:
+            NrtNoDeletes.Release(s);
+        }
+
+        protected internal override void DoClose()
+        {
+            Assert.IsTrue(WarmCalled);
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: now close SearcherManagers");
+            }
+            NrtDeletesThread.Dispose();
+            NrtDeletes.Dispose();
+            NrtNoDeletesThread.Dispose();
+            NrtNoDeletes.Dispose();
+        }
+
+        /*
+         * LUCENE-3528 - NRTManager hangs in certain situations 
+         */
+        [Test]
+        public virtual void TestThreadStarvationNoDeleteNRTReader()
+        {
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMergePolicy(Random().NextBoolean() ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES);
+            Directory d = NewDirectory();
+            CountdownEvent latch = new CountdownEvent(1);
+            CountdownEvent signal = new CountdownEvent(1);
+
+            LatchedIndexWriter _writer = new LatchedIndexWriter(d, conf, latch, signal);
+            TrackingIndexWriter writer = new TrackingIndexWriter(_writer);
+            SearcherManager manager = new SearcherManager(_writer, false, null);
+            Document doc = new Document();
+            doc.Add(NewTextField("test", "test", Field.Store.YES));
+            writer.AddDocument(doc);
+            manager.MaybeRefresh();
+            ThreadClass t = new ThreadAnonymousInnerClassHelper(this, latch, signal, writer, manager);
+            t.Start();
+            _writer.WaitAfterUpdate = true; // wait in addDocument to let some reopens go through
+            long lastGen = writer.UpdateDocument(new Term("foo", "bar"), doc); // once this returns the doc is already reflected in the last reopen
+
+            Assert.IsFalse(manager.IsSearcherCurrent()); // false since there is a delete in the queue
+
+            IndexSearcher searcher = manager.Acquire();
+            try
+            {
+                Assert.AreEqual(2, searcher.IndexReader.NumDocs);
+            }
+            finally
+            {
+                manager.Release(searcher);
+            }
+            ControlledRealTimeReopenThread<IndexSearcher> thread = new ControlledRealTimeReopenThread<IndexSearcher>(writer, manager, 0.01, 0.01);
+            thread.Start(); // start reopening
+            if (VERBOSE)
+            {
+                Console.WriteLine("waiting now for generation " + lastGen);
+            }
+
+            AtomicBoolean finished = new AtomicBoolean(false);
+            ThreadClass waiter = new ThreadAnonymousInnerClassHelper2(this, lastGen, thread, finished);
+            waiter.Start();
+            manager.MaybeRefresh();
+            waiter.Join(1000);
+            if (!finished.Get())
+            {
+                waiter.Interrupt();
+                Assert.Fail("thread deadlocked on waitForGeneration");
+            }
+            thread.Dispose();
+            thread.Join();
+            IOUtils.Close(manager, _writer, d);
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestControlledRealTimeReopenThread OuterInstance;
+
+            private CountdownEvent Latch;
+            private CountdownEvent Signal;
+            private TrackingIndexWriter Writer;
+            private SearcherManager Manager;
+
+            public ThreadAnonymousInnerClassHelper(TestControlledRealTimeReopenThread outerInstance, CountdownEvent latch, CountdownEvent signal, TrackingIndexWriter writer, SearcherManager manager)
+            {
+                this.OuterInstance = outerInstance;
+                this.Latch = latch;
+                this.Signal = signal;
+                this.Writer = writer;
+                this.Manager = manager;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    Signal.Wait();
+                    Manager.MaybeRefresh();
+                    Writer.DeleteDocuments(new TermQuery(new Term("foo", "barista")));
+                    Manager.MaybeRefresh(); // kick off another reopen so we inc. the internal gen
+                }
+                catch (Exception e)
+                {
+                    Console.WriteLine(e.ToString());
+                    Console.Write(e.StackTrace);
+                }
+                finally
+                {
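+                    // CountdownEvent.Reset(n - 1) stands in for Java's CountDownLatch.countDown():
+                    // once the count hits zero, the Wait() in LatchedIndexWriter.UpdateDocument releases.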
+                    Latch.Reset(Latch.CurrentCount == 0 ? 0 : Latch.CurrentCount - 1); // let the add below finish
+                }
+            }
+        }
+
+        private class ThreadAnonymousInnerClassHelper2 : ThreadClass
+        {
+            private readonly TestControlledRealTimeReopenThread OuterInstance;
+
+            private long LastGen;
+            private ControlledRealTimeReopenThread<IndexSearcher> thread;
+            private AtomicBoolean Finished;
+
+            public ThreadAnonymousInnerClassHelper2(TestControlledRealTimeReopenThread outerInstance, long lastGen, ControlledRealTimeReopenThread<IndexSearcher> thread, AtomicBoolean finished)
+            {
+                this.OuterInstance = outerInstance;
+                this.LastGen = lastGen;
+                this.thread = thread;
+                this.Finished = finished;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    thread.WaitForGeneration(LastGen);
+                }
+                catch (ThreadInterruptedException ie)
+                {
+                    Thread.CurrentThread.Interrupt();
+                    throw new Exception(ie.Message, ie);
+                }
+                Finished.Set(true);
+            }
+        }
+
+        public class LatchedIndexWriter : IndexWriter
+        {
+
+            internal CountdownEvent Latch;
+            internal bool WaitAfterUpdate = false;
+            internal CountdownEvent Signal;
+
+            public LatchedIndexWriter(Directory d, IndexWriterConfig conf, CountdownEvent latch, CountdownEvent signal)
+                : base(d, conf)
+            {
+                this.Latch = latch;
+                this.Signal = signal;
+
+            }
+
+            public override void UpdateDocument(Term term, IEnumerable<IIndexableField> doc, Analyzer analyzer)
+            {
+                base.UpdateDocument(term, doc, analyzer);
+                try
+                {
+                    if (WaitAfterUpdate)
+                    {
+                        Signal.Reset(Signal.CurrentCount == 0 ? 0 : Signal.CurrentCount - 1);
+                        Latch.Wait();
+                    }
+                }
+#pragma warning disable 168
+                catch (ThreadInterruptedException e)
+#pragma warning restore 168
+                {
+                    throw;
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestEvilSearcherFactory()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            w.Commit();
+
+            IndexReader other = DirectoryReader.Open(dir);
+
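+            // "Evil" because it returns a searcher over a different reader than the one
+            // the SearcherManager supplies; the manager is expected to detect this and throw.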
+            SearcherFactory theEvilOne = new SearcherFactoryAnonymousInnerClassHelper2(this, other);
+
+            try
+            {
+                new SearcherManager(w.w, false, theEvilOne);
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException ise)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            w.Dispose();
+            other.Dispose();
+            dir.Dispose();
+        }
+
+        private class SearcherFactoryAnonymousInnerClassHelper2 : SearcherFactory
+        {
+            private readonly TestControlledRealTimeReopenThread OuterInstance;
+
+            private IndexReader Other;
+
+            public SearcherFactoryAnonymousInnerClassHelper2(TestControlledRealTimeReopenThread outerInstance, IndexReader other)
+            {
+                this.OuterInstance = outerInstance;
+                this.Other = other;
+            }
+
+            public override IndexSearcher NewSearcher(IndexReader ignored)
+            {
+                return OuterInstance.NewSearcher(Other);
+            }
+        }
+
+        [Test]
+        public virtual void TestListenerCalled()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
+            AtomicBoolean afterRefreshCalled = new AtomicBoolean(false);
+            SearcherManager sm = new SearcherManager(iw, true, new SearcherFactory());
+            sm.AddListener(new RefreshListenerAnonymousInnerClassHelper(this, afterRefreshCalled));
+            iw.AddDocument(new Document());
+            iw.Commit();
+            Assert.IsFalse(afterRefreshCalled.Get());
+            sm.MaybeRefreshBlocking();
+            Assert.IsTrue(afterRefreshCalled.Get());
+            sm.Dispose();
+            iw.Dispose();
+            dir.Dispose();
+        }
+
+        private class RefreshListenerAnonymousInnerClassHelper : ReferenceManager.IRefreshListener
+        {
+            private readonly TestControlledRealTimeReopenThread OuterInstance;
+
+            private AtomicBoolean AfterRefreshCalled;
+
+            public RefreshListenerAnonymousInnerClassHelper(TestControlledRealTimeReopenThread outerInstance, AtomicBoolean afterRefreshCalled)
+            {
+                this.OuterInstance = outerInstance;
+                this.AfterRefreshCalled = afterRefreshCalled;
+            }
+
+            public void BeforeRefresh()
+            {
+            }
+            public void AfterRefresh(bool didRefresh)
+            {
+                if (didRefresh)
+                {
+                    AfterRefreshCalled.Set(true);
+                }
+            }
+        }
+
+        // LUCENE-5461
+        [Test, Timeout(120000)]
+        public virtual void TestCRTReopen()
+        {
+            // stress test: repeatedly index and wait for generations while commit threads run
+
+            // should be high enough that WaitForGeneration never legitimately takes this long
+            int maxStaleSecs = 20;
+
+            //build crap data just to store it.
+            string s = "        abcdefghijklmnopqrstuvwxyz     ";
+            char[] chars = s.ToCharArray();
+            StringBuilder builder = new StringBuilder(2048);
+            for (int i = 0; i < 2048; i++)
+            {
+                builder.Append(chars[Random().Next(chars.Length)]);
+            }
+            string content = builder.ToString();
+
+            SnapshotDeletionPolicy sdp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
+            Directory dir = new NRTCachingDirectory(NewFSDirectory(CreateTempDir("nrt")), 5, 128);
+            IndexWriterConfig config = new IndexWriterConfig(
+#pragma warning disable 612, 618
+                Version.LUCENE_46,
+#pragma warning restore 612, 618
+                new MockAnalyzer(Random()));
+            config.SetIndexDeletionPolicy(sdp);
+            config.SetOpenMode(OpenMode.CREATE_OR_APPEND);
+            IndexWriter iw = new IndexWriter(dir, config);
+            SearcherManager sm = new SearcherManager(iw, true, new SearcherFactory());
+            TrackingIndexWriter tiw = new TrackingIndexWriter(iw);
+            ControlledRealTimeReopenThread<IndexSearcher> controlledRealTimeReopenThread = new ControlledRealTimeReopenThread<IndexSearcher>(tiw, sm, maxStaleSecs, 0);
+
+            controlledRealTimeReopenThread.SetDaemon(true);
+            controlledRealTimeReopenThread.Start();
+
+            IList<ThreadClass> commitThreads = new List<ThreadClass>();
+
+            for (int i = 0; i < 500; i++)
+            {
+                if (i > 0 && i % 50 == 0)
+                {
+                    ThreadClass commitThread = new RunnableAnonymousInnerClassHelper(this, sdp, dir, iw);
+                    commitThread.Start();
+                    commitThreads.Add(commitThread);
+                }
+                Document d = new Document();
+                d.Add(new TextField("count", i + "", Field.Store.NO));
+                d.Add(new TextField("content", content, Field.Store.YES));
+                long start = Environment.TickCount; // use a monotonic tick count; DateTime.Now.Millisecond only yields the 0-999 ms component
+                long l = tiw.AddDocument(d);
+                controlledRealTimeReopenThread.WaitForGeneration(l);
+                long wait = Environment.TickCount - start;
+                Assert.IsTrue(wait < (maxStaleSecs * 1000), "waited too long for generation " + wait);
+                IndexSearcher searcher = sm.Acquire();
+                TopDocs td = searcher.Search(new TermQuery(new Term("count", i + "")), 10);
+                sm.Release(searcher);
+                Assert.AreEqual(1, td.TotalHits);
+            }
+
+            foreach (ThreadClass commitThread in commitThreads)
+            {
+                commitThread.Join();
+            }
+
+            controlledRealTimeReopenThread.Dispose();
+            sm.Dispose();
+            iw.Dispose();
+            dir.Dispose();
+        }
+
+        private class RunnableAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestControlledRealTimeReopenThread OuterInstance;
+
+            private SnapshotDeletionPolicy Sdp;
+            private Directory Dir;
+            private IndexWriter Iw;
+
+            public RunnableAnonymousInnerClassHelper(TestControlledRealTimeReopenThread outerInstance, SnapshotDeletionPolicy sdp, Directory dir, IndexWriter iw)
+            {
+                this.OuterInstance = outerInstance;
+                this.Sdp = sdp;
+                this.Dir = dir;
+                this.Iw = iw;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    Iw.Commit();
+                    IndexCommit ic = Sdp.Snapshot();
+                    foreach (string name in ic.FileNames)
+                    {
+                        //distribute, and backup
+                        //System.out.println(names);
+                        Assert.IsTrue(SlowFileExists(Dir, name));
+                    }
+                }
+                catch (Exception e)
+                {
+                    throw new Exception(e.Message, e);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
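
Condensed, the near-real-time pattern this fixture exercises is roughly the
following sketch (assuming the Lucene.Net 4.x API; writer and doc are presumed
to exist already, and the 5.0/0.1 stale-time bounds are illustrative):

    var tracking = new TrackingIndexWriter(writer);   // stamps every change with a generation
    var manager = new SearcherManager(writer, true, new SearcherFactory());
    var reopener = new ControlledRealTimeReopenThread<IndexSearcher>(tracking, manager, 5.0, 0.1);
    reopener.Start();

    long gen = tracking.AddDocument(doc);             // the generation of this change
    reopener.WaitForGeneration(gen);                  // block until a reopened searcher covers it
    IndexSearcher s = manager.Acquire();
    try { /* the new doc is now searchable */ }
    finally { manager.Release(s); }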


[15/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestTransactions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestTransactions.cs b/src/Lucene.Net.Tests/Index/TestTransactions.cs
new file mode 100644
index 0000000..627b21c
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestTransactions.cs
@@ -0,0 +1,336 @@
+using System;
+using System.Threading;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.IO;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using English = Lucene.Net.Util.English;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using StringField = StringField;
+
+    [TestFixture]
+    public class TestTransactions : LuceneTestCase
+    {
+        private static volatile bool DoFail;
+
+        private class RandomFailure : MockDirectoryWrapper.Failure
+        {
+            private readonly TestTransactions OuterInstance;
+
+            public RandomFailure(TestTransactions outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
+                if (TestTransactions.DoFail && Random().Next() % 10 <= 3)
+                {
+                    throw new IOException("now failing randomly but on purpose");
+                }
+            }
+        }
+
+        private abstract class TimedThread : ThreadClass
+        {
+            internal volatile bool Failed;
+            internal static float RUN_TIME_MSEC = AtLeast(500);
+            internal TimedThread[] AllThreads;
+
+            public abstract void DoWork();
+
+            internal TimedThread(TimedThread[] threads)
+            {
+                this.AllThreads = threads;
+            }
+
+            public override void Run()
+            {
+                long stopTime = Environment.TickCount + (long)(RUN_TIME_MSEC);
+
+                try
+                {
+                    do
+                    {
+                        if (AnyErrors())
+                        {
+                            break;
+                        }
+                        DoWork();
+                    } while (Environment.TickCount < stopTime);
+                }
+                catch (Exception e)
+                {
+                    Console.WriteLine(Thread.CurrentThread + ": exc");
+                    Console.Error.WriteLine(e.StackTrace);
+                    Failed = true;
+                }
+            }
+
+            internal virtual bool AnyErrors()
+            {
+                for (int i = 0; i < AllThreads.Length; i++)
+                {
+                    if (AllThreads[i] != null && AllThreads[i].Failed)
+                    {
+                        return true;
+                    }
+                }
+                return false;
+            }
+        }
+
+        private class IndexerThread : TimedThread
+        {
+            private readonly TestTransactions OuterInstance;
+            private IConcurrentMergeScheduler _scheduler1;
+            private IConcurrentMergeScheduler _scheduler2;
+            internal Directory Dir1;
+            internal Directory Dir2;
+            internal object @lock;
+            internal int NextID;
+
+            public IndexerThread(TestTransactions outerInstance, object @lock, 
+                Directory dir1, Directory dir2,
+                IConcurrentMergeScheduler scheduler1, IConcurrentMergeScheduler scheduler2, 
+                TimedThread[] threads)
+                : base(threads)
+            {
+                _scheduler1 = scheduler1;
+                _scheduler2 = scheduler2;
+                this.OuterInstance = outerInstance;
+                this.@lock = @lock;
+                this.Dir1 = dir1;
+                this.Dir2 = dir2;
+            }
+
+            public override void DoWork()
+            {
+                var config = OuterInstance.NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                                .SetMaxBufferedDocs(3)
+                                .SetMergeScheduler(_scheduler1)
+                                .SetMergePolicy(NewLogMergePolicy(2));
+                IndexWriter writer1 = new IndexWriter(Dir1, config);
+                ((IConcurrentMergeScheduler)writer1.Config.MergeScheduler).SetSuppressExceptions();
+
+                // Intentionally use different params so flush/merge
+                // happen @ different times
+                var config2 = OuterInstance.NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                                .SetMaxBufferedDocs(2)
+                                .SetMergeScheduler(_scheduler2)
+                                .SetMergePolicy(NewLogMergePolicy(3));
+                IndexWriter writer2 = new IndexWriter(Dir2, config2);
+                ((IConcurrentMergeScheduler)writer2.Config.MergeScheduler).SetSuppressExceptions();
+
+                Update(writer1);
+                Update(writer2);
+
+                DoFail = true;
+                try
+                {
+                    lock (@lock)
+                    {
+                        try
+                        {
+                            writer1.PrepareCommit();
+                        }
+                        catch (Exception)
+                        {
+                            writer1.Rollback();
+                            writer2.Rollback();
+                            return;
+                        }
+                        try
+                        {
+                            writer2.PrepareCommit();
+                        }
+                        catch (Exception)
+                        {
+                            writer1.Rollback();
+                            writer2.Rollback();
+                            return;
+                        }
+
+                        writer1.Commit();
+                        writer2.Commit();
+                    }
+                }
+                finally
+                {
+                    DoFail = false;
+                }
+
+                writer1.Dispose();
+                writer2.Dispose();
+            }
+
+            public virtual void Update(IndexWriter writer)
+            {
+                // Add 10 docs:
+                FieldType customType = new FieldType(StringField.TYPE_NOT_STORED);
+                customType.StoreTermVectors = true;
+                for (int j = 0; j < 10; j++)
+                {
+                    Document d = new Document();
+                    int n = Random().Next();
+                    d.Add(OuterInstance.NewField("id", Convert.ToString(NextID++), customType));
+                    d.Add(OuterInstance.NewTextField("contents", English.IntToEnglish(n), Field.Store.NO));
+                    writer.AddDocument(d);
+                }
+
+                // Delete 5 docs:
+                int deleteID = NextID - 1;
+                for (int j = 0; j < 5; j++)
+                {
+                    writer.DeleteDocuments(new Term("id", "" + deleteID));
+                    deleteID -= 2;
+                }
+            }
+        }
+
+        private class SearcherThread : TimedThread
+        {
+            internal Directory Dir1;
+            internal Directory Dir2;
+            internal object @lock;
+
+            public SearcherThread(object @lock, Directory dir1, Directory dir2, TimedThread[] threads)
+                : base(threads)
+            {
+                this.@lock = @lock;
+                this.Dir1 = dir1;
+                this.Dir2 = dir2;
+            }
+
+            public override void DoWork()
+            {
+                IndexReader r1 = null, r2 = null;
+                lock (@lock)
+                {
+                    try
+                    {
+                        r1 = DirectoryReader.Open(Dir1);
+                        r2 = DirectoryReader.Open(Dir2);
+                    }
+                    catch (IOException e)
+                    {
+                        if (!e.Message.Contains("on purpose"))
+                        {
+                            throw; // rethrow without resetting the stack trace
+                        }
+                        if (r1 != null)
+                        {
+                            r1.Dispose();
+                        }
+                        if (r2 != null)
+                        {
+                            r2.Dispose();
+                        }
+                        return;
+                    }
+                }
+                if (r1.NumDocs != r2.NumDocs)
+                {
+                    throw new Exception("doc counts differ: r1=" + r1.NumDocs + " r2=" + r2.NumDocs);
+                }
+                r1.Dispose();
+                r2.Dispose();
+            }
+        }
+
+        public virtual void InitIndex(Directory dir)
+        {
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            for (int j = 0; j < 7; j++)
+            {
+                Document d = new Document();
+                int n = Random().Next();
+                d.Add(NewTextField("contents", English.IntToEnglish(n), Field.Store.NO));
+                writer.AddDocument(d);
+            }
+            writer.Dispose();
+        }
+
+        [Test, Sequential]
+        public virtual void TestTransactions_Mem(
+            [ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler1, 
+            [ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler2)
+        {
+            Console.WriteLine("Start test");
+            // we can't use a non-RAM dir on Windows, because this test needs to double-write.
+            MockDirectoryWrapper dir1 = new MockDirectoryWrapper(Random(), new RAMDirectory());
+            MockDirectoryWrapper dir2 = new MockDirectoryWrapper(Random(), new RAMDirectory());
+            dir1.PreventDoubleWrite = false;
+            dir2.PreventDoubleWrite = false;
+            dir1.FailOn(new RandomFailure(this));
+            dir2.FailOn(new RandomFailure(this));
+            dir1.FailOnOpenInput = false;
+            dir2.FailOnOpenInput = false;
+
+            // We throw exceptions in deleteFile, which creates
+            // leftover files:
+            dir1.AssertNoUnrefencedFilesOnClose = false;
+            dir2.AssertNoUnrefencedFilesOnClose = false;
+
+            InitIndex(dir1);
+            InitIndex(dir2);
+
+            TimedThread[] threads = new TimedThread[3];
+            int numThread = 0;
+
+            IndexerThread indexerThread = new IndexerThread(this, this, dir1, dir2, scheduler1, scheduler2, threads);
+            threads[numThread++] = indexerThread;
+            indexerThread.Start();
+
+            SearcherThread searcherThread1 = new SearcherThread(this, dir1, dir2, threads);
+            threads[numThread++] = searcherThread1;
+            searcherThread1.Start();
+
+            SearcherThread searcherThread2 = new SearcherThread(this, dir1, dir2, threads);
+            threads[numThread++] = searcherThread2;
+            searcherThread2.Start();
+
+            for (int i = 0; i < numThread; i++)
+            {
+                threads[i].Join();
+            }
+
+            for (int i = 0; i < numThread; i++)
+            {
+                Assert.IsTrue(!threads[i].Failed);
+            }
+            dir1.Dispose();
+            dir2.Dispose();
+
+            Console.WriteLine("End test");
+        }
+    }
+}
\ No newline at end of file
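
The heart of IndexerThread.DoWork above is the classic two-writer transaction
idiom, reduced to a sketch (writer1 and writer2 stand for two open IndexWriters):

    try
    {
        writer1.PrepareCommit();   // phase 1 on both writers...
        writer2.PrepareCommit();
    }
    catch (Exception)
    {
        writer1.Rollback();        // ...and on any failure neither index commits
        writer2.Rollback();
        throw;
    }
    writer1.Commit();              // phase 2: both commits are now safe to finish
    writer2.Commit();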

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestTryDelete.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestTryDelete.cs b/src/Lucene.Net.Tests/Index/TestTryDelete.cs
new file mode 100644
index 0000000..a865675
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestTryDelete.cs
@@ -0,0 +1,196 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Search;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using ReferenceManager = Lucene.Net.Search.ReferenceManager;
+    using SearcherFactory = Lucene.Net.Search.SearcherFactory;
+    using SearcherManager = Lucene.Net.Search.SearcherManager;
+    using Store = Field.Store;
+    using StringField = StringField;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TopDocs = Lucene.Net.Search.TopDocs;
+
+    [TestFixture]
+    public class TestTryDelete : LuceneTestCase
+    {
+        private static IndexWriter GetWriter(Directory directory)
+        {
+            MergePolicy policy = new LogByteSizeMergePolicy();
+            IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMergePolicy(policy);
+            conf.SetOpenMode(OpenMode.CREATE_OR_APPEND);
+
+            IndexWriter writer = new IndexWriter(directory, conf);
+
+            return writer;
+        }
+
+        private static Directory CreateIndex()
+        {
+            Directory directory = new RAMDirectory();
+
+            IndexWriter writer = GetWriter(directory);
+
+            for (int i = 0; i < 10; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("foo", Convert.ToString(i), Store.YES));
+                writer.AddDocument(doc);
+            }
+
+            writer.Commit();
+            writer.Dispose();
+
+            return directory;
+        }
+
+        [Test]
+        public virtual void TestTryDeleteDocument()
+        {
+            Directory directory = CreateIndex();
+
+            IndexWriter writer = GetWriter(directory);
+
+            ReferenceManager<IndexSearcher> mgr = new SearcherManager(writer, true, new SearcherFactory());
+
+            TrackingIndexWriter mgrWriter = new TrackingIndexWriter(writer);
+
+            IndexSearcher searcher = mgr.Acquire();
+
+            TopDocs topDocs = searcher.Search(new TermQuery(new Term("foo", "0")), 100);
+            Assert.AreEqual(1, topDocs.TotalHits);
+
+            long result;
+            if (Random().NextBoolean())
+            {
+                IndexReader r = DirectoryReader.Open(writer, true);
+                result = mgrWriter.TryDeleteDocument(r, 0);
+                r.Dispose();
+            }
+            else
+            {
+                result = mgrWriter.TryDeleteDocument(searcher.IndexReader, 0);
+            }
+
+            // The tryDeleteDocument should have succeeded:
+            Assert.IsTrue(result != -1);
+
+            Assert.IsTrue(writer.HasDeletions);
+
+            if (Random().NextBoolean())
+            {
+                writer.Commit();
+            }
+
+            Assert.IsTrue(writer.HasDeletions);
+
+            mgr.MaybeRefresh();
+
+            searcher = mgr.Acquire();
+
+            topDocs = searcher.Search(new TermQuery(new Term("foo", "0")), 100);
+
+            Assert.AreEqual(0, topDocs.TotalHits);
+        }
+
+        [Test]
+        public virtual void TestTryDeleteDocumentCloseAndReopen()
+        {
+            Directory directory = CreateIndex();
+
+            IndexWriter writer = GetWriter(directory);
+
+            ReferenceManager<IndexSearcher> mgr = new SearcherManager(writer, true, new SearcherFactory());
+
+            IndexSearcher searcher = mgr.Acquire();
+
+            TopDocs topDocs = searcher.Search(new TermQuery(new Term("foo", "0")), 100);
+            Assert.AreEqual(1, topDocs.TotalHits);
+
+            TrackingIndexWriter mgrWriter = new TrackingIndexWriter(writer);
+            long result = mgrWriter.TryDeleteDocument(DirectoryReader.Open(writer, true), 0);
+
+            Assert.AreEqual(1, result);
+
+            writer.Commit();
+
+            Assert.IsTrue(writer.HasDeletions);
+
+            mgr.MaybeRefresh();
+
+            searcher = mgr.Acquire();
+
+            topDocs = searcher.Search(new TermQuery(new Term("foo", "0")), 100);
+
+            Assert.AreEqual(0, topDocs.TotalHits);
+
+            writer.Dispose();
+
+            searcher = new IndexSearcher(DirectoryReader.Open(directory));
+
+            topDocs = searcher.Search(new TermQuery(new Term("foo", "0")), 100);
+
+            Assert.AreEqual(0, topDocs.TotalHits);
+        }
+
+        [Test]
+        public virtual void TestDeleteDocuments()
+        {
+            Directory directory = CreateIndex();
+
+            IndexWriter writer = GetWriter(directory);
+
+            ReferenceManager<IndexSearcher> mgr = new SearcherManager(writer, true, new SearcherFactory());
+
+            IndexSearcher searcher = mgr.Acquire();
+
+            TopDocs topDocs = searcher.Search(new TermQuery(new Term("foo", "0")), 100);
+            Assert.AreEqual(1, topDocs.TotalHits);
+
+            TrackingIndexWriter mgrWriter = new TrackingIndexWriter(writer);
+            long result = mgrWriter.DeleteDocuments(new TermQuery(new Term("foo", "0")));
+
+            Assert.AreEqual(1, result);
+
+            // writer.Commit();
+
+            Assert.IsTrue(writer.HasDeletions);
+
+            mgr.MaybeRefresh();
+
+            searcher = mgr.Acquire();
+
+            topDocs = searcher.Search(new TermQuery(new Term("foo", "0")), 100);
+
+            Assert.AreEqual(0, topDocs.TotalHits);
+        }
+    }
+}
\ No newline at end of file
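
The contract these tests pin down, in a sketch (trackingWriter and searcher as
in the tests above): TryDeleteDocument deletes by segment-local doc id and only
succeeds while the segment the reader saw is still live in the writer; it
returns the change's generation, or -1 if the segment has been merged away.

    long gen = trackingWriter.TryDeleteDocument(searcher.IndexReader, 0);
    if (gen == -1)
    {
        // the cheap in-place delete failed; fall back to a query-based delete
        trackingWriter.DeleteDocuments(new TermQuery(new Term("foo", "0")));
    }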

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestTwoPhaseCommitTool.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestTwoPhaseCommitTool.cs b/src/Lucene.Net.Tests/Index/TestTwoPhaseCommitTool.cs
new file mode 100644
index 0000000..58c6f3a
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestTwoPhaseCommitTool.cs
@@ -0,0 +1,178 @@
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using System.IO;
+
+    /*
+                 * Licensed to the Apache Software Foundation (ASF) under one or more
+                 * contributor license agreements.  See the NOTICE file distributed with
+                 * this work for additional information regarding copyright ownership.
+                 * The ASF licenses this file to You under the Apache License, Version 2.0
+                 * (the "License"); you may not use this file except in compliance with
+                 * the License.  You may obtain a copy of the License at
+                 *
+                 *     http://www.apache.org/licenses/LICENSE-2.0
+                 *
+                 * Unless required by applicable law or agreed to in writing, software
+                 * distributed under the License is distributed on an "AS IS" BASIS,
+                 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+                 * See the License for the specific language governing permissions and
+                 * limitations under the License.
+                 */
+
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    [TestFixture]
+    public class TestTwoPhaseCommitTool : LuceneTestCase
+    {
+        private class TwoPhaseCommitImpl : ITwoPhaseCommit
+        {
+            internal static bool CommitCalled = false;
+            internal readonly bool FailOnPrepare;
+            internal readonly bool FailOnCommit;
+            internal readonly bool FailOnRollback;
+            internal bool RollbackCalled = false;
+            internal IDictionary<string, string> PrepareCommitData = null;
+            internal IDictionary<string, string> CommitData = null;
+
+            public TwoPhaseCommitImpl(bool failOnPrepare, bool failOnCommit, bool failOnRollback)
+            {
+                this.FailOnPrepare = failOnPrepare;
+                this.FailOnCommit = failOnCommit;
+                this.FailOnRollback = failOnRollback;
+            }
+
+            public void PrepareCommit()
+            {
+                PrepareCommit(null);
+            }
+
+            public virtual void PrepareCommit(IDictionary<string, string> commitData)
+            {
+                this.PrepareCommitData = commitData;
+                Assert.IsFalse(CommitCalled, "commit should not have been called before all prepareCommit were");
+                if (FailOnPrepare)
+                {
+                    throw new IOException("failOnPrepare");
+                }
+            }
+
+            public void Commit()
+            {
+                Commit(null);
+            }
+
+            public virtual void Commit(IDictionary<string, string> commitData)
+            {
+                this.CommitData = commitData;
+                CommitCalled = true;
+                if (FailOnCommit)
+                {
+                    throw new Exception("failOnCommit");
+                }
+            }
+
+            public void Rollback()
+            {
+                RollbackCalled = true;
+                if (FailOnRollback)
+                {
+                    throw new Exception("failOnRollback");
+                }
+            }
+        }
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            TwoPhaseCommitImpl.CommitCalled = false; // reset count before every test
+        }
+
+        [Test]
+        public virtual void TestPrepareThenCommit()
+        {
+            // tests that prepareCommit() is called on all objects before commit()
+            TwoPhaseCommitImpl[] objects = new TwoPhaseCommitImpl[2];
+            for (int i = 0; i < objects.Length; i++)
+            {
+                objects[i] = new TwoPhaseCommitImpl(false, false, false);
+            }
+
+            // following call will fail if commit() is called before all prepare() were
+            TwoPhaseCommitTool.Execute(objects);
+        }
+
+        [Test]
+        public virtual void TestRollback()
+        {
+            // tests that rollback is called if failure occurs at any stage
+            int numObjects = Random().Next(8) + 3; // between [3, 10]
+            TwoPhaseCommitImpl[] objects = new TwoPhaseCommitImpl[numObjects];
+            for (int i = 0; i < objects.Length; i++)
+            {
+                bool failOnPrepare = Random().NextBoolean();
+                // we should not hit failures on commit usually
+                bool failOnCommit = Random().NextDouble() < 0.05;
+                bool failOnRollback = Random().NextBoolean();
+                objects[i] = new TwoPhaseCommitImpl(failOnPrepare, failOnCommit, failOnRollback);
+            }
+
+            bool anyFailure = false;
+            try
+            {
+                TwoPhaseCommitTool.Execute(objects);
+            }
+#pragma warning disable 168
+            catch (Exception t)
+#pragma warning restore 168
+            {
+                anyFailure = true;
+            }
+
+            if (anyFailure)
+            {
+                // if any failure happened, ensure that rollback was called on all.
+                foreach (TwoPhaseCommitImpl tpc in objects)
+                {
+                    Assert.IsTrue(tpc.RollbackCalled, "rollback was not called while a failure occurred during the 2-phase commit");
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestNullTPCs()
+        {
+            int numObjects = Random().Next(4) + 3; // between [3, 6]
+            ITwoPhaseCommit[] tpcs = new ITwoPhaseCommit[numObjects];
+            bool setNull = false;
+            for (int i = 0; i < tpcs.Length; i++)
+            {
+                bool isNull = Random().NextDouble() < 0.3;
+                if (isNull)
+                {
+                    setNull = true;
+                    tpcs[i] = null;
+                }
+                else
+                {
+                    tpcs[i] = new TwoPhaseCommitImpl(false, false, false);
+                }
+            }
+
+            if (!setNull)
+            {
+                // none of the TPCs were picked to be null, pick one at random
+                int idx = Random().Next(numObjects);
+                tpcs[idx] = null;
+            }
+
+            // following call would fail if TPCTool won't handle null TPCs properly
+            TwoPhaseCommitTool.Execute(tpcs);
+        }
+    }
+}
\ No newline at end of file
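
What TwoPhaseCommitTool.Execute guarantees, per the tests above (a sketch;
resourceA and resourceB are hypothetical ITwoPhaseCommit implementations):
PrepareCommit() runs on every non-null resource first, Commit() only runs once
all prepares have succeeded, and any failure triggers Rollback() everywhere.

    ITwoPhaseCommit[] resources = { resourceA, resourceB, null }; // null entries are tolerated and skipped
    TwoPhaseCommitTool.Execute(resources); // prepare all, then commit all; roll back all on failure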

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestUniqueTermCount.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestUniqueTermCount.cs b/src/Lucene.Net.Tests/Index/TestUniqueTermCount.cs
new file mode 100644
index 0000000..46f4722
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestUniqueTermCount.cs
@@ -0,0 +1,122 @@
+using Lucene.Net.Analysis;
+using Lucene.Net.Documents;
+using Lucene.Net.Search;
+using Lucene.Net.Search.Similarities;
+using Lucene.Net.Store;
+using Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Tests the uniqueTermCount statistic in FieldInvertState
+    /// </summary>
+    public class TestUniqueTermCount : LuceneTestCase
+    {
+        Directory dir;
+        IndexReader reader;
+        /* expected uniqueTermCount values for our documents */
+        List<int> expected = new List<int>();
+
+        public override void SetUp()
+        {
+            base.SetUp();
+            dir = NewDirectory();
+            MockAnalyzer analyzer = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            config.SetMergePolicy(NewLogMergePolicy());
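+            // TestSimilarity (defined below) stores each document's unique term count
+            // directly as its norm, so the test can read the statistic back through
+            // MultiDocValues.GetNormValues.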
+            config.SetSimilarity(new TestSimilarity());
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, config);
+            Document doc = new Document();
+            Field foo = NewTextField("foo", "", Field.Store.NO);
+            doc.Add(foo);
+            for (int i = 0; i < 100; i++)
+            {
+                foo.SetStringValue(AddValue());
+                writer.AddDocument(doc);
+            }
+            reader = writer.Reader;
+            writer.Dispose();
+        }
+
+        public override void TearDown()
+        {
+            reader.Dispose();
+            dir.Dispose();
+            base.TearDown();
+        }
+
+        [Test]
+        public void Test()
+        {
+            NumericDocValues fooNorms = MultiDocValues.GetNormValues(reader, "foo");
+            assertNotNull(fooNorms);
+            for (int i = 0; i < reader.MaxDoc; i++)
+            {
+                assertEquals(expected[i], fooNorms.Get(i));
+            }
+        }
+
+        /// <summary>
+        /// Makes a bunch of single-char tokens (so there can be at most 26 unique terms) and
+        /// records the number of unique terms in expected, to be checked against the norm.
+        /// </summary>
+        private string AddValue()
+        {
+            StringBuilder sb = new StringBuilder();
+            HashSet<string> terms = new HashSet<string>();
+            int num = TestUtil.NextInt(Random(), 0, 255);
+            for (int i = 0; i < num; i++)
+            {
+                sb.Append(' ');
+                char term = (char)TestUtil.NextInt(Random(), 'a', 'z');
+                sb.Append(term);
+                terms.Add(term.ToString());
+            }
+            expected.Add(terms.Count);
+            return sb.ToString();
+        }
+
+        /// <summary>
+        /// Simple similarity that encodes the unique term count directly as the norm.
+        /// </summary>
+        internal class TestSimilarity : Similarity
+        {
+
+            public override long ComputeNorm(FieldInvertState state)
+            {
+                return state.UniqueTermCount;
+            }
+
+            public override SimWeight ComputeWeight(float queryBoost, CollectionStatistics collectionStats, params TermStatistics[] termStats)
+            {
+                throw new InvalidOperationException();
+            }
+
+            public override SimScorer GetSimScorer(SimWeight weight, AtomicReaderContext context)
+            {
+                throw new InvalidOperationException();
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/bogus24.upgraded.to.36.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/bogus24.upgraded.to.36.zip b/src/Lucene.Net.Tests/Index/bogus24.upgraded.to.36.zip
new file mode 100644
index 0000000..52a09f9
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/bogus24.upgraded.to.36.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.30.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.30.cfs.zip b/src/Lucene.Net.Tests/Index/index.30.cfs.zip
new file mode 100644
index 0000000..d5978c8
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.30.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.30.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.30.nocfs.zip b/src/Lucene.Net.Tests/Index/index.30.nocfs.zip
new file mode 100644
index 0000000..28cd83b
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.30.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.31.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.31.cfs.zip b/src/Lucene.Net.Tests/Index/index.31.cfs.zip
new file mode 100644
index 0000000..8f123a7
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.31.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.31.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.31.nocfs.zip b/src/Lucene.Net.Tests/Index/index.31.nocfs.zip
new file mode 100644
index 0000000..21434e1
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.31.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.31.optimized.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.31.optimized.cfs.zip b/src/Lucene.Net.Tests/Index/index.31.optimized.cfs.zip
new file mode 100644
index 0000000..200c710
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.31.optimized.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.31.optimized.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.31.optimized.nocfs.zip b/src/Lucene.Net.Tests/Index/index.31.optimized.nocfs.zip
new file mode 100644
index 0000000..9a158f1
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.31.optimized.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.32.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.32.cfs.zip b/src/Lucene.Net.Tests/Index/index.32.cfs.zip
new file mode 100644
index 0000000..eff3153
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.32.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.32.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.32.nocfs.zip b/src/Lucene.Net.Tests/Index/index.32.nocfs.zip
new file mode 100644
index 0000000..0b345da
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.32.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.34.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.34.cfs.zip b/src/Lucene.Net.Tests/Index/index.34.cfs.zip
new file mode 100644
index 0000000..257e9d8
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.34.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.34.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.34.nocfs.zip b/src/Lucene.Net.Tests/Index/index.34.nocfs.zip
new file mode 100644
index 0000000..935d6a1
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.34.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.36.surrogates.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.36.surrogates.zip b/src/Lucene.Net.Tests/Index/index.36.surrogates.zip
new file mode 100644
index 0000000..6bd7f20
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.36.surrogates.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.40.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.40.cfs.zip b/src/Lucene.Net.Tests/Index/index.40.cfs.zip
new file mode 100644
index 0000000..4974749
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.40.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.40.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.40.nocfs.zip b/src/Lucene.Net.Tests/Index/index.40.nocfs.zip
new file mode 100644
index 0000000..9699080
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.40.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.40.optimized.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.40.optimized.cfs.zip b/src/Lucene.Net.Tests/Index/index.40.optimized.cfs.zip
new file mode 100644
index 0000000..209c436
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.40.optimized.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.40.optimized.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.40.optimized.nocfs.zip b/src/Lucene.Net.Tests/Index/index.40.optimized.nocfs.zip
new file mode 100644
index 0000000..0eaffd0
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.40.optimized.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.41.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.41.cfs.zip b/src/Lucene.Net.Tests/Index/index.41.cfs.zip
new file mode 100644
index 0000000..da2745e
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.41.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.41.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.41.nocfs.zip b/src/Lucene.Net.Tests/Index/index.41.nocfs.zip
new file mode 100644
index 0000000..c056bcb
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.41.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.42.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.42.cfs.zip b/src/Lucene.Net.Tests/Index/index.42.cfs.zip
new file mode 100644
index 0000000..5945fe5
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.42.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.42.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.42.nocfs.zip b/src/Lucene.Net.Tests/Index/index.42.nocfs.zip
new file mode 100644
index 0000000..11de1f1
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.42.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.45.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.45.cfs.zip b/src/Lucene.Net.Tests/Index/index.45.cfs.zip
new file mode 100644
index 0000000..10a8a1a
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.45.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.45.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.45.nocfs.zip b/src/Lucene.Net.Tests/Index/index.45.nocfs.zip
new file mode 100644
index 0000000..7825e2a
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.45.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.461.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.461.cfs.zip b/src/Lucene.Net.Tests/Index/index.461.cfs.zip
new file mode 100644
index 0000000..8f18185
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.461.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/index.461.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/index.461.nocfs.zip b/src/Lucene.Net.Tests/Index/index.461.nocfs.zip
new file mode 100644
index 0000000..cf0173c
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/index.461.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/moreterms.40.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/moreterms.40.zip b/src/Lucene.Net.Tests/Index/moreterms.40.zip
new file mode 100644
index 0000000..53ad7ce
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/moreterms.40.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/unsupported.19.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/unsupported.19.cfs.zip b/src/Lucene.Net.Tests/Index/unsupported.19.cfs.zip
new file mode 100644
index 0000000..4fd9b32
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/unsupported.19.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/unsupported.19.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/unsupported.19.nocfs.zip b/src/Lucene.Net.Tests/Index/unsupported.19.nocfs.zip
new file mode 100644
index 0000000..e0d9142
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/unsupported.19.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/unsupported.20.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/unsupported.20.cfs.zip b/src/Lucene.Net.Tests/Index/unsupported.20.cfs.zip
new file mode 100644
index 0000000..4b931ae
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/unsupported.20.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/unsupported.20.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/unsupported.20.nocfs.zip b/src/Lucene.Net.Tests/Index/unsupported.20.nocfs.zip
new file mode 100644
index 0000000..1275cdf
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/unsupported.20.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/unsupported.21.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/unsupported.21.cfs.zip b/src/Lucene.Net.Tests/Index/unsupported.21.cfs.zip
new file mode 100644
index 0000000..473c138
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/unsupported.21.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/unsupported.21.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/unsupported.21.nocfs.zip b/src/Lucene.Net.Tests/Index/unsupported.21.nocfs.zip
new file mode 100644
index 0000000..d0582d0
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/unsupported.21.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/unsupported.22.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/unsupported.22.cfs.zip b/src/Lucene.Net.Tests/Index/unsupported.22.cfs.zip
new file mode 100644
index 0000000..1236307
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/unsupported.22.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/unsupported.22.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/unsupported.22.nocfs.zip b/src/Lucene.Net.Tests/Index/unsupported.22.nocfs.zip
new file mode 100644
index 0000000..216ddf3
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/unsupported.22.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/unsupported.23.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/unsupported.23.cfs.zip b/src/Lucene.Net.Tests/Index/unsupported.23.cfs.zip
new file mode 100644
index 0000000..b5fdeef
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/unsupported.23.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/unsupported.23.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/unsupported.23.nocfs.zip b/src/Lucene.Net.Tests/Index/unsupported.23.nocfs.zip
new file mode 100644
index 0000000..9137ae6
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/unsupported.23.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/unsupported.24.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/unsupported.24.cfs.zip b/src/Lucene.Net.Tests/Index/unsupported.24.cfs.zip
new file mode 100644
index 0000000..2c666a9
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/unsupported.24.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/unsupported.24.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/unsupported.24.nocfs.zip b/src/Lucene.Net.Tests/Index/unsupported.24.nocfs.zip
new file mode 100644
index 0000000..c223875
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/unsupported.24.nocfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/unsupported.29.cfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/unsupported.29.cfs.zip b/src/Lucene.Net.Tests/Index/unsupported.29.cfs.zip
new file mode 100644
index 0000000..c694c78
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/unsupported.29.cfs.zip differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/unsupported.29.nocfs.zip
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/unsupported.29.nocfs.zip b/src/Lucene.Net.Tests/Index/unsupported.29.nocfs.zip
new file mode 100644
index 0000000..298cab7
Binary files /dev/null and b/src/Lucene.Net.Tests/Index/unsupported.29.nocfs.zip differ


[16/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestTermsEnum.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestTermsEnum.cs b/src/Lucene.Net.Tests/Index/TestTermsEnum.cs
new file mode 100644
index 0000000..ed848b3
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestTermsEnum.cs
@@ -0,0 +1,1050 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using Lucene.Net.Attributes;
+using Lucene.Net.Documents;
+using Lucene.Net.Search;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using Automaton = Lucene.Net.Util.Automaton.Automaton;
+    using BasicAutomata = Lucene.Net.Util.Automaton.BasicAutomata;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CompiledAutomaton = Lucene.Net.Util.Automaton.CompiledAutomaton;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using Int32Field = Int32Field;
+    using LineFileDocs = Lucene.Net.Util.LineFileDocs;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RegExp = Lucene.Net.Util.Automaton.RegExp;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [SuppressCodecs("SimpleText", "Memory", "Direct")]
+    [TestFixture]
+    public class TestTermsEnum : LuceneTestCase
+    {
+        [Test]
+        public virtual void Test()
+        {
+            Random random = new Random(Random().Next());
+            LineFileDocs docs = new LineFileDocs(random, DefaultCodecSupportsDocValues());
+            Directory d = NewDirectory();
+            MockAnalyzer analyzer = new MockAnalyzer(Random());
+            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, analyzer, Similarity, TimeZone);
+            int numDocs = AtLeast(10);
+            for (int docCount = 0; docCount < numDocs; docCount++)
+            {
+                w.AddDocument(docs.NextDoc());
+            }
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            List<BytesRef> terms = new List<BytesRef>();
+            TermsEnum termsEnum = MultiFields.GetTerms(r, "body").GetIterator(null);
+            BytesRef term;
+            while ((term = termsEnum.Next()) != null)
+            {
+                terms.Add(BytesRef.DeepCopyOf(term));
+            }
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: " + terms.Count + " terms");
+            }
+
+            int upto = -1;
+            int iters = AtLeast(200);
+            for (int iter = 0; iter < iters; iter++)
+            {
+                bool isEnd;
+                if (upto != -1 && Random().NextBoolean())
+                {
+                    // next
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: iter next");
+                    }
+                    isEnd = termsEnum.Next() == null;
+                    upto++;
+                    if (isEnd)
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("  end");
+                        }
+                        Assert.AreEqual(upto, terms.Count);
+                        upto = -1;
+                    }
+                    else
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("  got term=" + termsEnum.Term.Utf8ToString() + " expected=" + terms[upto].Utf8ToString());
+                        }
+                        Assert.IsTrue(upto < terms.Count);
+                        Assert.AreEqual(terms[upto], termsEnum.Term);
+                    }
+                }
+                else
+                {
+                    BytesRef target;
+                    string exists;
+                    if (Random().NextBoolean())
+                    {
+                        // likely fake term
+                        if (Random().NextBoolean())
+                        {
+                            target = new BytesRef(TestUtil.RandomSimpleString(Random()));
+                        }
+                        else
+                        {
+                            target = new BytesRef(TestUtil.RandomRealisticUnicodeString(Random()));
+                        }
+                        exists = "likely not";
+                    }
+                    else
+                    {
+                        // real term
+                        target = terms[Random().Next(terms.Count)];
+                        exists = "yes";
+                    }
+
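+                    // List.BinarySearch returns the bitwise complement of the insertion
+                    // point when the target is absent; the branches below decode that to
+                    // predict what seekCeil/seekExact should report.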
+                    upto = terms.BinarySearch(target);
+
+                    if (Random().NextBoolean())
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: iter seekCeil target=" + target.Utf8ToString() + " exists=" + exists);
+                        }
+                        // seekCeil
+                        TermsEnum.SeekStatus status = termsEnum.SeekCeil(target);
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("  got " + status);
+                        }
+
+                        if (upto < 0)
+                        {
+                            upto = -(upto + 1);
+                            if (upto >= terms.Count)
+                            {
+                                Assert.AreEqual(TermsEnum.SeekStatus.END, status);
+                                upto = -1;
+                            }
+                            else
+                            {
+                                Assert.AreEqual(TermsEnum.SeekStatus.NOT_FOUND, status);
+                                Assert.AreEqual(terms[upto], termsEnum.Term);
+                            }
+                        }
+                        else
+                        {
+                            Assert.AreEqual(TermsEnum.SeekStatus.FOUND, status);
+                            Assert.AreEqual(terms[upto], termsEnum.Term);
+                        }
+                    }
+                    else
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: iter seekExact target=" + target.Utf8ToString() + " exists=" + exists);
+                        }
+                        // seekExact
+                        bool result = termsEnum.SeekExact(target);
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("  got " + result);
+                        }
+                        if (upto < 0)
+                        {
+                            Assert.IsFalse(result);
+                            upto = -1;
+                        }
+                        else
+                        {
+                            Assert.IsTrue(result);
+                            Assert.AreEqual(target, termsEnum.Term);
+                        }
+                    }
+                }
+            }
+
+            r.Dispose();
+            d.Dispose();
+            docs.Dispose();
+        }
+
+        private void AddDoc(RandomIndexWriter w, ICollection<string> terms, IDictionary<BytesRef, int?> termToID, int id)
+        {
+            Document doc = new Document();
+            doc.Add(new Int32Field("id", id, Field.Store.NO));
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: addDoc id:" + id + " terms=" + terms);
+            }
+            foreach (string s2 in terms)
+            {
+                doc.Add(NewStringField("f", s2, Field.Store.NO));
+                termToID[new BytesRef(s2)] = id;
+            }
+            w.AddDocument(doc);
+            terms.Clear();
+        }
+
+        private bool Accepts(CompiledAutomaton c, BytesRef b)
+        {
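+            // step the compiled byte automaton over the term's bytes; the term is
+            // accepted iff we end in an accept state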
+            int state = c.RunAutomaton.InitialState;
+            for (int idx = 0; idx < b.Length; idx++)
+            {
+                Assert.IsTrue(state != -1);
+                state = c.RunAutomaton.Step(state, b.Bytes[b.Offset + idx] & 0xff);
+            }
+            return c.RunAutomaton.IsAccept(state);
+        }
+
+        // Tests Terms.intersect
+#if !NETSTANDARD
+        // LUCENENET: NUnit's Timeout attribute is not available on .NET Core.
+        [Timeout(int.MaxValue)]
+#endif
+        [Test, LongRunningTest, HasTimeout]
+        public virtual void TestIntersectRandom()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+
+            int numTerms = AtLeast(300);
+            //int numTerms = 50;
+
+            HashSet<string> terms = new HashSet<string>();
+            ICollection<string> pendingTerms = new List<string>();
+            IDictionary<BytesRef, int?> termToID = new Dictionary<BytesRef, int?>();
+            int id = 0;
+            while (terms.Count != numTerms)
+            {
+                string s = RandomString;
+                if (!terms.Contains(s))
+                {
+                    terms.Add(s);
+                    pendingTerms.Add(s);
+                    if (Random().Next(20) == 7)
+                    {
+                        AddDoc(w, pendingTerms, termToID, id++);
+                    }
+                }
+            }
+            AddDoc(w, pendingTerms, termToID, id++);
+
+            BytesRef[] termsArray = new BytesRef[terms.Count];
+            HashSet<BytesRef> termsSet = new HashSet<BytesRef>();
+            {
+                int upto = 0;
+                foreach (string s in terms)
+                {
+                    BytesRef b = new BytesRef(s);
+                    termsArray[upto++] = b;
+                    termsSet.Add(b);
+                }
+                Array.Sort(termsArray);
+            }
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: indexed terms (unicode order):");
+                foreach (BytesRef t in termsArray)
+                {
+                    Console.WriteLine("  " + t.Utf8ToString() + " -> id:" + termToID[t]);
+                }
+            }
+
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            // NOTE: intentional insanity!!
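+            // (FieldCache over a SlowCompositeReaderWrapper is normally a performance
+            // trap; here it just gives us a cheap docID -> id lookup for verification)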
+            FieldCache.Int32s docIDToID = FieldCache.DEFAULT.GetInt32s(SlowCompositeReaderWrapper.Wrap(r), "id", false);
+
+            for (int iter = 0; iter < 10 * RANDOM_MULTIPLIER; iter++)
+            {
+                // TODO: can we also test infinite As here...?
+
+                // From the random terms, pick some ratio and compile an
+                // automaton:
+                HashSet<string> acceptTerms = new HashSet<string>();
+                SortedSet<BytesRef> sortedAcceptTerms = new SortedSet<BytesRef>();
+                double keepPct = Random().NextDouble();
+                Automaton a;
+                if (iter == 0)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("\nTEST: empty automaton");
+                    }
+                    a = BasicAutomata.MakeEmpty();
+                }
+                else
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("\nTEST: keepPct=" + keepPct);
+                    }
+                    foreach (string s in terms)
+                    {
+                        string s2;
+                        if (Random().NextDouble() <= keepPct)
+                        {
+                            s2 = s;
+                        }
+                        else
+                        {
+                            s2 = RandomString;
+                        }
+                        acceptTerms.Add(s2);
+                        sortedAcceptTerms.Add(new BytesRef(s2));
+                    }
+                    a = BasicAutomata.MakeStringUnion(sortedAcceptTerms);
+                }
+
+                if (Random().NextBoolean())
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: reduce the automaton");
+                    }
+                    a.Reduce();
+                }
+
+                CompiledAutomaton c = new CompiledAutomaton(a, true, false);
+
+                BytesRef[] acceptTermsArray = new BytesRef[acceptTerms.Count];
+                HashSet<BytesRef> acceptTermsSet = new HashSet<BytesRef>();
+                int upto = 0;
+                foreach (string s in acceptTerms)
+                {
+                    BytesRef b = new BytesRef(s);
+                    acceptTermsArray[upto++] = b;
+                    acceptTermsSet.Add(b);
+                    Assert.IsTrue(Accepts(c, b));
+                }
+                Array.Sort(acceptTermsArray);
+
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: accept terms (unicode order):");
+                    foreach (BytesRef t in acceptTermsArray)
+                    {
+                        Console.WriteLine("  " + t.Utf8ToString() + (termsSet.Contains(t) ? " (exists)" : ""));
+                    }
+                    Console.WriteLine(a.ToDot());
+                }
+
+                for (int iter2 = 0; iter2 < 100; iter2++)
+                {
+                    BytesRef startTerm = acceptTermsArray.Length == 0 || Random().NextBoolean() ? null : acceptTermsArray[Random().Next(acceptTermsArray.Length)];
+
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("\nTEST: iter2=" + iter2 + " startTerm=" + (startTerm == null ? "<null>" : startTerm.Utf8ToString()));
+
+                        if (startTerm != null)
+                        {
+                            int state = c.RunAutomaton.InitialState;
+                            for (int idx = 0; idx < startTerm.Length; idx++)
+                            {
+                                int label = startTerm.Bytes[startTerm.Offset + idx] & 0xff;
+                                Console.WriteLine("  state=" + state + " label=" + label);
+                                state = c.RunAutomaton.Step(state, label);
+                                Assert.IsTrue(state != -1);
+                            }
+                            Console.WriteLine("  state=" + state);
+                        }
+                    }
+
+                    TermsEnum te = MultiFields.GetTerms(r, "f").Intersect(c, startTerm);
+
+                    int loc;
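+                    // loc = index (in termsArray) of the first term the intersected
+                    // enum is expected to return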
+                    if (startTerm == null)
+                    {
+                        loc = 0;
+                    }
+                    else
+                    {
+                        loc = Array.BinarySearch(termsArray, BytesRef.DeepCopyOf(startTerm));
+                        if (loc < 0)
+                        {
+                            loc = -(loc + 1);
+                        }
+                        else
+                        {
+                            // startTerm exists in index
+                            loc++;
+                        }
+                    }
+                    while (loc < termsArray.Length && !acceptTermsSet.Contains(termsArray[loc]))
+                    {
+                        loc++;
+                    }
+
+                    DocsEnum docsEnum = null;
+                    while (loc < termsArray.Length)
+                    {
+                        BytesRef expected = termsArray[loc];
+                        BytesRef actual = te.Next();
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST:   next() expected=" + expected.Utf8ToString() + " actual=" + (actual == null ? "null" : actual.Utf8ToString()));
+                        }
+                        Assert.AreEqual(expected, actual);
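+                        // every term was indexed into exactly one document, so docFreq must be 1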
+                        Assert.AreEqual(1, te.DocFreq);
+                        docsEnum = TestUtil.Docs(Random(), te, null, docsEnum, DocsEnum.FLAG_NONE);
+                        int docID = docsEnum.NextDoc();
+                        Assert.IsTrue(docID != DocIdSetIterator.NO_MORE_DOCS);
+                        Assert.AreEqual(docIDToID.Get(docID), (int)termToID[expected]);
+                        do
+                        {
+                            loc++;
+                        } while (loc < termsArray.Length && !acceptTermsSet.Contains(termsArray[loc]));
+                    }
+                    Assert.IsNull(te.Next());
+                }
+            }
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        private readonly string FIELD = "field";
+
+        private IndexReader MakeIndex(Directory d, params string[] terms)
+        {
+            var iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+
+            /*
+            iwc.SetCodec(new StandardCodec(minTermsInBlock, maxTermsInBlock));
+            */
+
+            using (var w = new RandomIndexWriter(Random(), d, iwc))
+            {
+                foreach (string term in terms)
+                {
+                    var doc = new Document();
+                    var f = NewStringField(FIELD, term, Field.Store.NO);
+                    doc.Add(f);
+                    w.AddDocument(doc);
+                }
+
+                return w.Reader;
+            }
+        }
+
+        private int DocFreq(IndexReader r, string term)
+        {
+            return r.DocFreq(new Term(FIELD, term));
+        }
+
+        [Test]
+        public virtual void TestEasy()
+        {
+            // No floor arcs:
+            using (var d = NewDirectory())
+            using (var r = MakeIndex(d, "aa0", "aa1", "aa2", "aa3", "bb0", "bb1", "bb2", "bb3", "aa"))
+            {
+                // First term in block:
+                Assert.AreEqual(1, DocFreq(r, "aa0"));
+
+                // Scan forward to another term in same block
+                Assert.AreEqual(1, DocFreq(r, "aa2"));
+
+                Assert.AreEqual(1, DocFreq(r, "aa"));
+
+                // Reset same block then scan forwards
+                Assert.AreEqual(1, DocFreq(r, "aa1"));
+
+                // Not found, in same block
+                Assert.AreEqual(0, DocFreq(r, "aa5"));
+
+                // Found, in same block
+                Assert.AreEqual(1, DocFreq(r, "aa2"));
+
+                // Not found in index:
+                Assert.AreEqual(0, DocFreq(r, "b0"));
+
+                // Found:
+                Assert.AreEqual(1, DocFreq(r, "aa2"));
+
+                // Found, rewind:
+                Assert.AreEqual(1, DocFreq(r, "aa0"));
+
+                // First term in block:
+                Assert.AreEqual(1, DocFreq(r, "bb0"));
+
+                // Scan forward to another term in same block
+                Assert.AreEqual(1, DocFreq(r, "bb2"));
+
+                // Reset same block then scan forwards
+                Assert.AreEqual(1, DocFreq(r, "bb1"));
+
+                // Not found, in same block
+                Assert.AreEqual(0, DocFreq(r, "bb5"));
+
+                // Found, in same block
+                Assert.AreEqual(1, DocFreq(r, "bb2"));
+
+                // Not found in index:
+                Assert.AreEqual(0, DocFreq(r, "b0"));
+
+                // Found:
+                Assert.AreEqual(1, DocFreq(r, "bb2"));
+
+                // Found, rewind:
+                Assert.AreEqual(1, DocFreq(r, "bb0"));
+            }
+        }
+
+        // tests:
+        //   - the same prefix has both a non-floor block and a floor block (i.e., 2 long outputs on the same term prefix)
+        //   - a term that's entirely in the index
+
+        [Test]
+        public virtual void TestFloorBlocks()
+        {
+            var terms = new[] { "aa0", "aa1", "aa2", "aa3", "aa4", "aa5", "aa6", "aa7", "aa8", "aa9", "aa", "xx" };
+
+            using (var d = NewDirectory())
+            using (var r = MakeIndex(d, terms))
+            {
+                // First term in first block:
+                Assert.AreEqual(1, DocFreq(r, "aa0"));
+                Assert.AreEqual(1, DocFreq(r, "aa4"));
+
+                // No block
+                Assert.AreEqual(0, DocFreq(r, "bb0"));
+
+                // Second block
+                Assert.AreEqual(1, DocFreq(r, "aa4"));
+
+                // Backwards to prior floor block:
+                Assert.AreEqual(1, DocFreq(r, "aa0"));
+
+                // Forwards to last floor block:
+                Assert.AreEqual(1, DocFreq(r, "aa9"));
+
+                Assert.AreEqual(0, DocFreq(r, "a"));
+                Assert.AreEqual(1, DocFreq(r, "aa"));
+                Assert.AreEqual(0, DocFreq(r, "a"));
+                Assert.AreEqual(1, DocFreq(r, "aa"));
+
+                // Forwards to last floor block:
+                Assert.AreEqual(1, DocFreq(r, "xx"));
+                Assert.AreEqual(1, DocFreq(r, "aa1"));
+                Assert.AreEqual(0, DocFreq(r, "yy"));
+
+                Assert.AreEqual(1, DocFreq(r, "xx"));
+                Assert.AreEqual(1, DocFreq(r, "aa9"));
+
+                Assert.AreEqual(1, DocFreq(r, "xx"));
+                Assert.AreEqual(1, DocFreq(r, "aa4"));
+
+                TermsEnum te = MultiFields.GetTerms(r, FIELD).GetIterator(null);
+                while (te.Next() != null)
+                {
+                    //Console.WriteLine("TEST: next term=" + te.Term.Utf8ToString());
+                }
+
+                Assert.IsTrue(SeekExact(te, "aa1"));
+                Assert.AreEqual("aa2", Next(te));
+                Assert.IsTrue(SeekExact(te, "aa8"));
+                Assert.AreEqual("aa9", Next(te));
+                Assert.AreEqual("xx", Next(te));
+
+                TestRandomSeeks(r, terms);
+            }
+        }
+
+        [Test]
+        public virtual void TestZeroTerms()
+        {
+            var d = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "one two three", Field.Store.NO));
+            doc = new Document();
+            doc.Add(NewTextField("field2", "one two three", Field.Store.NO));
+            w.AddDocument(doc);
+            w.Commit();
+            w.DeleteDocuments(new Term("field", "one"));
+            w.ForceMerge(1);
+            IndexReader r = w.Reader;
+            w.Dispose();
+            Assert.AreEqual(1, r.NumDocs);
+            Assert.AreEqual(1, r.MaxDoc);
+            Terms terms = MultiFields.GetTerms(r, "field");
+            if (terms != null)
+            {
+                Assert.IsNull(terms.GetIterator(null).Next());
+            }
+            r.Dispose();
+            d.Dispose();
+        }
+
+        private string RandomString
+        {
+            get
+            {
+                //return TestUtil.RandomSimpleString(Random());
+                return TestUtil.RandomRealisticUnicodeString(Random());
+            }
+        }
+
+        [Test]
+        public virtual void TestRandomTerms()
+        {
+            var terms = new string[TestUtil.NextInt(Random(), 1, AtLeast(1000))];
+            var seen = new HashSet<string>();
+
+            var allowEmptyString = Random().NextBoolean();
+
+            if (Random().Next(10) == 7 && terms.Length > 2)
+            {
+                // Sometimes add a bunch of terms sharing a longish common prefix:
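+                // (shared prefixes exercise the term dictionary's prefix and floor-block handling)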
+                int numTermsSamePrefix = Random().Next(terms.Length / 2);
+                if (numTermsSamePrefix > 0)
+                {
+                    string prefix;
+                    do
+                    {
+                        prefix = RandomString;
+                    } while (prefix.Length < 5);
+                    while (seen.Count < numTermsSamePrefix)
+                    {
+                        string t = prefix + RandomString;
+                        if (!seen.Contains(t))
+                        {
+                            terms[seen.Count] = t;
+                            seen.Add(t);
+                        }
+                    }
+                }
+            }
+
+            while (seen.Count < terms.Length)
+            {
+                string t = RandomString;
+                if (!seen.Contains(t) && (allowEmptyString || t.Length != 0))
+                {
+                    terms[seen.Count] = t;
+                    seen.Add(t);
+                }
+            }
+
+            using (var d = NewDirectory())
+            using (var r = MakeIndex(d, terms))
+            {
+                TestRandomSeeks(r, terms);
+            }
+        }
+
+        // sugar
+        private bool SeekExact(TermsEnum te, string term)
+        {
+            return te.SeekExact(new BytesRef(term));
+        }
+
+        // sugar
+        private string Next(TermsEnum te)
+        {
+            BytesRef br = te.Next();
+            if (br == null)
+            {
+                return null;
+            }
+            else
+            {
+                return br.Utf8ToString();
+            }
+        }
+
+        private BytesRef GetNonExistTerm(BytesRef[] terms)
+        {
+            BytesRef t = null;
+            while (true)
+            {
+                string ts = RandomString;
+                t = new BytesRef(ts);
+                if (Array.BinarySearch(terms, t) < 0)
+                {
+                    return t;
+                }
+            }
+        }
+
+        private class TermAndState
+        {
+            public readonly BytesRef Term;
+            public readonly TermState State;
+
+            public TermAndState(BytesRef term, TermState state)
+            {
+                this.Term = term;
+                this.State = state;
+            }
+        }
+
+        private void TestRandomSeeks(IndexReader r, params string[] validTermStrings)
+        {
+            BytesRef[] validTerms = new BytesRef[validTermStrings.Length];
+            for (int termIDX = 0; termIDX < validTermStrings.Length; termIDX++)
+            {
+                validTerms[termIDX] = new BytesRef(validTermStrings[termIDX]);
+            }
+            Array.Sort(validTerms);
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: " + validTerms.Length + " terms:");
+                foreach (BytesRef t in validTerms)
+                {
+                    Console.WriteLine("  " + t.Utf8ToString() + " " + t);
+                }
+            }
+            TermsEnum te = MultiFields.GetTerms(r, FIELD).GetIterator(null);
+
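+            // Array.BinarySearch encodes a missing term as -(insertionPoint) - 1, so a
+            // term that sorts after every valid term maps exactly to END_LOC.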
+            int END_LOC = -validTerms.Length - 1;
+
+            IList<TermAndState> termStates = new List<TermAndState>();
+
+            for (int iter = 0; iter < 100 * RANDOM_MULTIPLIER; iter++)
+            {
+                BytesRef t;
+                int loc;
+                TermState termState;
+                if (Random().Next(6) == 4)
+                {
+                    // pick a term that doesn't exist:
+                    t = GetNonExistTerm(validTerms);
+                    termState = null;
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("\nTEST: invalid term=" + t.Utf8ToString());
+                    }
+                    loc = Array.BinarySearch(validTerms, t);
+                }
+                else if (termStates.Count != 0 && Random().Next(4) == 1)
+                {
+                    TermAndState ts = termStates[Random().Next(termStates.Count)];
+                    t = ts.Term;
+                    loc = Array.BinarySearch(validTerms, t);
+                    Assert.IsTrue(loc >= 0);
+                    termState = ts.State;
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("\nTEST: valid termState term=" + t.Utf8ToString());
+                    }
+                }
+                else
+                {
+                    // pick valid term
+                    loc = Random().Next(validTerms.Length);
+                    t = BytesRef.DeepCopyOf(validTerms[loc]);
+                    termState = null;
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("\nTEST: valid term=" + t.Utf8ToString());
+                    }
+                }
+
+                // seekCeil or seekExact:
+                bool doSeekExact = Random().NextBoolean();
+                if (termState != null)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("  seekExact termState");
+                    }
+                    te.SeekExact(t, termState);
+                }
+                else if (doSeekExact)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("  seekExact");
+                    }
+                    Assert.AreEqual(loc >= 0, te.SeekExact(t));
+                }
+                else
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("  seekCeil");
+                    }
+
+                    TermsEnum.SeekStatus result = te.SeekCeil(t);
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("  got " + result);
+                    }
+
+                    if (loc >= 0)
+                    {
+                        Assert.AreEqual(TermsEnum.SeekStatus.FOUND, result);
+                    }
+                    else if (loc == END_LOC)
+                    {
+                        Assert.AreEqual(TermsEnum.SeekStatus.END, result);
+                    }
+                    else
+                    {
+                        Debug.Assert(loc >= -validTerms.Length);
+                        Assert.AreEqual(TermsEnum.SeekStatus.NOT_FOUND, result);
+                    }
+                }
+
+                if (loc >= 0)
+                {
+                    Assert.AreEqual(t, te.Term);
+                }
+                else if (doSeekExact)
+                {
+                    // TermsEnum is unpositioned if seekExact returns false
+                    continue;
+                }
+                else if (loc == END_LOC)
+                {
+                    continue;
+                }
+                else
+                {
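+                    // decode the BinarySearch complement: validTerms[loc] is the smallest
+                    // term greater than t, which is where seekCeil must have landed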
+                    loc = -loc - 1;
+                    Assert.AreEqual(validTerms[loc], te.Term);
+                }
+
+                // Do a bunch of next's after the seek
+                int numNext = Random().Next(validTerms.Length);
+
+                for (int nextCount = 0; nextCount < numNext; nextCount++)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("\nTEST: next loc=" + loc + " of " + validTerms.Length);
+                    }
+                    BytesRef t2 = te.Next();
+                    loc++;
+                    if (loc == validTerms.Length)
+                    {
+                        Assert.IsNull(t2);
+                        break;
+                    }
+                    else
+                    {
+                        Assert.AreEqual(validTerms[loc], t2);
+                        if (Random().Next(40) == 17 && termStates.Count < 100)
+                        {
+                            termStates.Add(new TermAndState(validTerms[loc], te.GetTermState()));
+                        }
+                    }
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestIntersectBasic()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetMergePolicy(new LogDocMergePolicy());
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, iwc);
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "aaa", Field.Store.NO));
+            w.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(NewStringField("field", "bbb", Field.Store.NO));
+            w.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(NewTextField("field", "ccc", Field.Store.NO));
+            w.AddDocument(doc);
+
+            w.ForceMerge(1);
+            DirectoryReader r = w.Reader;
+            w.Dispose();
+            AtomicReader sub = GetOnlySegmentReader(r);
+            Terms terms = sub.Fields.GetTerms("field");
+            Automaton automaton = (new RegExp(".*", RegExp.NONE)).ToAutomaton();
+            CompiledAutomaton ca = new CompiledAutomaton(automaton, false, false);
+            TermsEnum te = terms.Intersect(ca, null);
+            Assert.AreEqual("aaa", te.Next().Utf8ToString());
+            Assert.AreEqual(0, te.Docs(null, null, DocsEnum.FLAG_NONE).NextDoc());
+            Assert.AreEqual("bbb", te.Next().Utf8ToString());
+            Assert.AreEqual(1, te.Docs(null, null, DocsEnum.FLAG_NONE).NextDoc());
+            Assert.AreEqual("ccc", te.Next().Utf8ToString());
+            Assert.AreEqual(2, te.Docs(null, null, DocsEnum.FLAG_NONE).NextDoc());
+            Assert.IsNull(te.Next());
+
+            te = terms.Intersect(ca, new BytesRef("abc"));
+            Assert.AreEqual("bbb", te.Next().Utf8ToString());
+            Assert.AreEqual(1, te.Docs(null, null, DocsEnum.FLAG_NONE).NextDoc());
+            Assert.AreEqual("ccc", te.Next().Utf8ToString());
+            Assert.AreEqual(2, te.Docs(null, null, DocsEnum.FLAG_NONE).NextDoc());
+            Assert.IsNull(te.Next());
+
+            te = terms.Intersect(ca, new BytesRef("aaa"));
+            Assert.AreEqual("bbb", te.Next().Utf8ToString());
+            Assert.AreEqual(1, te.Docs(null, null, DocsEnum.FLAG_NONE).NextDoc());
+            Assert.AreEqual("ccc", te.Next().Utf8ToString());
+            Assert.AreEqual(2, te.Docs(null, null, DocsEnum.FLAG_NONE).NextDoc());
+            Assert.IsNull(te.Next());
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIntersectStartTerm()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetMergePolicy(new LogDocMergePolicy());
+
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, iwc);
+            Document doc = new Document();
+            doc.Add(NewStringField("field", "abc", Field.Store.NO));
+            w.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(NewStringField("field", "abd", Field.Store.NO));
+            w.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(NewStringField("field", "acd", Field.Store.NO));
+            w.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(NewStringField("field", "bcd", Field.Store.NO));
+            w.AddDocument(doc);
+
+            w.ForceMerge(1);
+            DirectoryReader r = w.Reader;
+            w.Dispose();
+            AtomicReader sub = GetOnlySegmentReader(r);
+            Terms terms = sub.Fields.GetTerms("field");
+
+            Automaton automaton = (new RegExp(".*d", RegExp.NONE)).ToAutomaton();
+            CompiledAutomaton ca = new CompiledAutomaton(automaton, false, false);
+            TermsEnum te;
+
+            // should seek to startTerm
+            te = terms.Intersect(ca, new BytesRef("aad"));
+            Assert.AreEqual("abd", te.Next().Utf8ToString());
+            Assert.AreEqual(1, te.Docs(null, null, DocsEnum.FLAG_NONE).NextDoc());
+            Assert.AreEqual("acd", te.Next().Utf8ToString());
+            Assert.AreEqual(2, te.Docs(null, null, DocsEnum.FLAG_NONE).NextDoc());
+            Assert.AreEqual("bcd", te.Next().Utf8ToString());
+            Assert.AreEqual(3, te.Docs(null, null, DocsEnum.FLAG_NONE).NextDoc());
+            Assert.IsNull(te.Next());
+
+            // should fail to find ceil label on second arc, rewind
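+            // ("add" follows the 'a' arc but finds no outgoing label >= 'd' there, so the
+            // enum rewinds and resumes at "bcd")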
+            te = terms.Intersect(ca, new BytesRef("add"));
+            Assert.AreEqual("bcd", te.Next().Utf8ToString());
+            Assert.AreEqual(3, te.Docs(null, null, DocsEnum.FLAG_NONE).NextDoc());
+            Assert.IsNull(te.Next());
+
+            // should reach end
+            te = terms.Intersect(ca, new BytesRef("bcd"));
+            Assert.IsNull(te.Next());
+            te = terms.Intersect(ca, new BytesRef("ddd"));
+            Assert.IsNull(te.Next());
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIntersectEmptyString()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetMergePolicy(new LogDocMergePolicy());
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, iwc);
+            Document doc = new Document();
+            doc.Add(NewStringField("field", "", Field.Store.NO));
+            doc.Add(NewStringField("field", "abc", Field.Store.NO));
+            w.AddDocument(doc);
+
+            doc = new Document();
+            // add empty string to both documents, so that singletonDocID == -1.
+            // For an FST-based term dict, we'll expect to see the first arc is
+            // flagged with HAS_FINAL_OUTPUT
+            doc.Add(NewStringField("field", "abc", Field.Store.NO));
+            doc.Add(NewStringField("field", "", Field.Store.NO));
+            w.AddDocument(doc);
+
+            w.ForceMerge(1);
+            DirectoryReader r = w.Reader;
+            w.Dispose();
+            AtomicReader sub = GetOnlySegmentReader(r);
+            Terms terms = sub.Fields.GetTerms("field");
+
+            Automaton automaton = (new RegExp(".*", RegExp.NONE)).ToAutomaton(); // accept ALL
+            CompiledAutomaton ca = new CompiledAutomaton(automaton, false, false);
+
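+            // with a null startTerm the empty term itself comes back first; the exclusive
+            // startTerm case further below ("") skips it and starts at "abc"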
+            TermsEnum te = terms.Intersect(ca, null);
+            DocsEnum de;
+
+            Assert.AreEqual("", te.Next().Utf8ToString());
+            de = te.Docs(null, null, DocsEnum.FLAG_NONE);
+            Assert.AreEqual(0, de.NextDoc());
+            Assert.AreEqual(1, de.NextDoc());
+
+            Assert.AreEqual("abc", te.Next().Utf8ToString());
+            de = te.Docs(null, null, DocsEnum.FLAG_NONE);
+            Assert.AreEqual(0, de.NextDoc());
+            Assert.AreEqual(1, de.NextDoc());
+
+            Assert.IsNull(te.Next());
+
+            // pass empty string
+            te = terms.Intersect(ca, new BytesRef(""));
+
+            Assert.AreEqual("abc", te.Next().Utf8ToString());
+            de = te.Docs(null, null, DocsEnum.FLAG_NONE);
+            Assert.AreEqual(0, de.NextDoc());
+            Assert.AreEqual(1, de.NextDoc());
+
+            Assert.IsNull(te.Next());
+
+            r.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestTermsEnum2.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestTermsEnum2.cs b/src/Lucene.Net.Tests/Index/TestTermsEnum2.cs
new file mode 100644
index 0000000..734cfe0
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestTermsEnum2.cs
@@ -0,0 +1,204 @@
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using Lucene.Net.Util.Automaton;
+    using NUnit.Framework;
+    using AutomatonQuery = Lucene.Net.Search.AutomatonQuery;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CheckHits = Lucene.Net.Search.CheckHits;
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using SeekStatus = Lucene.Net.Index.TermsEnum.SeekStatus;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestTermsEnum2 : LuceneTestCase
+    {
+        private Directory Dir;
+        private IndexReader Reader;
+        private IndexSearcher Searcher;
+        private SortedSet<BytesRef> Terms; // the terms we put in the index
+        private Automaton TermsAutomaton; // automata of the same
+        internal int NumIterations;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            // we generate awful regexps: good for testing.
+            // but for the preflex codec the test can be very slow, so use fewer iterations.
+            NumIterations = Codec.Default.Name.Equals("Lucene3x") ? 10 * RANDOM_MULTIPLIER : AtLeast(50);
+            Dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false)).SetMaxBufferedDocs(TestUtil.NextInt(Random(), 50, 1000)));
+            Document doc = new Document();
+            Field field = NewStringField("field", "", Field.Store.YES);
+            doc.Add(field);
+            Terms = new SortedSet<BytesRef>();
+
+            int num = AtLeast(200);
+            for (int i = 0; i < num; i++)
+            {
+                string s = TestUtil.RandomUnicodeString(Random());
+                field.SetStringValue(s);
+                Terms.Add(new BytesRef(s));
+                writer.AddDocument(doc);
+            }
+
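+            // automaton accepting exactly the indexed terms; the tests below use it as
+            // ground truth against the index's TermsEnum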
+            TermsAutomaton = BasicAutomata.MakeStringUnion(Terms);
+
+            Reader = writer.Reader;
+            Searcher = NewSearcher(Reader);
+            writer.Dispose();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        /// <summary>
+        /// tests a pre-intersected automaton against the original </summary>
+        [Test]
+        public virtual void TestFiniteVersusInfinite()
+        {
+            for (int i = 0; i < NumIterations; i++)
+            {
+                string reg = AutomatonTestUtil.RandomRegexp(Random());
+                Automaton automaton = (new RegExp(reg, RegExp.NONE)).ToAutomaton();
+                IList<BytesRef> matchedTerms = new List<BytesRef>();
+                foreach (BytesRef t in Terms)
+                {
+                    if (BasicOperations.Run(automaton, t.Utf8ToString()))
+                    {
+                        matchedTerms.Add(t);
+                    }
+                }
+
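+                // brute-force the matching terms, union them into a second automaton, and
+                // verify both queries return identical hits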
+                Automaton alternate = BasicAutomata.MakeStringUnion(matchedTerms);
+                //System.out.println("match " + matchedTerms.Size() + " " + alternate.getNumberOfStates() + " states, sigma=" + alternate.getStartPoints().Length);
+                //AutomatonTestUtil.minimizeSimple(alternate);
+                //System.out.println("minmize done");
+                AutomatonQuery a1 = new AutomatonQuery(new Term("field", ""), automaton);
+                AutomatonQuery a2 = new AutomatonQuery(new Term("field", ""), alternate);
+                CheckHits.CheckEqual(a1, Searcher.Search(a1, 25).ScoreDocs, Searcher.Search(a2, 25).ScoreDocs);
+            }
+        }
+
+        /// <summary>
+        /// seeks to every term accepted by some automaton </summary>
+        [Test]
+        public virtual void TestSeeking()
+        {
+            for (int i = 0; i < NumIterations; i++)
+            {
+                string reg = AutomatonTestUtil.RandomRegexp(Random());
+                Automaton automaton = (new RegExp(reg, RegExp.NONE)).ToAutomaton();
+                TermsEnum te = MultiFields.GetTerms(Reader, "field").GetIterator(null);
+                IList<BytesRef> unsortedTerms = new List<BytesRef>(Terms);
+                Collections.Shuffle(unsortedTerms);
+
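+                // terms arrive in shuffled order, so the enum must reposition both
+                // forwards and backwards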
+                foreach (BytesRef term in unsortedTerms)
+                {
+                    if (BasicOperations.Run(automaton, term.Utf8ToString()))
+                    {
+                        // term is accepted
+                        if (Random().NextBoolean())
+                        {
+                            // seek exact
+                            Assert.IsTrue(te.SeekExact(term));
+                        }
+                        else
+                        {
+                            // seek ceil
+                            Assert.AreEqual(SeekStatus.FOUND, te.SeekCeil(term));
+                            Assert.AreEqual(term, te.Term);
+                        }
+                    }
+                }
+            }
+        }
+
+        /// <summary>
+        /// mixes up seek and next for all terms </summary>
+        [Test]
+        public virtual void TestSeekingAndNexting()
+        {
+            for (int i = 0; i < NumIterations; i++)
+            {
+                TermsEnum te = MultiFields.GetTerms(Reader, "field").GetIterator(null);
+
+                foreach (BytesRef term in Terms)
+                {
+                    int c = Random().Next(3);
+                    if (c == 0)
+                    {
+                        Assert.AreEqual(term, te.Next());
+                    }
+                    else if (c == 1)
+                    {
+                        Assert.AreEqual(SeekStatus.FOUND, te.SeekCeil(term));
+                        Assert.AreEqual(term, te.Term);
+                    }
+                    else
+                    {
+                        Assert.IsTrue(te.SeekExact(term));
+                    }
+                }
+            }
+        }
+
+        /// <summary>
+        /// tests intersect: TODO start at a random term! </summary>
+        [Test]
+        public virtual void TestIntersect()
+        {
+            for (int i = 0; i < NumIterations; i++)
+            {
+                string reg = AutomatonTestUtil.RandomRegexp(Random());
+                Automaton automaton = (new RegExp(reg, RegExp.NONE)).ToAutomaton();
+                CompiledAutomaton ca = new CompiledAutomaton(automaton, SpecialOperations.IsFinite(automaton), false);
+                TermsEnum te = MultiFields.GetTerms(Reader, "field").Intersect(ca, null);
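+                // ground truth: the intersection of the indexed-terms automaton with the
+                // query automaton must accept exactly the terms Intersect() returns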
+                Automaton expected = BasicOperations.Intersection(TermsAutomaton, automaton);
+                SortedSet<BytesRef> found = new SortedSet<BytesRef>();
+                while (te.Next() != null)
+                {
+                    found.Add(BytesRef.DeepCopyOf(te.Term));
+                }
+
+                Automaton actual = BasicAutomata.MakeStringUnion(found);
+                Assert.IsTrue(BasicOperations.SameLanguage(expected, actual));
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestThreadedForceMerge.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestThreadedForceMerge.cs b/src/Lucene.Net.Tests/Index/TestThreadedForceMerge.cs
new file mode 100644
index 0000000..e4e2d59
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestThreadedForceMerge.cs
@@ -0,0 +1,183 @@
+using System;
+using System.Threading;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using English = Lucene.Net.Util.English;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using StringField = StringField;
+
+    [TestFixture]
+    public class TestThreadedForceMerge : LuceneTestCase
+    {
+        private static Analyzer ANALYZER;
+
+        private const int NUM_THREADS = 3;
+        //private final static int NUM_THREADS = 5;
+
+        private const int NUM_ITER = 1;
+
+        private const int NUM_ITER2 = 1;
+
+        private volatile bool Failed;
+
+        [SetUp]
+        public static void Setup()
+        {
+            ANALYZER = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+        }
+
+        private void SetFailed()
+        {
+            Failed = true;
+        }
+
+        public virtual void RunTest(Random random, Directory directory)
+        {
+            IndexWriter writer = new IndexWriter(directory, ((IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, ANALYZER).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(2)).SetMergePolicy(NewLogMergePolicy()));
+
+            for (int iter = 0; iter < NUM_ITER; iter++)
+            {
+                int iterFinal = iter;
+
+                ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 1000;
+
+                FieldType customType = new FieldType(StringField.TYPE_STORED);
+                customType.OmitNorms = true;
+
+                for (int i = 0; i < 200; i++)
+                {
+                    Document d = new Document();
+                    d.Add(NewField("id", Convert.ToString(i), customType));
+                    d.Add(NewField("contents", English.IntToEnglish(i), customType));
+                    writer.AddDocument(d);
+                }
+
+                ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 4;
+
+                ThreadClass[] threads = new ThreadClass[NUM_THREADS];
+
+                for (int i = 0; i < NUM_THREADS; i++)
+                {
+                    int iFinal = i;
+                    IndexWriter writerFinal = writer;
+                    threads[i] = new ThreadAnonymousInnerClassHelper(this, iterFinal, customType, iFinal, writerFinal);
+                }
+
+                for (int i = 0; i < NUM_THREADS; i++)
+                {
+                    threads[i].Start();
+                }
+
+                for (int i = 0; i < NUM_THREADS; i++)
+                {
+                    threads[i].Join();
+                }
+
+                Assert.IsFalse(Failed);
+
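+                // each thread i nets 8 * (1 + i) docs per inner iter (adds 17 * (1 + i),
+                // deletes 9 * (1 + i)); summed over i = 0..NUM_THREADS-1 this is
+                // 8 * NUM_ITER2 * (NUM_THREADS / 2.0) * (1 + NUM_THREADS), on top of the
+                // 200 base docs, accumulated once per completed outer iteration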
+                int expectedDocCount = (int)((1 + iter) * (200 + 8 * NUM_ITER2 * (NUM_THREADS / 2.0) * (1 + NUM_THREADS)));
+
+                Assert.AreEqual(expectedDocCount, writer.NumDocs, "index=" + writer.SegString() + " numDocs=" + writer.NumDocs + " maxDoc=" + writer.MaxDoc + " config=" + writer.Config);
+                Assert.AreEqual(expectedDocCount, writer.MaxDoc, "index=" + writer.SegString() + " numDocs=" + writer.NumDocs + " maxDoc=" + writer.MaxDoc + " config=" + writer.Config);
+
+                writer.Dispose();
+                writer = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, ANALYZER).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(2));
+
+                DirectoryReader reader = DirectoryReader.Open(directory);
+                Assert.AreEqual(1, reader.Leaves.Count, "reader=" + reader);
+                Assert.AreEqual(expectedDocCount, reader.NumDocs);
+                reader.Dispose();
+            }
+            writer.Dispose();
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestThreadedForceMerge OuterInstance;
+
+            private int IterFinal;
+            private FieldType CustomType;
+            private int IFinal;
+            private IndexWriter WriterFinal;
+
+            public ThreadAnonymousInnerClassHelper(TestThreadedForceMerge outerInstance, int iterFinal, FieldType customType, int iFinal, IndexWriter writerFinal)
+            {
+                this.OuterInstance = outerInstance;
+                this.IterFinal = iterFinal;
+                this.CustomType = customType;
+                this.IFinal = iFinal;
+                this.WriterFinal = writerFinal;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    for (int j = 0; j < NUM_ITER2; j++)
+                    {
+                        WriterFinal.ForceMerge(1, false);
+                        for (int k = 0; k < 17 * (1 + IFinal); k++)
+                        {
+                            Document d = new Document();
+                            d.Add(OuterInstance.NewField("id", IterFinal + "_" + IFinal + "_" + j + "_" + k, CustomType));
+                            d.Add(OuterInstance.NewField("contents", English.IntToEnglish(IFinal + k), CustomType));
+                            WriterFinal.AddDocument(d);
+                        }
+                        for (int k = 0; k < 9 * (1 + IFinal); k++)
+                        {
+                            WriterFinal.DeleteDocuments(new Term("id", IterFinal + "_" + IFinal + "_" + j + "_" + k));
+                        }
+                        WriterFinal.ForceMerge(1);
+                    }
+                }
+                catch (Exception t)
+                {
+                    OuterInstance.SetFailed();
+                    Console.WriteLine(Thread.CurrentThread.Name + ": hit exception");
+                    Console.WriteLine(t.StackTrace);
+                }
+            }
+        }
+
+        /*
+          Run above stress test against RAMDirectory and then
+          FSDirectory.
+        */
+
+        [Test]
+        public virtual void TestThreadedForceMerge_Mem()
+        {
+            Directory directory = NewDirectory();
+            RunTest(Random(), directory);
+            directory.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestTieredMergePolicy.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestTieredMergePolicy.cs b/src/Lucene.Net.Tests/Index/TestTieredMergePolicy.cs
new file mode 100644
index 0000000..793736d
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestTieredMergePolicy.cs
@@ -0,0 +1,297 @@
+using Lucene.Net.Attributes;
+using Lucene.Net.Documents;
+using Lucene.Net.Store;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.Index
+{
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestTieredMergePolicy : BaseMergePolicyTestCase
+    {
+        protected internal override MergePolicy MergePolicy()
+        {
+            return NewTieredMergePolicy();
+        }
+
+        [Test, LuceneNetSpecific]
+        public virtual void TestIndexWriterDirtSimple()
+        {
+            Directory dir = new RAMDirectory();
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            TieredMergePolicy tmp = NewTieredMergePolicy();
+            iwc.SetMergePolicy(tmp);
+            iwc.SetMaxBufferedDocs(2);
+            tmp.MaxMergeAtOnce = 100;
+            tmp.SegmentsPerTier = 100;
+            tmp.ForceMergeDeletesPctAllowed = 30.0;
+            IndexWriter w = new IndexWriter(dir, iwc);
+
+            int numDocs = 2;
+
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField("content", "aaa " + i, Field.Store.NO));
+                w.AddDocument(doc);
+            }
+
+            Assert.AreEqual(numDocs, w.MaxDoc);
+            Assert.AreEqual(numDocs, w.NumDocs);
+        }
+
+        [Test]
+        public virtual void TestForceMergeDeletes()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            TieredMergePolicy tmp = NewTieredMergePolicy();
+            conf.SetMergePolicy(tmp);
+            conf.SetMaxBufferedDocs(4);
+            tmp.MaxMergeAtOnce = 100;
+            tmp.SegmentsPerTier = 100;
+            tmp.ForceMergeDeletesPctAllowed = 30.0;
+            IndexWriter w = new IndexWriter(dir, conf);
+            for (int i = 0; i < 80; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField("content", "aaa " + (i % 4), Field.Store.NO));
+                w.AddDocument(doc);
+            }
+            Assert.AreEqual(80, w.MaxDoc);
+            Assert.AreEqual(80, w.NumDocs);
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: delete docs");
+            }
+            w.DeleteDocuments(new Term("content", "0"));
+            w.ForceMergeDeletes();
+
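+            // 20 of the 80 docs (25%) are deleted, under the 30% threshold configured
+            // above, so ForceMergeDeletes left them in place (MaxDoc stays 80); only after
+            // the threshold drops to 10% below are the deletes actually reclaimed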
+            Assert.AreEqual(80, w.MaxDoc);
+            Assert.AreEqual(60, w.NumDocs);
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: forceMergeDeletes2");
+            }
+            ((TieredMergePolicy)w.Config.MergePolicy).ForceMergeDeletesPctAllowed = 10.0;
+            w.ForceMergeDeletes();
+            Assert.AreEqual(60, w.NumDocs);
+            Assert.AreEqual(60, w.MaxDoc);
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestPartialMerge()
+        {
+            int num = AtLeast(10);
+            for (int iter = 0; iter < num; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: iter=" + iter);
+                }
+                Directory dir = NewDirectory();
+                IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+                conf.SetMergeScheduler(new SerialMergeScheduler());
+                TieredMergePolicy tmp = NewTieredMergePolicy();
+                conf.SetMergePolicy(tmp);
+                conf.SetMaxBufferedDocs(2);
+                tmp.MaxMergeAtOnce = 3;
+                tmp.SegmentsPerTier = 6;
+
+                IndexWriter w = new IndexWriter(dir, conf);
+                int maxCount = 0;
+                int numDocs = TestUtil.NextInt(Random(), 20, 100);
+                for (int i = 0; i < numDocs; i++)
+                {
+                    Document doc = new Document();
+                    doc.Add(NewTextField("content", "aaa " + (i % 4), Field.Store.NO));
+                    w.AddDocument(doc);
+                    int count = w.SegmentCount;
+                    maxCount = Math.Max(count, maxCount);
+                    Assert.IsTrue(count >= maxCount - 3, "count=" + count + " maxCount=" + maxCount);
+                }
+
+                w.Flush(true, true);
+
+                int segmentCount = w.SegmentCount;
+                int targetCount = TestUtil.NextInt(Random(), 1, segmentCount);
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: merge to " + targetCount + " segs (current count=" + segmentCount + ")");
+                }
+                w.ForceMerge(targetCount);
+                Assert.AreEqual(targetCount, w.SegmentCount);
+
+                w.Dispose();
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestForceMergeDeletesMaxSegSize()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            TieredMergePolicy tmp = new TieredMergePolicy();
+            tmp.MaxMergedSegmentMB = 0.01;
+            tmp.ForceMergeDeletesPctAllowed = 0.0;
+            conf.SetMergePolicy(tmp);
+
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, conf);
+            w.RandomForceMerge = false;
+
+            int numDocs = AtLeast(200);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("id", "" + i, Field.Store.NO));
+                doc.Add(NewTextField("content", "aaa " + i, Field.Store.NO));
+                w.AddDocument(doc);
+            }
+
+            w.ForceMerge(1);
+            IndexReader r = w.Reader;
+            Assert.AreEqual(numDocs, r.MaxDoc);
+            Assert.AreEqual(numDocs, r.NumDocs);
+            r.Dispose();
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: delete doc");
+            }
+
+            w.DeleteDocuments(new Term("id", "" + (42 + 17)));
+
+            r = w.Reader;
+            Assert.AreEqual(numDocs, r.MaxDoc);
+            Assert.AreEqual(numDocs - 1, r.NumDocs);
+            r.Dispose();
+
+            w.ForceMergeDeletes();
+
+            r = w.Reader;
+            Assert.AreEqual(numDocs - 1, r.MaxDoc);
+            Assert.AreEqual(numDocs - 1, r.NumDocs);
+            r.Dispose();
+
+            w.Dispose();
+
+            dir.Dispose();
+        }
+
+        private const double EPSILON = 1E-14;
+
+        [Test]
+        public virtual void TestSetters()
+        {
+            TieredMergePolicy tmp = new TieredMergePolicy();
+
+            tmp.MaxMergedSegmentMB = 0.5;
+            Assert.AreEqual(0.5, tmp.MaxMergedSegmentMB, EPSILON);
+
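+            // values this large appear to be clamped to long.MaxValue bytes internally,
+            // hence the /1024/1024 conversion; the widened epsilon absorbs double
+            // rounding at that magnitude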
+            tmp.MaxMergedSegmentMB = double.PositiveInfinity;
+            Assert.AreEqual(long.MaxValue / 1024 / 1024.0, tmp.MaxMergedSegmentMB, EPSILON * long.MaxValue);
+
+            tmp.MaxMergedSegmentMB = long.MaxValue / 1024 / 1024.0;
+            Assert.AreEqual(long.MaxValue / 1024 / 1024.0, tmp.MaxMergedSegmentMB, EPSILON * long.MaxValue);
+
+            try
+            {
+                tmp.MaxMergedSegmentMB = -2.0;
+                Assert.Fail("Didn't throw IllegalArgumentException");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // pass
+            }
+
+            tmp.FloorSegmentMB = 2.0;
+            Assert.AreEqual(2.0, tmp.FloorSegmentMB, EPSILON);
+
+            tmp.FloorSegmentMB = double.PositiveInfinity;
+            Assert.AreEqual(long.MaxValue / 1024 / 1024.0, tmp.FloorSegmentMB, EPSILON * long.MaxValue);
+
+            tmp.FloorSegmentMB = long.MaxValue / 1024 / 1024.0;
+            Assert.AreEqual(long.MaxValue / 1024 / 1024.0, tmp.FloorSegmentMB, EPSILON * long.MaxValue);
+
+            try
+            {
+                tmp.FloorSegmentMB = -2.0;
+                Assert.Fail("Didn't throw IllegalArgumentException");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // pass
+            }
+
+            tmp.MaxCFSSegmentSizeMB = 2.0;
+            Assert.AreEqual(2.0, tmp.MaxCFSSegmentSizeMB, EPSILON);
+
+            tmp.MaxCFSSegmentSizeMB = double.PositiveInfinity;
+            Assert.AreEqual(long.MaxValue / 1024 / 1024.0, tmp.MaxCFSSegmentSizeMB, EPSILON * long.MaxValue);
+
+            tmp.MaxCFSSegmentSizeMB = long.MaxValue / 1024 / 1024.0;
+            Assert.AreEqual(long.MaxValue / 1024 / 1024.0, tmp.MaxCFSSegmentSizeMB, EPSILON * long.MaxValue);
+
+            try
+            {
+                tmp.MaxCFSSegmentSizeMB = -2.0;
+                Assert.Fail("Didn't throw IllegalArgumentException");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // pass
+            }
+
+            // TODO: Add more checks for other non-double setters!
+        }
+
+
+        #region BaseMergePolicyTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestForceMergeNotNeeded()
+        {
+            base.TestForceMergeNotNeeded();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestTransactionRollback.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestTransactionRollback.cs b/src/Lucene.Net.Tests/Index/TestTransactionRollback.cs
new file mode 100644
index 0000000..0ff979a
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestTransactionRollback.cs
@@ -0,0 +1,271 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using IBits = Lucene.Net.Util.IBits;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+
+    /// <summary>
+    /// Test class to illustrate using IndexDeletionPolicy to provide multi-level rollback capability.
+    /// This test case creates an index of records 1 to 100, introducing a commit point every 10 records.
+    ///
+    /// A "keep all" deletion policy is used to ensure we keep all commit points for testing purposes.
+    /// </summary>
+
+    [TestFixture]
+    public class TestTransactionRollback : LuceneTestCase
+    {
+        private const string FIELD_RECORD_ID = "record_id";
+        private Directory Dir;
+
+        //Rolls back index to a chosen ID
+        private void RollBackLast(int id)
+        {
+            // System.out.println("Attempting to rollback to "+id);
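+            // commit userData labels look like "records 1-30" (see SetUp), so matching on
+            // the "-<id>" suffix locates the commit that ended with that record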
+            string ids = "-" + id;
+            IndexCommit last = null;
+            ICollection<IndexCommit> commits = DirectoryReader.ListCommits(Dir);
+            for (IEnumerator<IndexCommit> iterator = commits.GetEnumerator(); iterator.MoveNext(); )
+            {
+                IndexCommit commit = iterator.Current;
+                IDictionary<string, string> ud = commit.UserData;
+                if (ud.Count > 0)
+                {
+                    if (ud["index"].EndsWith(ids))
+                    {
+                        last = commit;
+                    }
+                }
+            }
+
+            if (last == null)
+            {
+                throw new Exception("Couldn't find commit point " + id);
+            }
+
+            IndexWriter w = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(new RollbackDeletionPolicy(this, id)).SetIndexCommit(last));
+            IDictionary<string, string> data = new Dictionary<string, string>();
+            data["index"] = "Rolled back to 1-" + id;
+            w.CommitData = data;
+            w.Dispose();
+        }
+
+        [Test]
+        public virtual void TestRepeatedRollBacks()
+        {
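+            // roll back one batch at a time (to 90, 80, ..., 10), verifying that only
+            // records 1..N survive each rollback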
+            int expectedLastRecordId = 100;
+            while (expectedLastRecordId > 10)
+            {
+                expectedLastRecordId -= 10;
+                RollBackLast(expectedLastRecordId);
+
+                BitArray expecteds = new BitArray(100);
+                expecteds.Set(1, (expectedLastRecordId + 1), true);
+                CheckExpecteds(expecteds);
+            }
+        }
+
+        private void CheckExpecteds(BitArray expecteds)
+        {
+            IndexReader r = DirectoryReader.Open(Dir);
+
+            //Perhaps not the most efficient approach but meets our
+            //needs here.
+            IBits liveDocs = MultiFields.GetLiveDocs(r);
+            for (int i = 0; i < r.MaxDoc; i++)
+            {
+                if (liveDocs == null || liveDocs.Get(i))
+                {
+                    string sval = r.Document(i).Get(FIELD_RECORD_ID);
+                    if (sval != null)
+                    {
+                        int val = Convert.ToInt32(sval);
+                        Assert.IsTrue(expecteds.SafeGet(val), "Did not expect document #" + val);
+                        expecteds.SafeSet(val, false);
+                    }
+                }
+            }
+            r.Dispose();
+            Assert.AreEqual(0, expecteds.Cardinality(), "Should have 0 docs remaining ");
+        }
+
+        /*
+        private void showAvailableCommitPoints() throws Exception {
+          Collection commits = DirectoryReader.ListCommits(dir);
+          for (Iterator iterator = commits.iterator(); iterator.hasNext();) {
+            IndexCommit comm = (IndexCommit) iterator.Next();
+            System.out.print("\t Available commit point:["+comm.getUserData()+"] files=");
+            Collection files = comm.getFileNames();
+            for (Iterator iterator2 = files.iterator(); iterator2.hasNext();) {
+              String filename = (String) iterator2.Next();
+              System.out.print(filename+", ");
+            }
+            System.out.println();
+          }
+        }
+        */
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Dir = NewDirectory();
+
+            //Build index of records 1 to 100, committing after each batch of 10
+            IndexDeletionPolicy sdp = new KeepAllDeletionPolicy(this);
+            IndexWriter w = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(sdp));
+
+            for (int currentRecordId = 1; currentRecordId <= 100; currentRecordId++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField(FIELD_RECORD_ID, "" + currentRecordId, Field.Store.YES));
+                w.AddDocument(doc);
+
+                if (currentRecordId % 10 == 0)
+                {
+                    IDictionary<string, string> data = new Dictionary<string, string>();
+                    data["index"] = "records 1-" + currentRecordId;
+                    w.CommitData = data;
+                    w.Commit();
+                }
+            }
+
+            w.Dispose();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        // Rolls back to previous commit point
+        internal class RollbackDeletionPolicy : IndexDeletionPolicy
+        {
+            private readonly TestTransactionRollback OuterInstance;
+
+            internal int RollbackPoint;
+
+            public RollbackDeletionPolicy(TestTransactionRollback outerInstance, int rollbackPoint)
+            {
+                this.OuterInstance = outerInstance;
+                this.RollbackPoint = rollbackPoint;
+            }
+
+            public override void OnCommit<T>(IList<T> commits)
+            {
+            }
+
+            public override void OnInit<T>(IList<T> commits)
+            {
+                foreach (IndexCommit commit in commits)
+                {
+                    IDictionary<string, string> userData = commit.UserData;
+                    if (userData.Count > 0)
+                    {
+                        // Label for a commit point is "records 1-30".
+                        // This code reads the last id ("30" in this example) and deletes the
+                        // commit if it is after the desired rollback point
+                        string x = userData["index"];
+                        string lastVal = x.Substring(x.LastIndexOf("-") + 1);
+                        int last = Convert.ToInt32(lastVal);
+                        if (last > RollbackPoint)
+                        {
+                            /*
+                            System.out.print("\tRolling back commit point:" +
+                                             " UserData="+commit.getUserData() +")  ("+(commits.Size()-1)+" commit points left) files=");
+                            Collection files = commit.getFileNames();
+                            for (Iterator iterator2 = files.iterator(); iterator2.hasNext();) {
+                              System.out.print(" "+iterator2.Next());
+                            }
+                            System.out.println();
+                            */
+
+                            commit.Delete();
+                        }
+                    }
+                }
+            }
+        }
+
+        internal class DeleteLastCommitPolicy : IndexDeletionPolicy
+        {
+            private readonly TestTransactionRollback OuterInstance;
+
+            public DeleteLastCommitPolicy(TestTransactionRollback outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override void OnCommit<T>(IList<T> commits)
+            {
+            }
+
+            public override void OnInit<T>(IList<T> commits)
+            {
+                commits.RemoveAt(commits.Count - 1);
+            }
+        }
+
+        [Test]
+        public virtual void TestRollbackDeletionPolicy()
+        {
+            for (int i = 0; i < 2; i++)
+            {
+                // Unless you specify a prior commit point, rollback
+                // should not work:
+                (new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(new DeleteLastCommitPolicy(this)))).Dispose();
+                IndexReader r = DirectoryReader.Open(Dir);
+                Assert.AreEqual(100, r.NumDocs);
+                r.Dispose();
+            }
+        }
+
+        // Keeps all commit points (used to build index)
+        internal class KeepAllDeletionPolicy : IndexDeletionPolicy
+        {
+            private readonly TestTransactionRollback OuterInstance;
+
+            public KeepAllDeletionPolicy(TestTransactionRollback outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override void OnCommit<T>(IList<T> commits)
+            {
+            }
+
+            public override void OnInit<T>(IList<T> commits)
+            {
+            }
+        }
+    }
+}
\ No newline at end of file


[40/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestCheckIndex.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestCheckIndex.cs b/src/Lucene.Net.Tests/Index/TestCheckIndex.cs
new file mode 100644
index 0000000..7b6186e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestCheckIndex.cs
@@ -0,0 +1,129 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Support;
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using System.IO;
+    using CannedTokenStream = Lucene.Net.Analysis.CannedTokenStream;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TextField = TextField;
+    using Token = Lucene.Net.Analysis.Token;
+
+    [TestFixture]
+    public class TestCheckIndex : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestDeletedDocs()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
+            for (int i = 0; i < 19; i++)
+            {
+                Document doc = new Document();
+                FieldType customType = new FieldType(TextField.TYPE_STORED);
+                customType.StoreTermVectors = true;
+                customType.StoreTermVectorPositions = true;
+                customType.StoreTermVectorOffsets = true;
+                doc.Add(NewField("field", "aaa" + i, customType));
+                writer.AddDocument(doc);
+            }
+            writer.ForceMerge(1);
+            writer.Commit();
+            writer.DeleteDocuments(new Term("field", "aaa5"));
+            writer.Dispose();
+
+            ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
+            CheckIndex checker = new CheckIndex(dir);
+            checker.InfoStream = new StreamWriter(bos, Encoding.UTF8);
+            if (VERBOSE)
+            {
+                checker.InfoStream = Console.Out;
+            }
+            CheckIndex.Status indexStatus = checker.DoCheckIndex();
+            if (!indexStatus.Clean)
+            {
+                Console.WriteLine("CheckIndex failed");
+                checker.FlushInfoStream();
+                Console.WriteLine(bos.ToString());
+                Assert.Fail();
+            }
+
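+            // 19 docs were indexed into a single merged segment and one ("aaa5") deleted,
+            // so each per-segment statistic below should report 18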
+            CheckIndex.Status.SegmentInfoStatus seg = indexStatus.SegmentInfos[0];
+            Assert.IsTrue(seg.OpenReaderPassed);
+
+            Assert.IsNotNull(seg.Diagnostics);
+
+            Assert.IsNotNull(seg.FieldNormStatus);
+            Assert.IsNull(seg.FieldNormStatus.Error);
+            Assert.AreEqual(1, seg.FieldNormStatus.TotFields);
+
+            Assert.IsNotNull(seg.TermIndexStatus);
+            Assert.IsNull(seg.TermIndexStatus.Error);
+            Assert.AreEqual(18, seg.TermIndexStatus.TermCount);
+            Assert.AreEqual(18, seg.TermIndexStatus.TotFreq);
+            Assert.AreEqual(18, seg.TermIndexStatus.TotPos);
+
+            Assert.IsNotNull(seg.StoredFieldStatus);
+            Assert.IsNull(seg.StoredFieldStatus.Error);
+            Assert.AreEqual(18, seg.StoredFieldStatus.DocCount);
+            Assert.AreEqual(18, seg.StoredFieldStatus.TotFields);
+
+            Assert.IsNotNull(seg.TermVectorStatus);
+            Assert.IsNull(seg.TermVectorStatus.Error);
+            Assert.AreEqual(18, seg.TermVectorStatus.DocCount);
+            Assert.AreEqual(18, seg.TermVectorStatus.TotVectors);
+
+            Assert.IsTrue(seg.Diagnostics.Count > 0);
+            IList<string> onlySegments = new List<string>();
+            onlySegments.Add("_0");
+
+            Assert.IsTrue(checker.DoCheckIndex(onlySegments).Clean);
+            dir.Dispose();
+        }
+
+        // LUCENE-4221: we have to let these through, for now
+        [Test]
+        public virtual void TestBogusTermVectors()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter iw = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null));
+            Document doc = new Document();
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.StoreTermVectors = true;
+            ft.StoreTermVectorOffsets = true;
+            Field field = new Field("foo", "", ft);
+            field.SetTokenStream(new CannedTokenStream(new Token("bar", 5, 10), new Token("bar", 1, 4)));
+            doc.Add(field);
+            iw.AddDocument(doc);
+            iw.Dispose();
+            dir.Dispose(); // checkindex
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestCodecHoldsOpenFiles.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestCodecHoldsOpenFiles.cs b/src/Lucene.Net.Tests/Index/TestCodecHoldsOpenFiles.cs
new file mode 100644
index 0000000..66622af
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestCodecHoldsOpenFiles.cs
@@ -0,0 +1,116 @@
+using Lucene.Net.Documents;
+using NUnit.Framework;
+
+namespace Lucene.Net.Index
+{
+    using Attributes;
+    using System.IO;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Document = Documents.Document;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestCodecHoldsOpenFiles : LuceneTestCase
+    {
+        [Test]
+        public virtual void Test()
+        {
+            Directory d = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
+            int numDocs = AtLeast(100);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewField("foo", "bar", TextField.TYPE_NOT_STORED));
+                w.AddDocument(doc);
+            }
+
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            foreach (string fileName in d.ListAll())
+            {
+                try
+                {
+                    d.DeleteFile(fileName);
+                }
+#pragma warning disable 168
+                catch (IOException ioe)
+#pragma warning restore 168
+                {
+                    // ignore: this means codec (correctly) is holding
+                    // the file open
+                }
+            }
+
+            foreach (AtomicReaderContext cxt in r.Leaves)
+            {
+                TestUtil.CheckReader(cxt.Reader);
+            }
+
+            r.Dispose();
+            d.Dispose();
+        }
+
+        [Test, LuceneNetSpecific] // Apparently added to LUCENENET for debugging
+        public virtual void TestExposeUnclosedFiles()
+        {
+            Directory d = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
+            //int numDocs = AtLeast(100);
+            int numDocs = 5;
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewField("foo", "bar", TextField.TYPE_NOT_STORED));
+                w.AddDocument(doc);
+            }
+
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            foreach (string fileName in d.ListAll())
+            {
+                try
+                {
+                    d.DeleteFile(fileName);
+                }
+#pragma warning disable 168
+                catch (IOException ioe)
+#pragma warning restore 168
+                {
+                    // ignore: this means codec (correctly) is holding
+                    // the file open
+                }
+            }
+
+            foreach (AtomicReaderContext cxt in r.Leaves)
+            {
+                TestUtil.CheckReader(cxt.Reader);
+            }
+
+            r.Dispose();
+            d.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestCodecs.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestCodecs.cs b/src/Lucene.Net.Tests/Index/TestCodecs.cs
new file mode 100644
index 0000000..e798da9
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestCodecs.cs
@@ -0,0 +1,933 @@
+using Lucene.Net.Codecs.MockSep;
+using Lucene.Net.Documents;
+using Lucene.Net.Search;
+using Lucene.Net.Support;
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Threading;
+
+namespace Lucene.Net.Index
+{
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Constants = Lucene.Net.Util.Constants;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+
+    //using MockSepPostingsFormat = Lucene.Net.Codecs.mocksep.MockSepPostingsFormat;
+    using Document = Documents.Document;
+    using FieldsConsumer = Lucene.Net.Codecs.FieldsConsumer;
+    using FieldsProducer = Lucene.Net.Codecs.FieldsProducer;
+    using FieldType = FieldType;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using InfoStream = Lucene.Net.Util.InfoStream;
+    using Lucene3xCodec = Lucene.Net.Codecs.Lucene3x.Lucene3xCodec;
+    using Lucene40RWCodec = Lucene.Net.Codecs.Lucene40.Lucene40RWCodec;
+    using Lucene41RWCodec = Lucene.Net.Codecs.Lucene41.Lucene41RWCodec;
+    using Lucene42RWCodec = Lucene.Net.Codecs.Lucene42.Lucene42RWCodec;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using NumericDocValuesField = NumericDocValuesField;
+    using OpenBitSet = Lucene.Net.Util.OpenBitSet;
+    using PostingsConsumer = Lucene.Net.Codecs.PostingsConsumer;
+    using Query = Lucene.Net.Search.Query;
+    using ScoreDoc = Lucene.Net.Search.ScoreDoc;
+    using Store = Field.Store;
+    using StringField = StringField;
+    using TermsConsumer = Lucene.Net.Codecs.TermsConsumer;
+    using TermStats = Lucene.Net.Codecs.TermStats;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    // TODO: test multiple codecs here?
+
+    // TODO
+    //   - test across fields
+    //   - fix this test to run once for all codecs
+    //   - make more docs per term, to test > 1 level skipping
+    //   - test all combinations of payloads/not and omitTF/not
+    //   - test w/ different indexDivisor
+    //   - test field where payload length rarely changes
+    //   - 0-term fields
+    //   - seek/skip to same term/doc i'm already on
+    //   - mix in deleted docs
+    //   - seek, skip beyond end -- assert returns false
+    //   - seek, skip to things that don't exist -- ensure it
+    //     goes to 1 before next one known to exist
+    //   - skipTo(term)
+    //   - skipTo(doc)
+
+    [TestFixture]
+    public class TestCodecs : LuceneTestCase
+    {
+        private static string[] FieldNames = new string[] { "one", "two", "three", "four" };
+
+        private static int NUM_TEST_ITER;
+        private const int NUM_TEST_THREADS = 3;
+        private const int NUM_FIELDS = 4;
+        private const int NUM_TERMS_RAND = 50; // must be > 16 to test skipping
+        private const int DOC_FREQ_RAND = 500; // must be > 16 to test skipping
+        private const int TERM_DOC_FREQ_RAND = 20;
+
+        [OneTimeSetUp]
+        public static void BeforeClass()
+        {
+            NUM_TEST_ITER = AtLeast(20);
+        }
+
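+        /// <summary>
+        /// In-memory model of a single field: its <see cref="FieldInfo"/> plus the
+        /// sorted <see cref="TermData"/> that gets written through a
+        /// <see cref="FieldsConsumer"/> and later verified against the codec.
+        /// </summary>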
+        internal class FieldData : IComparable<FieldData>
+        {
+            private readonly TestCodecs OuterInstance;
+
+            internal readonly FieldInfo FieldInfo;
+            internal readonly TermData[] Terms;
+            internal readonly bool OmitTF;
+            internal readonly bool StorePayloads;
+
+            public FieldData(TestCodecs outerInstance, string name, FieldInfos.Builder fieldInfos, TermData[] terms, bool omitTF, bool storePayloads)
+            {
+                this.OuterInstance = outerInstance;
+                this.OmitTF = omitTF;
+                this.StorePayloads = storePayloads;
+                // TODO: change this test to use all three
+                FieldInfo = fieldInfos.AddOrUpdate(name, new IndexableFieldTypeAnonymousInnerClassHelper(this, omitTF));
+                if (storePayloads)
+                {
+                    FieldInfo.SetStorePayloads();
+                }
+                this.Terms = terms;
+                for (int i = 0; i < terms.Length; i++)
+                {
+                    terms[i].Field = this;
+                }
+
+                Array.Sort(terms);
+            }
+
+            private class IndexableFieldTypeAnonymousInnerClassHelper : IIndexableFieldType
+            {
+                private readonly FieldData OuterInstance;
+
+                private bool OmitTF;
+
+                public IndexableFieldTypeAnonymousInnerClassHelper(FieldData outerInstance, bool omitTF)
+                {
+                    this.OuterInstance = outerInstance;
+                    this.OmitTF = omitTF;
+                }
+
+                public bool IsIndexed
+                {
+                    get { return true; }
+                    set { }
+                }
+
+                public bool IsStored
+                {
+                    get { return false; }
+                    set { }
+                }
+
+                public bool IsTokenized
+                {
+                    get { return false; }
+                    set { }
+                }
+
+                public bool StoreTermVectors
+                {
+                    get { return false; }
+                    set { }
+                }
+
+                public bool StoreTermVectorOffsets
+                {
+                    get { return false; }
+                    set { }
+                }
+
+                public bool StoreTermVectorPositions
+                {
+                    get { return false; }
+                    set { }
+                }
+
+                public bool StoreTermVectorPayloads
+                {
+                    get { return false; }
+                    set { }
+                }
+
+                public bool OmitNorms
+                {
+                    get { return false; }
+                    set { }
+                }
+
+                public IndexOptions? IndexOptions
+                {
+                    get { return OmitTF ? Index.IndexOptions.DOCS_ONLY : Index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS; }
+                    set { }
+                }
+
+                public NumericType? NumericType
+                {
+                    get { throw new NotImplementedException(); }
+                    set { }
+                }
+
+                public DocValuesType? DocValueType
+                {
+                    get { return null; }
+                    set { }
+                }
+            }
+
+            public int CompareTo(FieldData other)
+            {
+                return FieldInfo.Name.CompareTo(other.FieldInfo.Name);
+            }
+
+            public virtual void Write(FieldsConsumer consumer)
+            {
+                Array.Sort(Terms);
+                TermsConsumer termsConsumer = consumer.AddField(FieldInfo);
+                long sumTotalTermCount = 0;
+                long sumDF = 0;
+                OpenBitSet visitedDocs = new OpenBitSet();
+                foreach (TermData term in Terms)
+                {
+                    for (int i = 0; i < term.Docs.Length; i++)
+                    {
+                        visitedDocs.Set(term.Docs[i]);
+                    }
+                    sumDF += term.Docs.Length;
+                    sumTotalTermCount += term.Write(termsConsumer);
+                }
+                termsConsumer.Finish(OmitTF ? -1 : sumTotalTermCount, sumDF, (int)visitedDocs.Cardinality());
+            }
+        }
+
+        internal class PositionData
+        {
+            private readonly TestCodecs OuterInstance;
+
+            internal int Pos;
+            internal BytesRef Payload;
+
+            internal PositionData(TestCodecs outerInstance, int pos, BytesRef payload)
+            {
+                this.OuterInstance = outerInstance;
+                this.Pos = pos;
+                this.Payload = payload;
+            }
+        }
+
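+        /// <summary>
+        /// A single term with its expected docs, positions and (optional) payloads;
+        /// <see cref="Write(TermsConsumer)"/> feeds it to the codec and returns the
+        /// total term frequency written.
+        /// </summary>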
+        internal class TermData : IComparable<TermData>
+        {
+            private readonly TestCodecs OuterInstance;
+
+            internal string Text2;
+            internal readonly BytesRef Text;
+            internal int[] Docs;
+            internal PositionData[][] Positions;
+            internal FieldData Field;
+
+            public TermData(TestCodecs outerInstance, string text, int[] docs, PositionData[][] positions)
+            {
+                this.OuterInstance = outerInstance;
+                this.Text = new BytesRef(text);
+                this.Text2 = text;
+                this.Docs = docs;
+                this.Positions = positions;
+            }
+
+            public virtual int CompareTo(TermData o)
+            {
+                return Text.CompareTo(o.Text);
+            }
+
+            public virtual long Write(TermsConsumer termsConsumer)
+            {
+                PostingsConsumer postingsConsumer = termsConsumer.StartTerm(Text);
+                long totTF = 0;
+                for (int i = 0; i < Docs.Length; i++)
+                {
+                    int termDocFreq;
+                    if (Field.OmitTF)
+                    {
+                        termDocFreq = -1;
+                    }
+                    else
+                    {
+                        termDocFreq = Positions[i].Length;
+                    }
+                    postingsConsumer.StartDoc(Docs[i], termDocFreq);
+                    if (!Field.OmitTF)
+                    {
+                        totTF += Positions[i].Length;
+                        for (int j = 0; j < Positions[i].Length; j++)
+                        {
+                            PositionData pos = Positions[i][j];
+                            postingsConsumer.AddPosition(pos.Pos, pos.Payload, -1, -1);
+                        }
+                    }
+                    postingsConsumer.FinishDoc();
+                }
+                termsConsumer.FinishTerm(Text, new TermStats(Docs.Length, Field.OmitTF ? -1 : totTF));
+                return totTF;
+            }
+        }
+
+        private const string SEGMENT = "0";
+
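+        /// <summary>
+        /// Generates a set of unique random terms, each with increasing doc IDs and
+        /// random positions/payloads. Terms ending in "." are never generated here;
+        /// they are reserved for the non-existent-term seek tests.
+        /// </summary>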
+        internal virtual TermData[] MakeRandomTerms(bool omitTF, bool storePayloads)
+        {
+            int numTerms = 1 + Random().Next(NUM_TERMS_RAND);
+            //final int numTerms = 2;
+            TermData[] terms = new TermData[numTerms];
+
+            HashSet<string> termsSeen = new HashSet<string>();
+
+            for (int i = 0; i < numTerms; i++)
+            {
+                // Make term text
+                string text2;
+                while (true)
+                {
+                    text2 = TestUtil.RandomUnicodeString(Random());
+                    if (!termsSeen.Contains(text2) && !text2.EndsWith("."))
+                    {
+                        termsSeen.Add(text2);
+                        break;
+                    }
+                }
+
+                int docFreq = 1 + Random().Next(DOC_FREQ_RAND);
+                int[] docs = new int[docFreq];
+                PositionData[][] positions;
+
+                if (!omitTF)
+                {
+                    positions = new PositionData[docFreq][];
+                }
+                else
+                {
+                    positions = null;
+                }
+
+                int docID = 0;
+                for (int j = 0; j < docFreq; j++)
+                {
+                    docID += TestUtil.NextInt(Random(), 1, 10);
+                    docs[j] = docID;
+
+                    if (!omitTF)
+                    {
+                        int termFreq = 1 + Random().Next(TERM_DOC_FREQ_RAND);
+                        positions[j] = new PositionData[termFreq];
+                        int position = 0;
+                        for (int k = 0; k < termFreq; k++)
+                        {
+                            position += TestUtil.NextInt(Random(), 1, 10);
+
+                            BytesRef payload;
+                            if (storePayloads && Random().Next(4) == 0)
+                            {
+                                var bytes = new byte[1 + Random().Next(5)];
+                                for (int l = 0; l < bytes.Length; l++)
+                                {
+                                    bytes[l] = (byte)Random().Next(255);
+                                }
+                                payload = new BytesRef(bytes);
+                            }
+                            else
+                            {
+                                payload = null;
+                            }
+
+                            positions[j][k] = new PositionData(this, position, payload);
+                        }
+                    }
+                }
+
+                terms[i] = new TermData(this, text2, docs, positions);
+            }
+
+            return terms;
+        }
+
+        [Test]
+        public virtual void TestFixedPostings()
+        {
+            const int NUM_TERMS = 100;
+            TermData[] terms = new TermData[NUM_TERMS];
+            for (int i = 0; i < NUM_TERMS; i++)
+            {
+                int[] docs = new int[] { i };
+                string text = Convert.ToString(i);
+                terms[i] = new TermData(this, text, docs, null);
+            }
+
+            FieldInfos.Builder builder = new FieldInfos.Builder();
+
+            FieldData field = new FieldData(this, "field", builder, terms, true, false);
+            FieldData[] fields = new FieldData[] { field };
+            FieldInfos fieldInfos = builder.Finish();
+            Directory dir = NewDirectory();
+            this.Write(fieldInfos, dir, fields, true);
+            Codec codec = Codec.Default;
+            SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null);
+
+            FieldsProducer reader = codec.PostingsFormat.FieldsProducer(new SegmentReadState(dir, si, fieldInfos, NewIOContext(Random()), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR));
+
+            IEnumerator<string> fieldsEnum = reader.GetEnumerator();
+            fieldsEnum.MoveNext();
+            string fieldName = fieldsEnum.Current;
+            Assert.IsNotNull(fieldName);
+            Terms terms2 = reader.GetTerms(fieldName);
+            Assert.IsNotNull(terms2);
+
+            TermsEnum termsEnum = terms2.GetIterator(null);
+
+            DocsEnum docsEnum = null;
+            for (int i = 0; i < NUM_TERMS; i++)
+            {
+                BytesRef term = termsEnum.Next();
+                Assert.IsNotNull(term);
+                Assert.AreEqual(terms[i].Text2, term.Utf8ToString());
+
+                // do this twice to stress test the codec's reuse, ie,
+                // make sure it properly fully resets (rewinds) its
+                // internal state:
+                for (int iter = 0; iter < 2; iter++)
+                {
+                    docsEnum = TestUtil.Docs(Random(), termsEnum, null, docsEnum, DocsEnum.FLAG_NONE);
+                    Assert.AreEqual(terms[i].Docs[0], docsEnum.NextDoc());
+                    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docsEnum.NextDoc());
+                }
+            }
+            Assert.IsNull(termsEnum.Next());
+
+            for (int i = 0; i < NUM_TERMS; i++)
+            {
+                Assert.AreEqual(termsEnum.SeekCeil(new BytesRef(terms[i].Text2)), TermsEnum.SeekStatus.FOUND);
+            }
+
+            Assert.IsFalse(fieldsEnum.MoveNext());
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestRandomPostings()
+        {
+            FieldInfos.Builder builder = new FieldInfos.Builder();
+
+            FieldData[] fields = new FieldData[NUM_FIELDS];
+            for (int i = 0; i < NUM_FIELDS; i++)
+            {
+                bool omitTF = 0 == (i % 3);
+                bool storePayloads = 1 == (i % 3);
+                fields[i] = new FieldData(this, FieldNames[i], builder, this.MakeRandomTerms(omitTF, storePayloads), omitTF, storePayloads);
+            }
+
+            Directory dir = NewDirectory();
+            FieldInfos fieldInfos = builder.Finish();
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: now write postings");
+            }
+
+            this.Write(fieldInfos, dir, fields, false);
+            Codec codec = Codec.Default;
+            SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null);
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: now read postings");
+            }
+            FieldsProducer terms = codec.PostingsFormat.FieldsProducer(new SegmentReadState(dir, si, fieldInfos, NewIOContext(Random()), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR));
+
+            Verify[] threads = new Verify[NUM_TEST_THREADS - 1];
+            for (int i = 0; i < NUM_TEST_THREADS - 1; i++)
+            {
+                threads[i] = new Verify(this, si, fields, terms);
+                threads[i].SetDaemon(true);
+                threads[i].Start();
+            }
+
+            (new Verify(this, si, fields, terms)).Run();
+
+            for (int i = 0; i < NUM_TEST_THREADS - 1; i++)
+            {
+                threads[i].Join();
+                Debug.Assert(!threads[i].Failed);
+            }
+
+            terms.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSepPositionAfterMerge()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            config.SetMergePolicy(NewLogMergePolicy());
+            config.SetCodec(TestUtil.AlwaysPostingsFormat(new MockSepPostingsFormat()));
+            IndexWriter writer = new IndexWriter(dir, config);
+
+            try
+            {
+                PhraseQuery pq = new PhraseQuery();
+                pq.Add(new Term("content", "bbb"));
+                pq.Add(new Term("content", "ccc"));
+
+                Document doc = new Document();
+                FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+                customType.OmitNorms = true;
+                doc.Add(NewField("content", "aaa bbb ccc ddd", customType));
+
+                // add a document and force a commit to create the first segment
+                writer.AddDocument(doc);
+                writer.Commit();
+
+                ScoreDoc[] results = this.Search(writer, pq, 5);
+                Assert.AreEqual(1, results.Length);
+                Assert.AreEqual(0, results[0].Doc);
+
+                // add the same document again and force a commit to create a second segment
+                writer.AddDocument(doc);
+                writer.Commit();
+
+                // at this point, there should be at least two segments
+                results = this.Search(writer, pq, 5);
+                Assert.AreEqual(2, results.Length);
+                Assert.AreEqual(0, results[0].Doc);
+
+                // force-merge ("optimize") the segments down to one
+                writer.ForceMerge(1);
+
+                results = this.Search(writer, pq, 5);
+                Assert.AreEqual(2, results.Length);
+                Assert.AreEqual(0, results[0].Doc);
+            }
+            finally
+            {
+                writer.Dispose();
+                dir.Dispose();
+            }
+        }
+
+        private ScoreDoc[] Search(IndexWriter writer, Query q, int n)
+        {
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+            try
+            {
+                return searcher.Search(q, null, n).ScoreDocs;
+            }
+            finally
+            {
+                reader.Dispose();
+            }
+        }
+
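+        /// <summary>
+        /// Worker that enumerates and seeks the written postings via
+        /// <see cref="TermsEnum"/>/<see cref="DocsEnum"/> and checks them against the
+        /// in-memory <see cref="FieldData"/> model; run concurrently by
+        /// <see cref="TestRandomPostings"/>.
+        /// </summary>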
+        private class Verify : ThreadClass
+        {
+            private readonly TestCodecs OuterInstance;
+
+            internal readonly Fields TermsDict;
+            internal readonly FieldData[] Fields;
+            internal readonly SegmentInfo Si;
+            internal volatile bool Failed;
+
+            internal Verify(TestCodecs outerInstance, SegmentInfo si, FieldData[] fields, Fields termsDict)
+            {
+                this.OuterInstance = outerInstance;
+                this.Fields = fields;
+                this.TermsDict = termsDict;
+                this.Si = si;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    this._run();
+                }
+                catch (Exception t)
+                {
+                    Failed = true;
+                    throw new Exception(t.Message, t);
+                }
+            }
+
+            internal virtual void VerifyDocs(int[] docs, PositionData[][] positions, DocsEnum docsEnum, bool doPos)
+            {
+                for (int i = 0; i < docs.Length; i++)
+                {
+                    int doc = docsEnum.NextDoc();
+                    Assert.IsTrue(doc != DocIdSetIterator.NO_MORE_DOCS);
+                    Assert.AreEqual(docs[i], doc);
+                    if (doPos)
+                    {
+                        this.VerifyPositions(positions[i], ((DocsAndPositionsEnum)docsEnum));
+                    }
+                }
+                Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docsEnum.NextDoc());
+            }
+
+            internal sbyte[] Data = new sbyte[10];
+
+            internal virtual void VerifyPositions(PositionData[] positions, DocsAndPositionsEnum posEnum)
+            {
+                for (int i = 0; i < positions.Length; i++)
+                {
+                    int pos = posEnum.NextPosition();
+                    Assert.AreEqual(positions[i].Pos, pos);
+                    if (positions[i].Payload != null)
+                    {
+                        Assert.IsNotNull(posEnum.GetPayload());
+                        if (Random().Next(3) < 2)
+                        {
+                            // Verify the payload bytes
+                            BytesRef otherPayload = posEnum.GetPayload();
+                            Assert.IsTrue(positions[i].Payload.Equals(otherPayload), "expected=" + positions[i].Payload.ToString() + " got=" + otherPayload.ToString());
+                        }
+                    }
+                    else
+                    {
+                        Assert.IsNull(posEnum.GetPayload());
+                    }
+                }
+            }
+
+            public virtual void _run()
+            {
+                for (int iter = 0; iter < NUM_TEST_ITER; iter++)
+                {
+                    FieldData field = Fields[Random().Next(Fields.Length)];
+                    TermsEnum termsEnum = TermsDict.GetTerms(field.FieldInfo.Name).GetIterator(null);
+#pragma warning disable 612, 618
+                    if (Si.Codec is Lucene3xCodec)
+#pragma warning restore 612, 618
+                    {
+                        // code below expects unicode sort order
+                        continue;
+                    }
+
+                    int upto = 0;
+                    // Test straight enum of the terms:
+                    while (true)
+                    {
+                        BytesRef term = termsEnum.Next();
+                        if (term == null)
+                        {
+                            break;
+                        }
+                        BytesRef expected = new BytesRef(field.Terms[upto++].Text2);
+                        Assert.IsTrue(expected.BytesEquals(term), "expected=" + expected + " vs actual " + term);
+                    }
+                    Assert.AreEqual(upto, field.Terms.Length);
+
+                    // Test random seek:
+                    TermData term2 = field.Terms[Random().Next(field.Terms.Length)];
+                    TermsEnum.SeekStatus status = termsEnum.SeekCeil(new BytesRef(term2.Text2));
+                    Assert.AreEqual(status, TermsEnum.SeekStatus.FOUND);
+                    Assert.AreEqual(term2.Docs.Length, termsEnum.DocFreq);
+                    if (field.OmitTF)
+                    {
+                        this.VerifyDocs(term2.Docs, term2.Positions, TestUtil.Docs(Random(), termsEnum, null, null, DocsEnum.FLAG_NONE), false);
+                    }
+                    else
+                    {
+                        this.VerifyDocs(term2.Docs, term2.Positions, termsEnum.DocsAndPositions(null, null), true);
+                    }
+
+                    // Test random seek by ord:
+                    int idx = Random().Next(field.Terms.Length);
+                    term2 = field.Terms[idx];
+                    bool success = false;
+                    try
+                    {
+                        termsEnum.SeekExact(idx);
+                        success = true;
+                    }
+#pragma warning disable 168
+                    catch (System.NotSupportedException uoe)
+#pragma warning restore 168
+                    {
+                        // ok -- skip it
+                    }
+                    if (success)
+                    {
+                        Assert.AreEqual(status, TermsEnum.SeekStatus.FOUND);
+                        Assert.IsTrue(termsEnum.Term.BytesEquals(new BytesRef(term2.Text2)));
+                        Assert.AreEqual(term2.Docs.Length, termsEnum.DocFreq);
+                        if (field.OmitTF)
+                        {
+                            this.VerifyDocs(term2.Docs, term2.Positions, TestUtil.Docs(Random(), termsEnum, null, null, DocsEnum.FLAG_NONE), false);
+                        }
+                        else
+                        {
+                            this.VerifyDocs(term2.Docs, term2.Positions, termsEnum.DocsAndPositions(null, null), true);
+                        }
+                    }
+
+                    // Test seek to non-existent terms:
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: seek non-exist terms");
+                    }
+                    for (int i = 0; i < 100; i++)
+                    {
+                        string text2 = TestUtil.RandomUnicodeString(Random()) + ".";
+                        status = termsEnum.SeekCeil(new BytesRef(text2));
+                        Assert.IsTrue(status == TermsEnum.SeekStatus.NOT_FOUND || status == TermsEnum.SeekStatus.END);
+                    }
+
+                    // Seek to each term, backwards:
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: seek terms backwards");
+                    }
+                    for (int i = field.Terms.Length - 1; i >= 0; i--)
+                    {
+                        Assert.AreEqual(TermsEnum.SeekStatus.FOUND, termsEnum.SeekCeil(new BytesRef(field.Terms[i].Text2)), Thread.CurrentThread.Name + ": field=" + field.FieldInfo.Name + " term=" + field.Terms[i].Text2);
+                        Assert.AreEqual(field.Terms[i].Docs.Length, termsEnum.DocFreq);
+                    }
+
+                    // Seek to each term by ord, backwards
+                    for (int i = field.Terms.Length - 1; i >= 0; i--)
+                    {
+                        try
+                        {
+                            termsEnum.SeekExact(i);
+                            Assert.AreEqual(field.Terms[i].Docs.Length, termsEnum.DocFreq);
+                            Assert.IsTrue(termsEnum.Term.BytesEquals(new BytesRef(field.Terms[i].Text2)));
+                        }
+#pragma warning disable 168
+                        catch (System.NotSupportedException uoe)
+#pragma warning restore 168
+                        {
+                        }
+                    }
+
+                    // Seek to non-existent empty-string term
+                    status = termsEnum.SeekCeil(new BytesRef(""));
+                    Assert.IsNotNull(status);
+                    //Assert.AreEqual(TermsEnum.SeekStatus.NOT_FOUND, status);
+
+                    // Make sure we're now pointing to first term
+                    Assert.IsTrue(termsEnum.Term.BytesEquals(new BytesRef(field.Terms[0].Text2)));
+
+                    // Test docs enum
+                    termsEnum.SeekCeil(new BytesRef(""));
+                    upto = 0;
+                    do
+                    {
+                        term2 = field.Terms[upto];
+                        if (Random().Next(3) == 1)
+                        {
+                            DocsEnum docs;
+                            DocsEnum docsAndFreqs;
+                            DocsAndPositionsEnum postings;
+                            if (!field.OmitTF)
+                            {
+                                postings = termsEnum.DocsAndPositions(null, null);
+                                if (postings != null)
+                                {
+                                    docs = docsAndFreqs = postings;
+                                }
+                                else
+                                {
+                                    docs = docsAndFreqs = TestUtil.Docs(Random(), termsEnum, null, null, DocsEnum.FLAG_FREQS);
+                                }
+                            }
+                            else
+                            {
+                                postings = null;
+                                docsAndFreqs = null;
+                                docs = TestUtil.Docs(Random(), termsEnum, null, null, DocsEnum.FLAG_NONE);
+                            }
+                            Assert.IsNotNull(docs);
+                            int upto2 = -1;
+                            bool ended = false;
+                            while (upto2 < term2.Docs.Length - 1)
+                            {
+                                // Maybe skip:
+                                int left = term2.Docs.Length - upto2;
+                                int doc;
+                                if (Random().Next(3) == 1 && left >= 1)
+                                {
+                                    int inc = 1 + Random().Next(left - 1);
+                                    upto2 += inc;
+                                    if (Random().Next(2) == 1)
+                                    {
+                                        doc = docs.Advance(term2.Docs[upto2]);
+                                        Assert.AreEqual(term2.Docs[upto2], doc);
+                                    }
+                                    else
+                                    {
+                                        doc = docs.Advance(1 + term2.Docs[upto2]);
+                                        if (doc == DocIdSetIterator.NO_MORE_DOCS)
+                                        {
+                                            // skipped past last doc
+                                            Debug.Assert(upto2 == term2.Docs.Length - 1);
+                                            ended = true;
+                                            break;
+                                        }
+                                        else
+                                        {
+                                            // skipped to next doc
+                                            Debug.Assert(upto2 < term2.Docs.Length - 1);
+                                            if (doc >= term2.Docs[1 + upto2])
+                                            {
+                                                upto2++;
+                                            }
+                                        }
+                                    }
+                                }
+                                else
+                                {
+                                    doc = docs.NextDoc();
+                                    Assert.IsTrue(doc != -1);
+                                    upto2++;
+                                }
+                                Assert.AreEqual(term2.Docs[upto2], doc);
+                                if (!field.OmitTF)
+                                {
+                                    Assert.AreEqual(term2.Positions[upto2].Length, postings.Freq);
+                                    if (Random().Next(2) == 1)
+                                    {
+                                        this.VerifyPositions(term2.Positions[upto2], postings);
+                                    }
+                                }
+                            }
+
+                            if (!ended)
+                            {
+                                Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docs.NextDoc());
+                            }
+                        }
+                        upto++;
+                    } while (termsEnum.Next() != null);
+
+                    Assert.AreEqual(upto, field.Terms.Length);
+                }
+            }
+        }
+
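+        /// <summary>
+        /// Writes the given fields through the default codec's
+        /// <see cref="FieldsConsumer"/>. Fields are skipped for the pre-flex (3.x)
+        /// codec unless <paramref name="allowPreFlex"/> is true, because the
+        /// verification code assumes unicode sort order.
+        /// </summary>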
+        private void Write(FieldInfos fieldInfos, Directory dir, FieldData[] fields, bool allowPreFlex)
+        {
+            int termIndexInterval = TestUtil.NextInt(Random(), 13, 27);
+            Codec codec = Codec.Default;
+            SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null);
+            SegmentWriteState state = new SegmentWriteState(InfoStream.Default, dir, si, fieldInfos, termIndexInterval, null, NewIOContext(Random()));
+
+            FieldsConsumer consumer = codec.PostingsFormat.FieldsConsumer(state);
+            Array.Sort(fields);
+            foreach (FieldData field in fields)
+            {
+#pragma warning disable 612, 618
+                if (!allowPreFlex && codec is Lucene3xCodec)
+#pragma warning restore 612, 618
+                {
+                    // code below expects unicode sort order
+                    continue;
+                }
+                field.Write(consumer);
+            }
+            consumer.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDocsOnlyFreq()
+        {
+            // tests that when fields are indexed with DOCS_ONLY, the Codec
+            // returns 1 in docsEnum.Freq()
+            Directory dir = NewDirectory();
+            Random random = Random();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+            // we don't need many documents to assert this, but we shouldn't use just one document either
+            int numDocs = AtLeast(random, 50);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("f", "doc", Store.NO));
+                writer.AddDocument(doc);
+            }
+            writer.Dispose();
+
+            Term term = new Term("f", new BytesRef("doc"));
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            foreach (AtomicReaderContext ctx in reader.Leaves)
+            {
+                DocsEnum de = ((AtomicReader)ctx.Reader).TermDocsEnum(term);
+                while (de.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+                {
+                    Assert.AreEqual(1, de.Freq, "wrong freq for doc " + de.DocID);
+                }
+            }
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDisableImpersonation()
+        {
+            Codec[] oldCodecs = new Codec[] { new Lucene40RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE), new Lucene41RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE), new Lucene42RWCodec(OLD_FORMAT_IMPERSONATION_IS_ACTIVE) };
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetCodec(oldCodecs[Random().Next(oldCodecs.Length)]);
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("f", "bar", Store.YES));
+            doc.Add(new NumericDocValuesField("n", 18L));
+            writer.AddDocument(doc);
+
+            OLD_FORMAT_IMPERSONATION_IS_ACTIVE = false;
+            try
+            {
+                writer.Dispose();
+                Assert.Fail("should not have succeeded to impersonate an old format!");
+            }
+#pragma warning disable 168
+            catch (System.NotSupportedException e)
+#pragma warning restore 168
+            {
+                writer.Rollback();
+            }
+            finally
+            {
+                OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
+            }
+
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestCompoundFile.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestCompoundFile.cs b/src/Lucene.Net.Tests/Index/TestCompoundFile.cs
new file mode 100644
index 0000000..8452701
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestCompoundFile.cs
@@ -0,0 +1,917 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Store;
+using System;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using System.IO;
+    using CompoundFileDirectory = Lucene.Net.Store.CompoundFileDirectory;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexInput = Lucene.Net.Store.IndexInput;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using SimpleFSDirectory = Lucene.Net.Store.SimpleFSDirectory;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestCompoundFile : LuceneTestCase
+    {
+        private Directory Dir;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            DirectoryInfo file = CreateTempDir("testIndex");
+            // use a simple FSDir here, to be sure to have SimpleFSInputs
+            Dir = new SimpleFSDirectory(file, null);
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        /// <summary>
+        /// Creates a file of the specified size with random data. </summary>
+        private void CreateRandomFile(Directory dir, string name, int size)
+        {
+            IndexOutput os = dir.CreateOutput(name, NewIOContext(Random()));
+            // create the Random once: constructing a new Random(1) for every byte
+            // would produce the same value each time, i.e. not random data at all
+            Random random = new Random(1);
+            for (int i = 0; i < size; i++)
+            {
+                var b = unchecked((sbyte)(random.NextDouble() * 256));
+                os.WriteByte((byte)b);
+            }
+            os.Dispose();
+        }
+
+        /// <summary>
+        /// Creates a file of the specified size with sequential data. The first
+        ///  byte is written as the start byte provided. All subsequent bytes are
+        ///  computed as start + offset where offset is the number of the byte.
+        /// </summary>
+        private void CreateSequenceFile(Directory dir, string name, sbyte start, int size)
+        {
+            IndexOutput os = dir.CreateOutput(name, NewIOContext(Random()));
+            for (int i = 0; i < size; i++)
+            {
+                os.WriteByte((byte)start);
+                start++;
+            }
+            os.Dispose();
+        }
+
+        private void AssertSameStreams(string msg, IndexInput expected, IndexInput test)
+        {
+            Assert.IsNotNull(expected, msg + " null expected");
+            Assert.IsNotNull(test, msg + " null test");
+            Assert.AreEqual(expected.Length, test.Length, msg + " length");
+            Assert.AreEqual(expected.FilePointer, test.FilePointer, msg + " position");
+
+            var expectedBuffer = new byte[512];
+            var testBuffer = new byte[expectedBuffer.Length];
+
+            long remainder = expected.Length - expected.FilePointer;
+            while (remainder > 0)
+            {
+                int readLen = (int)Math.Min(remainder, expectedBuffer.Length);
+                expected.ReadBytes(expectedBuffer, 0, readLen);
+                test.ReadBytes(testBuffer, 0, readLen);
+                AssertEqualArrays(msg + ", remainder " + remainder, expectedBuffer, testBuffer, 0, readLen);
+                remainder -= readLen;
+            }
+        }
+
+        private void AssertSameStreams(string msg, IndexInput expected, IndexInput actual, long seekTo)
+        {
+            if (seekTo >= 0 && seekTo < expected.Length)
+            {
+                expected.Seek(seekTo);
+                actual.Seek(seekTo);
+                AssertSameStreams(msg + ", seek(mid)", expected, actual);
+            }
+        }
+
+        private void AssertSameSeekBehavior(string msg, IndexInput expected, IndexInput actual)
+        {
+            // seek to 0
+            long point = 0;
+            AssertSameStreams(msg + ", seek(0)", expected, actual, point);
+
+            // seek to middle
+            point = expected.Length / 2L;
+            AssertSameStreams(msg + ", seek(mid)", expected, actual, point);
+
+            // seek to end - 2
+            point = expected.Length - 2;
+            AssertSameStreams(msg + ", seek(end-2)", expected, actual, point);
+
+            // seek to end - 1
+            point = expected.Length - 1;
+            AssertSameStreams(msg + ", seek(end-1)", expected, actual, point);
+
+            // seek to the end
+            point = expected.Length;
+            AssertSameStreams(msg + ", seek(end)", expected, actual, point);
+
+            // seek past end
+            point = expected.Length + 1;
+            AssertSameStreams(msg + ", seek(end+1)", expected, actual, point);
+        }
+
+        private void AssertEqualArrays(string msg, byte[] expected, byte[] test, int start, int len)
+        {
+            Assert.IsNotNull(expected, msg + " null expected");
+            Assert.IsNotNull(test, msg + " null test");
+
+            for (int i = start; i < start + len; i++)
+            {
+                Assert.AreEqual(expected[i], test[i], msg + " " + i);
+            }
+        }
+
+        // ===========================================================
+        //  Tests of the basic CompoundFile functionality
+        // ===========================================================
+
+        /// <summary>
+        /// this test creates a compound file based on a single file.
+        ///  Files of different sizes are tested: 0, 1, 10, 100 bytes.
+        /// </summary>
+        [Test]
+        public virtual void TestSingleFile()
+        {
+            int[] data = new int[] { 0, 1, 10, 100 };
+            for (int i = 0; i < data.Length; i++)
+            {
+                string name = "t" + data[i];
+                CreateSequenceFile(Dir, name, (sbyte)0, data[i]);
+                CompoundFileDirectory csw = new CompoundFileDirectory(Dir, name + ".cfs", NewIOContext(Random()), true);
+                Dir.Copy(csw, name, name, NewIOContext(Random()));
+                csw.Dispose();
+
+                CompoundFileDirectory csr = new CompoundFileDirectory(Dir, name + ".cfs", NewIOContext(Random()), false);
+                IndexInput expected = Dir.OpenInput(name, NewIOContext(Random()));
+                IndexInput actual = csr.OpenInput(name, NewIOContext(Random()));
+                AssertSameStreams(name, expected, actual);
+                AssertSameSeekBehavior(name, expected, actual);
+                expected.Dispose();
+                actual.Dispose();
+                csr.Dispose();
+            }
+        }
+
+        /// <summary>
+        /// this test creates a compound file based on two files.
+        /// </summary>
+        [Test]
+        public virtual void TestTwoFiles()
+        {
+            CreateSequenceFile(Dir, "d1", (sbyte)0, 15);
+            CreateSequenceFile(Dir, "d2", (sbyte)0, 114);
+
+            CompoundFileDirectory csw = new CompoundFileDirectory(Dir, "d.cfs", NewIOContext(Random()), true);
+            Dir.Copy(csw, "d1", "d1", NewIOContext(Random()));
+            Dir.Copy(csw, "d2", "d2", NewIOContext(Random()));
+            csw.Dispose();
+
+            CompoundFileDirectory csr = new CompoundFileDirectory(Dir, "d.cfs", NewIOContext(Random()), false);
+            IndexInput expected = Dir.OpenInput("d1", NewIOContext(Random()));
+            IndexInput actual = csr.OpenInput("d1", NewIOContext(Random()));
+            AssertSameStreams("d1", expected, actual);
+            AssertSameSeekBehavior("d1", expected, actual);
+            expected.Dispose();
+            actual.Dispose();
+
+            expected = Dir.OpenInput("d2", NewIOContext(Random()));
+            actual = csr.OpenInput("d2", NewIOContext(Random()));
+            AssertSameStreams("d2", expected, actual);
+            AssertSameSeekBehavior("d2", expected, actual);
+            expected.Dispose();
+            actual.Dispose();
+            csr.Dispose();
+        }
+
+        /// <summary>
+        /// this test creates a compound file based on a large number of files of
+        ///  various lengths. The file content is generated randomly. The sizes range
+        ///  from 0 to 1 MB. Some of the sizes are selected to test the buffering
+        ///  logic in the file reading code. For this the chunk variable is set to
+        ///  the length of the buffer used internally by the compound file logic.
+        /// </summary>
+        [Test]
+        public virtual void TestRandomFiles()
+        {
+            // Setup the test segment
+            string segment = "test";
+            int chunk = 1024; // internal buffer size used by the stream
+            CreateRandomFile(Dir, segment + ".zero", 0);
+            CreateRandomFile(Dir, segment + ".one", 1);
+            CreateRandomFile(Dir, segment + ".ten", 10);
+            CreateRandomFile(Dir, segment + ".hundred", 100);
+            CreateRandomFile(Dir, segment + ".big1", chunk);
+            CreateRandomFile(Dir, segment + ".big2", chunk - 1);
+            CreateRandomFile(Dir, segment + ".big3", chunk + 1);
+            CreateRandomFile(Dir, segment + ".big4", 3 * chunk);
+            CreateRandomFile(Dir, segment + ".big5", 3 * chunk - 1);
+            CreateRandomFile(Dir, segment + ".big6", 3 * chunk + 1);
+            CreateRandomFile(Dir, segment + ".big7", 1000 * chunk);
+
+            // Setup extraneous files
+            CreateRandomFile(Dir, "onetwothree", 100);
+            CreateRandomFile(Dir, segment + ".notIn", 50);
+            CreateRandomFile(Dir, segment + ".notIn2", 51);
+
+            // Now test
+            CompoundFileDirectory csw = new CompoundFileDirectory(Dir, "test.cfs", NewIOContext(Random()), true);
+            string[] data = new string[] { ".zero", ".one", ".ten", ".hundred", ".big1", ".big2", ".big3", ".big4", ".big5", ".big6", ".big7" };
+            for (int i = 0; i < data.Length; i++)
+            {
+                string fileName = segment + data[i];
+                Dir.Copy(csw, fileName, fileName, NewIOContext(Random()));
+            }
+            csw.Dispose();
+
+            CompoundFileDirectory csr = new CompoundFileDirectory(Dir, "test.cfs", NewIOContext(Random()), false);
+            for (int i = 0; i < data.Length; i++)
+            {
+                IndexInput check = Dir.OpenInput(segment + data[i], NewIOContext(Random()));
+                IndexInput test = csr.OpenInput(segment + data[i], NewIOContext(Random()));
+                AssertSameStreams(data[i], check, test);
+                AssertSameSeekBehavior(data[i], check, test);
+                test.Dispose();
+                check.Dispose();
+            }
+            csr.Dispose();
+        }
+
+        /// <summary>
+        /// Setup a larger compound file with a number of components, each of
+        ///  which is a sequential file (so that we can easily tell that we are
+        ///  reading in the right byte). The method sets up 20 files - f0 to f19;
+        ///  the size of each file is 2000 bytes.
+        /// </summary>
+        private void SetUp_2()
+        {
+            CompoundFileDirectory cw = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random()), true);
+            for (int i = 0; i < 20; i++)
+            {
+                CreateSequenceFile(Dir, "f" + i, (sbyte)0, 2000);
+                string fileName = "f" + i;
+                Dir.Copy(cw, fileName, fileName, NewIOContext(Random()));
+            }
+            cw.Dispose();
+        }
+
+        [Test]
+        public virtual void TestReadAfterClose()
+        {
+            try
+            {
+                Demo_FSIndexInputBug(Dir, "test");
+            }
+#pragma warning disable 168
+            catch (ObjectDisposedException ode)
+#pragma warning restore 168
+            {
+                // expected
+            }
+        }
+
+        private void Demo_FSIndexInputBug(Directory fsdir, string file)
+        {
+            // Setup the test file - we need more than 1024 bytes
+            IndexOutput os = fsdir.CreateOutput(file, IOContext.DEFAULT);
+            for (int i = 0; i < 2000; i++)
+            {
+                os.WriteByte((byte)(sbyte)i);
+            }
+            os.Dispose();
+
+            IndexInput @in = fsdir.OpenInput(file, IOContext.DEFAULT);
+
+            // this read primes the buffer in IndexInput
+            @in.ReadByte();
+
+            // Close the file
+            @in.Dispose();
+
+            // ERROR: this call should fail, but succeeds because the buffer
+            // is still filled
+            @in.ReadByte();
+
+            // ERROR: this call should fail too, but succeeds because Seek()
+            // is lazy and performs no actual IO until the next read
+            @in.Seek(1099);
+
+            try
+            {
+                // OK: this call correctly fails. We are now past the 1024 internal
+                // buffer, so an actual IO is attempted, which fails
+                @in.ReadByte();
+                Assert.Fail("expected readByte() to throw exception");
+            }
+#pragma warning disable 168
+            catch (IOException e)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+        }
+
+        [Test]
+        public virtual void TestClonedStreamsClosing()
+        {
+            SetUp_2();
+            CompoundFileDirectory cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random()), false);
+
+            // basic clone
+            IndexInput expected = Dir.OpenInput("f11", NewIOContext(Random()));
+
+            // this test only works for FSIndexInput
+            Assert.IsTrue(TestHelper.IsSimpleFSIndexInput(expected));
+            Assert.IsTrue(TestHelper.IsSimpleFSIndexInputOpen(expected));
+
+            IndexInput one = cr.OpenInput("f11", NewIOContext(Random()));
+
+            IndexInput two = (IndexInput)one.Clone();
+
+            AssertSameStreams("basic clone one", expected, one);
+            expected.Seek(0);
+            AssertSameStreams("basic clone two", expected, two);
+
+            // Now close the first stream
+            one.Dispose();
+
+            // The following should really fail, since we shouldn't be able to
+            // access a file once it has been disposed (regardless of
+            // buffering and/or clone magic)
+            expected.Seek(0);
+            two.Seek(0);
+            AssertSameStreams("basic clone two/2", expected, two);
+
+            // Now close the compound reader
+            cr.Dispose();
+
+            // The following may also fail since the compound stream is closed
+            expected.Seek(0);
+            two.Seek(0);
+            //assertSameStreams("basic clone two/3", expected, two);
+
+            // Now close the second clone
+            two.Dispose();
+            expected.Seek(0);
+            two.Seek(0);
+            //assertSameStreams("basic clone two/4", expected, two);
+
+            expected.Dispose();
+        }
+
+        /// <summary>
+        /// this test opens two files from a compound stream and verifies that
+        ///  their file positions are independent of each other.
+        /// </summary>
+        [Test]
+        public virtual void TestRandomAccess()
+        {
+            SetUp_2();
+            CompoundFileDirectory cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random()), false);
+
+            // Open two files
+            IndexInput e1 = Dir.OpenInput("f11", NewIOContext(Random()));
+            IndexInput e2 = Dir.OpenInput("f3", NewIOContext(Random()));
+
+            IndexInput a1 = cr.OpenInput("f11", NewIOContext(Random()));
+            IndexInput a2 = cr.OpenInput("f3", NewIOContext(Random()));
+
+            // Seek the first pair
+            e1.Seek(100);
+            a1.Seek(100);
+            Assert.AreEqual(100, e1.FilePointer);
+            Assert.AreEqual(100, a1.FilePointer);
+            byte be1 = e1.ReadByte();
+            byte ba1 = a1.ReadByte();
+            Assert.AreEqual(be1, ba1);
+
+            // Now seek the second pair
+            e2.Seek(1027);
+            a2.Seek(1027);
+            Assert.AreEqual(1027, e2.FilePointer);
+            Assert.AreEqual(1027, a2.FilePointer);
+            byte be2 = e2.ReadByte();
+            byte ba2 = a2.ReadByte();
+            Assert.AreEqual(be2, ba2);
+
+            // Now make sure the first one didn't move
+            Assert.AreEqual(101, e1.FilePointer);
+            Assert.AreEqual(101, a1.FilePointer);
+            be1 = e1.ReadByte();
+            ba1 = a1.ReadByte();
+            Assert.AreEqual(be1, ba1);
+
+            // Now move the first one again, past the buffer length
+            e1.Seek(1910);
+            a1.Seek(1910);
+            Assert.AreEqual(1910, e1.FilePointer);
+            Assert.AreEqual(1910, a1.FilePointer);
+            be1 = e1.ReadByte();
+            ba1 = a1.ReadByte();
+            Assert.AreEqual(be1, ba1);
+
+            // Now make sure the second set didn't move
+            Assert.AreEqual(1028, e2.FilePointer);
+            Assert.AreEqual(1028, a2.FilePointer);
+            be2 = e2.ReadByte();
+            ba2 = a2.ReadByte();
+            Assert.AreEqual(be2, ba2);
+
+            // Move the second set back, again cross the buffer size
+            e2.Seek(17);
+            a2.Seek(17);
+            Assert.AreEqual(17, e2.FilePointer);
+            Assert.AreEqual(17, a2.FilePointer);
+            be2 = e2.ReadByte();
+            ba2 = a2.ReadByte();
+            Assert.AreEqual(be2, ba2);
+
+            // Finally, make sure the first set didn't move
+            Assert.AreEqual(1911, e1.FilePointer);
+            Assert.AreEqual(1911, a1.FilePointer);
+            be1 = e1.ReadByte();
+            ba1 = a1.ReadByte();
+            Assert.AreEqual(be1, ba1);
+
+            e1.Dispose();
+            e2.Dispose();
+            a1.Dispose();
+            a2.Dispose();
+            cr.Dispose();
+        }
+
+        /// <summary>
+        /// this test opens two files from a compound stream, clones them, and
+        ///  verifies that the clones' file positions are independent of each other.
+        /// </summary>
+        [Test]
+        public virtual void TestRandomAccessClones()
+        {
+            SetUp_2();
+            CompoundFileDirectory cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random()), false);
+
+            // Open two files
+            IndexInput e1 = cr.OpenInput("f11", NewIOContext(Random()));
+            IndexInput e2 = cr.OpenInput("f3", NewIOContext(Random()));
+
+            IndexInput a1 = (IndexInput)e1.Clone();
+            IndexInput a2 = (IndexInput)e2.Clone();
+
+            // Seek the first pair
+            e1.Seek(100);
+            a1.Seek(100);
+            Assert.AreEqual(100, e1.FilePointer);
+            Assert.AreEqual(100, a1.FilePointer);
+            byte be1 = e1.ReadByte();
+            byte ba1 = a1.ReadByte();
+            Assert.AreEqual(be1, ba1);
+
+            // Now seek the second pair
+            e2.Seek(1027);
+            a2.Seek(1027);
+            Assert.AreEqual(1027, e2.FilePointer);
+            Assert.AreEqual(1027, a2.FilePointer);
+            byte be2 = e2.ReadByte();
+            byte ba2 = a2.ReadByte();
+            Assert.AreEqual(be2, ba2);
+
+            // Now make sure the first one didn't move
+            Assert.AreEqual(101, e1.FilePointer);
+            Assert.AreEqual(101, a1.FilePointer);
+            be1 = e1.ReadByte();
+            ba1 = a1.ReadByte();
+            Assert.AreEqual(be1, ba1);
+
+            // Now move the first one again, past the buffer length
+            e1.Seek(1910);
+            a1.Seek(1910);
+            Assert.AreEqual(1910, e1.FilePointer);
+            Assert.AreEqual(1910, a1.FilePointer);
+            be1 = e1.ReadByte();
+            ba1 = a1.ReadByte();
+            Assert.AreEqual(be1, ba1);
+
+            // Now make sure the second set didn't move
+            Assert.AreEqual(1028, e2.FilePointer);
+            Assert.AreEqual(1028, a2.FilePointer);
+            be2 = e2.ReadByte();
+            ba2 = a2.ReadByte();
+            Assert.AreEqual(be2, ba2);
+
+            // Move the second set back, again cross the buffer size
+            e2.Seek(17);
+            a2.Seek(17);
+            Assert.AreEqual(17, e2.FilePointer);
+            Assert.AreEqual(17, a2.FilePointer);
+            be2 = e2.ReadByte();
+            ba2 = a2.ReadByte();
+            Assert.AreEqual(be2, ba2);
+
+            // Finally, make sure the first set didn't move
+            Assert.AreEqual(1911, e1.FilePointer);
+            Assert.AreEqual(1911, a1.FilePointer);
+            be1 = e1.ReadByte();
+            ba1 = a1.ReadByte();
+            Assert.AreEqual(be1, ba1);
+
+            e1.Dispose();
+            e2.Dispose();
+            a1.Dispose();
+            a2.Dispose();
+            cr.Dispose();
+        }
+
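The clone contract these tests exercise boils down to a few lines (a minimal sketch; `dir` stands for any open Lucene.Net.Store.Directory, and the file name is hypothetical):

    IndexInput input = dir.OpenInput("data.bin", IOContext.DEFAULT);
    IndexInput clone = (IndexInput)input.Clone();
    input.Seek(100);   // moves only the original's file pointer
    clone.Seek(200);   // moves only the clone's file pointer
    Assert.AreEqual(100, input.FilePointer);
    Assert.AreEqual(200, clone.FilePointer);
    input.Dispose();
    clone.Dispose();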
+        [Test]
+        public virtual void TestFileNotFound()
+        {
+            SetUp_2();
+            CompoundFileDirectory cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random()), false);
+
+            // Open two files
+            try
+            {
+                cr.OpenInput("bogus", NewIOContext(Random()));
+                Assert.Fail("File not found");
+            }
+#pragma warning disable 168
+            catch (Exception e)
+#pragma warning restore 168
+            {
+                /* success */
+                //System.out.println("SUCCESS: File Not Found: " + e);
+            }
+
+            cr.Dispose();
+        }
+
+        [Test]
+        public virtual void TestReadPastEOF()
+        {
+            SetUp_2();
+            var cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random()), false);
+            IndexInput @is = cr.OpenInput("f2", NewIOContext(Random()));
+            @is.Seek(@is.Length - 10);
+            var b = new byte[100];
+            @is.ReadBytes(b, 0, 10);
+
+            try
+            {
+                @is.ReadByte();
+                Assert.Fail("Single byte read past end of file");
+            }
+#pragma warning disable 168
+            catch (IOException e)
+#pragma warning restore 168
+            {
+                /* success */
+                //System.out.println("SUCCESS: single byte read past end of file: " + e);
+            }
+
+            @is.Seek(@is.Length - 10);
+            try
+            {
+                @is.ReadBytes(b, 0, 50);
+                Assert.Fail("Block read past end of file");
+            }
+#pragma warning disable 168
+            catch (IOException e)
+#pragma warning restore 168
+            {
+                /* success */
+                //System.out.println("SUCCESS: block read past end of file: " + e);
+            }
+
+            @is.Dispose();
+            cr.Dispose();
+        }
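The EOF contract asserted above is simply that reads past the end throw rather than return garbage (a minimal sketch; `input` stands for any open IndexInput):

    input.Seek(input.Length);        // position exactly at end of file
    try
    {
        input.ReadByte();            // any further read must throw
        Assert.Fail("read past EOF should throw");
    }
    catch (IOException)
    {
        // expected
    }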
+
+        /// <summary>
+        /// This test verifies that writes larger than the output buffer size
+        /// correctly increment the file pointer.
+        /// </summary>
+        [Test]
+        public virtual void TestLargeWrites()
+        {
+            IndexOutput os = Dir.CreateOutput("testBufferStart.txt", NewIOContext(Random()));
+
+            var largeBuf = new byte[2048];
+            // Use a single Random instance; constructing a new Random with the
+            // same seed on every iteration would fill the buffer with one
+            // repeated byte value.
+            Random random = new Random(1);
+            for (int i = 0; i < largeBuf.Length; i++)
+            {
+                largeBuf[i] = (byte)unchecked((sbyte)(random.NextDouble() * 256));
+            }
+
+            long currentPos = os.FilePointer;
+            os.WriteBytes(largeBuf, largeBuf.Length);
+
+            try
+            {
+                Assert.AreEqual(currentPos + largeBuf.Length, os.FilePointer);
+            }
+            finally
+            {
+                os.Dispose();
+            }
+        }
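The invariant here is that FilePointer accounts for bytes still sitting in the output buffer, not just bytes already flushed (a minimal sketch; `dir` and the file name are hypothetical):

    IndexOutput output = dir.CreateOutput("big.bin", IOContext.DEFAULT);
    var payload = new byte[4096];    // larger than the default output buffer
    long before = output.FilePointer;
    output.WriteBytes(payload, payload.Length);
    Assert.AreEqual(before + payload.Length, output.FilePointer);
    output.Dispose();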
+
+        [Test]
+        public virtual void TestAddExternalFile()
+        {
+            CreateSequenceFile(Dir, "d1", (sbyte)0, 15);
+
+            Directory newDir = NewDirectory();
+            CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
+            Dir.Copy(csw, "d1", "d1", NewIOContext(Random()));
+            csw.Dispose();
+
+            CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
+            IndexInput expected = Dir.OpenInput("d1", NewIOContext(Random()));
+            IndexInput actual = csr.OpenInput("d1", NewIOContext(Random()));
+            AssertSameStreams("d1", expected, actual);
+            AssertSameSeekBehavior("d1", expected, actual);
+            expected.Dispose();
+            actual.Dispose();
+            csr.Dispose();
+
+            newDir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestAppend()
+        {
+            Directory newDir = NewDirectory();
+            CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
+            int size = 5 + Random().Next(128);
+            for (int j = 0; j < 2; j++)
+            {
+                IndexOutput os = csw.CreateOutput("seg_" + j + "_foo.txt", NewIOContext(Random()));
+                for (int i = 0; i < size; i++)
+                {
+                    os.WriteInt32(i * j);
+                }
+                os.Dispose();
+                string[] listAll = newDir.ListAll();
+                Assert.AreEqual(1, listAll.Length);
+                Assert.AreEqual("d.cfs", listAll[0]);
+            }
+            CreateSequenceFile(Dir, "d1", (sbyte)0, 15);
+            Dir.Copy(csw, "d1", "d1", NewIOContext(Random()));
+            string[] listAll_ = newDir.ListAll();
+            Assert.AreEqual(1, listAll_.Length);
+            Assert.AreEqual("d.cfs", listAll_[0]);
+            csw.Dispose();
+            CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
+            for (int j = 0; j < 2; j++)
+            {
+                IndexInput openInput = csr.OpenInput("seg_" + j + "_foo.txt", NewIOContext(Random()));
+                Assert.AreEqual(size * 4, openInput.Length);
+                for (int i = 0; i < size; i++)
+                {
+                    Assert.AreEqual(i * j, openInput.ReadInt32());
+                }
+
+                openInput.Dispose();
+            }
+            IndexInput expected = Dir.OpenInput("d1", NewIOContext(Random()));
+            IndexInput actual = csr.OpenInput("d1", NewIOContext(Random()));
+            AssertSameStreams("d1", expected, actual);
+            AssertSameSeekBehavior("d1", expected, actual);
+            expected.Dispose();
+            actual.Dispose();
+            csr.Dispose();
+            newDir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestAppendTwice()
+        {
+            Directory newDir = NewDirectory();
+            CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
+            CreateSequenceFile(newDir, "d1", (sbyte)0, 15);
+            IndexOutput @out = csw.CreateOutput("d.xyz", NewIOContext(Random()));
+            @out.WriteInt32(0);
+            @out.Dispose();
+            Assert.AreEqual(1, csw.ListAll().Length);
+            Assert.AreEqual("d.xyz", csw.ListAll()[0]);
+
+            csw.Dispose();
+
+            CompoundFileDirectory cfr = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
+            Assert.AreEqual(1, cfr.ListAll().Length);
+            Assert.AreEqual("d.xyz", cfr.ListAll()[0]);
+            cfr.Dispose();
+            newDir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestEmptyCFS()
+        {
+            Directory newDir = NewDirectory();
+            CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
+            csw.Dispose();
+
+            CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
+            Assert.AreEqual(0, csr.ListAll().Length);
+            csr.Dispose();
+
+            newDir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestReadNestedCFP()
+        {
+            Directory newDir = NewDirectory();
+            CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
+            CompoundFileDirectory nested = new CompoundFileDirectory(newDir, "b.cfs", NewIOContext(Random()), true);
+            IndexOutput @out = nested.CreateOutput("b.xyz", NewIOContext(Random()));
+            IndexOutput out1 = nested.CreateOutput("b_1.xyz", NewIOContext(Random()));
+            @out.WriteInt32(0);
+            out1.WriteInt32(1);
+            @out.Dispose();
+            out1.Dispose();
+            nested.Dispose();
+            newDir.Copy(csw, "b.cfs", "b.cfs", NewIOContext(Random()));
+            newDir.Copy(csw, "b.cfe", "b.cfe", NewIOContext(Random()));
+            newDir.DeleteFile("b.cfs");
+            newDir.DeleteFile("b.cfe");
+            csw.Dispose();
+
+            Assert.AreEqual(2, newDir.ListAll().Length);
+            csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
+
+            Assert.AreEqual(2, csw.ListAll().Length);
+            nested = new CompoundFileDirectory(csw, "b.cfs", NewIOContext(Random()), false);
+
+            Assert.AreEqual(2, nested.ListAll().Length);
+            IndexInput openInput = nested.OpenInput("b.xyz", NewIOContext(Random()));
+            Assert.AreEqual(0, openInput.ReadInt32());
+            openInput.Dispose();
+            openInput = nested.OpenInput("b_1.xyz", NewIOContext(Random()));
+            Assert.AreEqual(1, openInput.ReadInt32());
+            openInput.Dispose();
+            nested.Dispose();
+            csw.Dispose();
+            newDir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDoubleClose()
+        {
+            Directory newDir = NewDirectory();
+            CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
+            IndexOutput @out = csw.CreateOutput("d.xyz", NewIOContext(Random()));
+            @out.WriteInt32(0);
+            @out.Dispose();
+
+            csw.Dispose();
+            // close a second time - must have no effect according to IDisposable
+            csw.Dispose();
+
+            csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
+            IndexInput openInput = csw.OpenInput("d.xyz", NewIOContext(Random()));
+            Assert.AreEqual(0, openInput.ReadInt32());
+            openInput.Dispose();
+            csw.Dispose();
+            // close a second time - must have no effect according to IDisposable
+            csw.Dispose();
+
+            newDir.Dispose();
+        }
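The guarantee under test is the standard IDisposable one: a second Dispose call must be a silent no-op (a minimal sketch; the directory and file names are hypothetical):

    var cfs = new CompoundFileDirectory(dir, "x.cfs", IOContext.DEFAULT, true);
    IndexOutput output = cfs.CreateOutput("x.abc", IOContext.DEFAULT);
    output.WriteInt32(42);
    output.Dispose();
    cfs.Dispose();
    cfs.Dispose();   // second call: must not throw or corrupt the file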
+
+        // Make sure we don't somehow use more than 1 descriptor
+        // when reading a CFS with many subs:
+        [Test]
+        public virtual void TestManySubFiles()
+        {
+            Directory d = NewFSDirectory(CreateTempDir("CFSManySubFiles"));
+            int FILE_COUNT = AtLeast(500);
+
+            for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
+            {
+                IndexOutput @out = d.CreateOutput("file." + fileIdx, NewIOContext(Random()));
+                @out.WriteByte((byte)(sbyte)fileIdx);
+                @out.Dispose();
+            }
+
+            CompoundFileDirectory cfd = new CompoundFileDirectory(d, "c.cfs", NewIOContext(Random()), true);
+            for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
+            {
+                string fileName = "file." + fileIdx;
+                d.Copy(cfd, fileName, fileName, NewIOContext(Random()));
+            }
+            cfd.Dispose();
+
+            IndexInput[] ins = new IndexInput[FILE_COUNT];
+            CompoundFileDirectory cfr = new CompoundFileDirectory(d, "c.cfs", NewIOContext(Random()), false);
+            for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
+            {
+                ins[fileIdx] = cfr.OpenInput("file." + fileIdx, NewIOContext(Random()));
+            }
+
+            for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
+            {
+                Assert.AreEqual((byte)fileIdx, ins[fileIdx].ReadByte());
+            }
+
+            for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
+            {
+                ins[fileIdx].Dispose();
+            }
+            cfr.Dispose();
+            d.Dispose();
+        }
+
+        [Test]
+        public virtual void TestListAll()
+        {
+            Directory dir = NewDirectory();
+            // riw should sometimes create docvalues fields, etc
+            RandomIndexWriter riw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            // these fields should sometimes get term vectors, etc
+            Field idField = NewStringField("id", "", Field.Store.NO);
+            Field bodyField = NewTextField("body", "", Field.Store.NO);
+            doc.Add(idField);
+            doc.Add(bodyField);
+            for (int i = 0; i < 100; i++)
+            {
+                idField.SetStringValue(Convert.ToString(i));
+                bodyField.SetStringValue(TestUtil.RandomUnicodeString(Random()));
+                riw.AddDocument(doc);
+                if (Random().Next(7) == 0)
+                {
+                    riw.Commit();
+                }
+            }
+            riw.Dispose();
+            CheckFiles(dir);
+            dir.Dispose();
+        }
+
+        // checks that we can open all files returned by listAll!
+        private void CheckFiles(Directory dir)
+        {
+            foreach (string file in dir.ListAll())
+            {
+                if (file.EndsWith(IndexFileNames.COMPOUND_FILE_EXTENSION))
+                {
+                    CompoundFileDirectory cfsDir = new CompoundFileDirectory(dir, file, NewIOContext(Random()), false);
+                    CheckFiles(cfsDir); // recurse into cfs
+                    cfsDir.Dispose();
+                }
+                IndexInput @in = null;
+                bool success = false;
+                try
+                {
+                    @in = dir.OpenInput(file, NewIOContext(Random()));
+                    success = true;
+                }
+                finally
+                {
+                    if (success)
+                    {
+                        IOUtils.Close(@in);
+                    }
+                    else
+                    {
+                        IOUtils.CloseWhileHandlingException(@in);
+                    }
+                }
+            }
+        }
+    }
+}
\ No newline at end of file


[66/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Renamed Util\automaton\ to Util\Automaton\

Posted by ni...@apache.org.
Lucene.Net.TestFramework: Renamed Util\automaton\ to Util\Automaton\


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/6a55c21f
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/6a55c21f
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/6a55c21f

Branch: refs/heads/api-work
Commit: 6a55c21f8ea386a68935ca98dc64a60ce03002c3
Parents: 49a0460
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 26 03:39:03 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:18:00 2017 +0700

----------------------------------------------------------------------
 .../Lucene.Net.TestFramework.csproj             |   2 +-
 .../Util/Automaton/AutomatonTestUtil.cs         | 575 +++++++++++++++++++
 .../Util/automaton/AutomatonTestUtil.cs         | 575 -------------------
 3 files changed, 576 insertions(+), 576 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6a55c21f/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
index 351f632..788338e 100644
--- a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
+++ b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
@@ -434,7 +434,7 @@
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Util\ApiScanTestBase.cs" />
-    <Compile Include="Util\automaton\AutomatonTestUtil.cs">
+    <Compile Include="Util\Automaton\AutomatonTestUtil.cs">
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Util\BaseDocIdSetTestCase.cs">

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6a55c21f/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs b/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs
new file mode 100644
index 0000000..2c0e1f9
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Util/Automaton/AutomatonTestUtil.cs
@@ -0,0 +1,575 @@
+using Lucene.Net.Support;
+using System;
+using System.Collections.Generic;
+using NUnit.Framework;
+
+namespace Lucene.Net.Util.Automaton
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Randomized.Generators;
+
+    /// <summary>
+    /// Utilities for testing automata.
+    /// <p>
+    /// Capable of generating random regular expressions and automata;
+    /// also provides a number of very basic, unoptimized implementations
+    /// (the *Slow methods) for testing.
+    /// </summary>
+    public class AutomatonTestUtil
+    {
+        /// <summary>
+        /// Returns a random regexp string, drawing characters from the full Unicode range. </summary>
+        public static string RandomRegexp(Random r)
+        {
+            while (true)
+            {
+                string regexp = RandomRegexpString(r);
+                // we will also generate some undefined unicode queries
+                if (!UnicodeUtil.ValidUTF16String(regexp.ToCharArray()))
+                {
+                    continue;
+                }
+                try
+                {
+                    new RegExp(regexp, RegExp.NONE);
+                    return regexp;
+                }
+#pragma warning disable 168
+                catch (Exception e)
+#pragma warning restore 168
+                {
+                }
+            }
+        }
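In use, the generated pattern feeds straight into the RegExp/Automaton pipeline, exactly as RandomAutomaton does further down (a sketch; the seed is arbitrary):

    Random r = new Random(7);
    string pattern = AutomatonTestUtil.RandomRegexp(r);
    Automaton a = new RegExp(pattern, RegExp.NONE).ToAutomaton();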
+
+        private static string RandomRegexpString(Random r)
+        {
+            int end = r.Next(20);
+            if (end == 0)
+            {
+                // allow 0 length
+                return "";
+            }
+            char[] buffer = new char[end];
+            for (int i = 0; i < end; i++)
+            {
+                int t = r.Next(15);
+                if (0 == t && i < end - 1)
+                {
+                    // Make a surrogate pair
+                    // High surrogate
+                    buffer[i++] = (char)TestUtil.NextInt(r, 0xd800, 0xdbff);
+                    // Low surrogate
+                    buffer[i] = (char)TestUtil.NextInt(r, 0xdc00, 0xdfff);
+                }
+                else if (t <= 1)
+                {
+                    buffer[i] = (char)r.Next(0x80);
+                }
+                else if (2 == t)
+                {
+                    buffer[i] = (char)TestUtil.NextInt(r, 0x80, 0x800);
+                }
+                else if (3 == t)
+                {
+                    buffer[i] = (char)TestUtil.NextInt(r, 0x800, 0xd7ff);
+                }
+                else if (4 == t)
+                {
+                    buffer[i] = (char)TestUtil.NextInt(r, 0xe000, 0xffff);
+                }
+                else if (5 == t)
+                {
+                    buffer[i] = '.';
+                }
+                else if (6 == t)
+                {
+                    buffer[i] = '?';
+                }
+                else if (7 == t)
+                {
+                    buffer[i] = '*';
+                }
+                else if (8 == t)
+                {
+                    buffer[i] = '+';
+                }
+                else if (9 == t)
+                {
+                    buffer[i] = '(';
+                }
+                else if (10 == t)
+                {
+                    buffer[i] = ')';
+                }
+                else if (11 == t)
+                {
+                    buffer[i] = '-';
+                }
+                else if (12 == t)
+                {
+                    buffer[i] = '[';
+                }
+                else if (13 == t)
+                {
+                    buffer[i] = ']';
+                }
+                else if (14 == t)
+                {
+                    buffer[i] = '|';
+                }
+            }
+            return new string(buffer, 0, end);
+        }
+
+        /// <summary>
+        /// Picks a random int code point, avoiding surrogates;
+        /// throws ArgumentException if this transition only
+        /// accepts surrogates.
+        /// </summary>
+        private static int GetRandomCodePoint(Random r, Transition t)
+        {
+            int code;
+            if (t.Max < UnicodeUtil.UNI_SUR_HIGH_START || t.Min > UnicodeUtil.UNI_SUR_HIGH_END)
+            {
+                // easy: entire range is before or after surrogates
+                code = t.Min + r.Next(t.Max - t.Min + 1);
+            }
+            else if (t.Min >= UnicodeUtil.UNI_SUR_HIGH_START)
+            {
+                if (t.Max > UnicodeUtil.UNI_SUR_LOW_END)
+                {
+                    // after surrogates
+                    code = 1 + UnicodeUtil.UNI_SUR_LOW_END + r.Next(t.Max - UnicodeUtil.UNI_SUR_LOW_END);
+                }
+                else
+                {
+                    throw new System.ArgumentException("transition accepts only surrogates: " + t);
+                }
+            }
+            else if (t.Max <= UnicodeUtil.UNI_SUR_LOW_END)
+            {
+                if (t.Min < UnicodeUtil.UNI_SUR_HIGH_START)
+                {
+                    // before surrogates
+                    code = t.Min + r.Next(UnicodeUtil.UNI_SUR_HIGH_START - t.Min);
+                }
+                else
+                {
+                    throw new System.ArgumentException("transition accepts only surrogates: " + t);
+                }
+            }
+            else
+            {
+                // range includes all surrogates
+                int gap1 = UnicodeUtil.UNI_SUR_HIGH_START - t.Min;
+                int gap2 = t.Max - UnicodeUtil.UNI_SUR_LOW_END;
+                int c = r.Next(gap1 + gap2);
+                if (c < gap1)
+                {
+                    code = t.Min + c;
+                }
+                else
+                {
+                    code = UnicodeUtil.UNI_SUR_LOW_END + c - gap1 + 1;
+                }
+            }
+
+            Assert.True(code >= t.Min && code <= t.Max && (code < UnicodeUtil.UNI_SUR_HIGH_START || code > UnicodeUtil.UNI_SUR_LOW_END), "code=" + code + " min=" + t.Min + " max=" + t.Max);
+            return code;
+        }
+
+        /// <summary>
+        /// Lets you retrieve random strings accepted
+        /// by an Automaton.
+        /// <p>
+        /// Once created, call <seealso cref="GetRandomAcceptedString(Random)"/>
+        /// to get a new string (in UTF-32 code points).
+        /// </summary>
+        public class RandomAcceptedStrings
+        {
+            internal readonly IDictionary<Transition, bool?> LeadsToAccept;
+            internal readonly Automaton a;
+
+            private class ArrivingTransition
+            {
+                internal readonly State From;
+                internal readonly Transition t;
+
+                public ArrivingTransition(State from, Transition t)
+                {
+                    this.From = from;
+                    this.t = t;
+                }
+            }
+
+            public RandomAcceptedStrings(Automaton a)
+            {
+                this.a = a;
+                if (!String.IsNullOrEmpty(a.Singleton))
+                {
+                    LeadsToAccept = null;
+                    return;
+                }
+
+                // must use IdentityHashMap because two Transitions with
+                // different start nodes can be considered the same
+                LeadsToAccept = new IdentityHashMap<Transition, bool?>();
+                IDictionary<State, IList<ArrivingTransition>> allArriving = new Dictionary<State, IList<ArrivingTransition>>();
+
+                LinkedList<State> q = new LinkedList<State>();
+                HashSet<State> seen = new HashSet<State>();
+
+                // reverse map the transitions, so we can quickly look
+                // up all arriving transitions to a given state
+                foreach (State s in a.GetNumberedStates())
+                {
+                    for (int i = 0; i < s.numTransitions; i++)
+                    {
+                        Transition t = s.TransitionsArray[i];
+                        IList<ArrivingTransition> tl;
+                        allArriving.TryGetValue(t.Dest, out tl);
+                        if (tl == null)
+                        {
+                            tl = new List<ArrivingTransition>();
+                            allArriving[t.Dest] = tl;
+                        }
+                        tl.Add(new ArrivingTransition(s, t));
+                    }
+                    if (s.Accept)
+                    {
+                        q.AddLast(s);
+                        seen.Add(s);
+                    }
+                }
+
+                // Breadth-first search, from accept states,
+                // backwards:
+                while (q.Count > 0)
+                {
+                    State s = q.First.Value;
+                    q.Remove(s);
+                    IList<ArrivingTransition> arriving;
+                    allArriving.TryGetValue(s, out arriving);
+                    if (arriving != null)
+                    {
+                        foreach (ArrivingTransition at in arriving)
+                        {
+                            State from = at.From;
+                            if (!seen.Contains(from))
+                            {
+                                q.AddLast(from);
+                                seen.Add(from);
+                                LeadsToAccept[at.t] = true;
+                            }
+                        }
+                    }
+                }
+            }
+
+            public int[] GetRandomAcceptedString(Random r)
+            {
+                IList<int?> soFar = new List<int?>();
+                if (a.IsSingleton)
+                {
+                    // accepts only one
+                    var s = a.Singleton;
+
+                    int charUpto = 0;
+                    while (charUpto < s.Length)
+                    {
+                        int cp = Character.CodePointAt(s, charUpto);
+                        charUpto += Character.CharCount(cp);
+                        soFar.Add(cp);
+                    }
+                }
+                else
+                {
+                    var s = a.GetInitialState();
+
+                    while (true)
+                    {
+                        if (s.Accept)
+                        {
+                            if (s.numTransitions == 0)
+                            {
+                                // stop now
+                                break;
+                            }
+                            else
+                            {
+                                if (r.NextBoolean())
+                                {
+                                    break;
+                                }
+                            }
+                        }
+
+                        if (s.numTransitions == 0)
+                        {
+                            throw new Exception("this automaton has dead states");
+                        }
+
+                        bool cheat = r.NextBoolean();
+
+                        Transition t;
+                        if (cheat)
+                        {
+                            // pick a transition that we know is the fastest
+                            // path to an accept state
+                            IList<Transition> toAccept = new List<Transition>();
+                            for (int i = 0; i < s.numTransitions; i++)
+                            {
+                                Transition t0 = s.TransitionsArray[i];
+                                if (LeadsToAccept.ContainsKey(t0))
+                                {
+                                    toAccept.Add(t0);
+                                }
+                            }
+                            if (toAccept.Count == 0)
+                            {
+                                // this is OK -- it means we jumped into a cycle
+                                t = s.TransitionsArray[r.Next(s.numTransitions)];
+                            }
+                            else
+                            {
+                                t = toAccept[r.Next(toAccept.Count)];
+                            }
+                        }
+                        else
+                        {
+                            t = s.TransitionsArray[r.Next(s.numTransitions)];
+                        }
+                        soFar.Add(GetRandomCodePoint(r, t));
+                        s = t.Dest;
+                    }
+                }
+
+                return ArrayUtil.ToInt32Array(soFar);
+            }
+        }
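Typical usage of the class above (a sketch; the regexp and seed are arbitrary, and the StringBuilder loop is just one way to turn the returned UTF-32 code points back into a string):

    Automaton a = new RegExp("a(b|c)*d", RegExp.NONE).ToAutomaton();
    var accepted = new AutomatonTestUtil.RandomAcceptedStrings(a);
    int[] codePoints = accepted.GetRandomAcceptedString(new Random(42));
    var sb = new System.Text.StringBuilder();
    foreach (int cp in codePoints)
    {
        sb.Append(char.ConvertFromUtf32(cp));   // BCL UTF-32 -> UTF-16
    }
    string s = sb.ToString();                   // some string accepted by the automaton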
+
+        /// <summary>
+        /// Returns a random NFA/DFA for testing. </summary>
+        public static Automaton RandomAutomaton(Random random)
+        {
+            // get two random Automata from regexps
+            Automaton a1 = (new RegExp(AutomatonTestUtil.RandomRegexp(random), RegExp.NONE)).ToAutomaton();
+            if (random.NextBoolean())
+            {
+                a1 = BasicOperations.Complement(a1);
+            }
+
+            Automaton a2 = (new RegExp(AutomatonTestUtil.RandomRegexp(random), RegExp.NONE)).ToAutomaton();
+            if (random.NextBoolean())
+            {
+                a2 = BasicOperations.Complement(a2);
+            }
+
+            // combine them in random ways
+            switch (random.Next(4))
+            {
+                case 0:
+                    return BasicOperations.Concatenate(a1, a2);
+
+                case 1:
+                    return BasicOperations.Union(a1, a2);
+
+                case 2:
+                    return BasicOperations.Intersection(a1, a2);
+
+                default:
+                    return BasicOperations.Minus(a1, a2);
+            }
+        }
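A typical test-side pairing is to draw a random automaton and push it through one of the slow reference implementations below (a sketch; the seed is arbitrary):

    Automaton a = AutomatonTestUtil.RandomAutomaton(new Random(123));
    AutomatonTestUtil.MinimizeSimple(a);   // slow Brzozowski reference, defined below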
+
+        /// <summary>
+        /// Below are the original, unoptimized implementations of DFA operations for testing.
+        /// These are from the brics automaton package; full license (BSD) below:
+        /// </summary>
+
+        /*
+         * dk.brics.automaton
+         *
+         * Copyright (c) 2001-2009 Anders Moeller
+         * All rights reserved.
+         *
+         * Redistribution and use in source and binary forms, with or without
+         * modification, are permitted provided that the following conditions
+         * are met:
+         * 1. Redistributions of source code must retain the above copyright
+         *    notice, this list of conditions and the following disclaimer.
+         * 2. Redistributions in binary form must reproduce the above copyright
+         *    notice, this list of conditions and the following disclaimer in the
+         *    documentation and/or other materials provided with the distribution.
+         * 3. The name of the author may not be used to endorse or promote products
+         *    derived from this software without specific prior written permission.
+         *
+         * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+         * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+         * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+         * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+         * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+         * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+         * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+         * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+         * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+         * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+         */
+
+        /// <summary>
+        /// Simple, original brics implementation of Brzozowski minimize():
+        /// determinizing the reverse of the automaton, twice in a row,
+        /// yields an equivalent minimal DFA.
+        /// </summary>
+        public static void MinimizeSimple(Automaton a)
+        {
+            if (!String.IsNullOrEmpty(a.Singleton))
+            {
+                return;
+            }
+            DeterminizeSimple(a, SpecialOperations.Reverse(a));
+            DeterminizeSimple(a, SpecialOperations.Reverse(a));
+        }
+
+        /// <summary>
+        /// Simple, original brics implementation of determinize()
+        /// </summary>
+        public static void DeterminizeSimple(Automaton a)
+        {
+            if (a.IsDeterministic || a.IsSingleton)
+            {
+                return;
+            }
+            HashSet<State> initialset = new ValueHashSet<State>();
+            initialset.Add(a.initial);
+            DeterminizeSimple(a, initialset);
+        }
+
+        /// <summary>
+        /// Simple, original brics implementation of determinize().
+        /// Determinizes the given automaton using the given set of initial states,
+        /// via the classic subset construction.
+        /// </summary>
+        public static void DeterminizeSimple(Automaton a, ISet<State> initialset)
+        {
+            int[] points = a.GetStartPoints();
+            // subset construction
+            IDictionary<ISet<State>, ISet<State>> sets = new Dictionary<ISet<State>, ISet<State>>();
+            LinkedList<ISet<State>> worklist = new LinkedList<ISet<State>>();
+            IDictionary<ISet<State>, State> newstate = new Dictionary<ISet<State>, State>();
+            sets[initialset] = initialset;
+            worklist.AddLast(initialset);
+            a.initial = new State();
+            newstate[initialset] = a.initial;
+            while (worklist.Count > 0)
+            {
+                ISet<State> s = worklist.First.Value;
+                worklist.Remove(s);
+                State r = newstate[s];
+                foreach (State q in s)
+                {
+                    if (q.Accept)
+                    {
+                        r.Accept = true;
+                        break;
+                    }
+                }
+                for (int n = 0; n < points.Length; n++)
+                {
+                    ISet<State> p = new ValueHashSet<State>();
+                    foreach (State q in s)
+                    {
+                        foreach (Transition t in q.GetTransitions())
+                        {
+                            if (t.Min <= points[n] && points[n] <= t.Max)
+                            {
+                                p.Add(t.to);
+                            }
+                        }
+                    }
+                    if (!sets.ContainsKey(p))
+                    {
+                        sets[p] = p;
+                        worklist.AddLast(p);
+                        newstate[p] = new State();
+                    }
+                    State q_ = newstate[p];
+                    int min = points[n];
+                    int max;
+                    if (n + 1 < points.Length)
+                    {
+                        max = points[n + 1] - 1;
+                    }
+                    else
+                    {
+                        max = Character.MAX_CODE_POINT;
+                    }
+                    r.AddTransition(new Transition(min, max, q_));
+                }
+            }
+            a.IsDeterministic = true;
+            a.ClearNumberedStates();
+            a.RemoveDeadTransitions();
+        }
+
+        /// <summary>
+        /// Returns true if the language of this automaton is finite.
+        /// <p>
+        /// WARNING: this method is slow; it will blow up if the automaton is large.
+        /// It is only used to test the correctness of our faster implementation.
+        /// </summary>
+        public static bool IsFiniteSlow(Automaton a)
+        {
+            if (!String.IsNullOrEmpty(a.Singleton))
+            {
+                return true;
+            }
+            return IsFiniteSlow(a.GetInitialState(), new HashSet<State>());
+        }
+
+        /// <summary>
+        /// Checks whether there is a loop containing s. (This is sufficient since
+        /// there are never transitions to dead states.)
+        /// </summary>
+        // TODO: not great that this is recursive... in theory a
+        // large automaton could exceed the .NET call stack
+        private static bool IsFiniteSlow(State s, HashSet<State> path)
+        {
+            path.Add(s);
+            foreach (Transition t in s.GetTransitions())
+            {
+                if (path.Contains(t.Dest) || !IsFiniteSlow(t.Dest, path))
+                {
+                    return false;
+                }
+            }
+            path.Remove(s);
+            return true;
+        }
+
+        /// <summary>
+        /// Checks that an automaton has no detached states that are unreachable
+        /// from the initial state.
+        /// </summary>
+        public static void AssertNoDetachedStates(Automaton a)
+        {
+            int numStates = a.GetNumberOfStates();
+            a.ClearNumberedStates(); // force recomputation of cached numbered states
+            Assert.True(numStates == a.GetNumberOfStates(), "automaton has " + (numStates - a.GetNumberOfStates()) + " detached states");
+        }
+    }
+}
\ No newline at end of file

-                    r.AddTransition(new Transition(min, max, q_));
-                }
-            }
-            a.IsDeterministic = true;
-            a.ClearNumberedStates();
-            a.RemoveDeadTransitions();
-        }
-
-        /// <summary>
-        /// Returns true if the language of this automaton is finite.
-        /// <para/>
-        /// WARNING: this method is slow; it will blow up if the automaton is large.
-        /// It is only used to test the correctness of our faster implementation.
-        /// </summary>
-        public static bool IsFiniteSlow(Automaton a)
-        {
-            if (!String.IsNullOrEmpty(a.Singleton))
-            {
-                return true;
-            }
-            return IsFiniteSlow(a.GetInitialState(), new HashSet<State>());
-        }
-
-        /// <summary>
-        /// Checks whether there is a loop containing s. (This is sufficient since
-        /// there are never transitions to dead states.)
-        /// </summary>
-        // TODO: not great that this is recursive... in theory a
-        // large automaton could overflow the stack.
-        private static bool IsFiniteSlow(State s, HashSet<State> path)
-        {
-            path.Add(s);
-            foreach (Transition t in s.GetTransitions())
-            {
-                if (path.Contains(t.Dest) || !IsFiniteSlow(t.Dest, path))
-                {
-                    return false;
-                }
-            }
-            path.Remove(s);
-            return true;
-        }
-
-        /// <summary>
-        /// Checks that an automaton has no detached states, i.e. states that are
-        /// unreachable from the initial state.
-        /// </summary>
-        public static void AssertNoDetachedStates(Automaton a)
-        {
-            int numStates = a.GetNumberOfStates();
-            a.ClearNumberedStates(); // force recomputation of cached numbered states
-            Assert.True(numStates == a.GetNumberOfStates(), "automaton has " + (numStates - a.GetNumberOfStates()) + " detached states");
-        }
-    }
-}
\ No newline at end of file
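
For reference, the MinimizeSimple method removed above is Brzozowski's algorithm:
reversing an automaton and determinizing the result, twice, provably yields the
minimal DFA for the original language. A hedged usage sketch, reusing the helper
names visible in the deleted code (RegExp, BasicOperations, AutomatonTestUtil);
the namespaces and the SameLanguage signature are assumed from the Lucene 4.x API
rather than re-verified against this commit:

    using System.Diagnostics;
    using Lucene.Net.Util.Automaton;

    // Two regexes for the same language, spelled differently.
    Automaton a = new RegExp("(ab|ab)c*", RegExp.NONE).ToAutomaton();
    Automaton expected = new RegExp("abc*", RegExp.NONE).ToAutomaton();

    // Brzozowski: determinize(reverse(determinize(reverse(a)))) is minimal.
    AutomatonTestUtil.MinimizeSimple(a);

    // Minimization must never change the accepted language.
    Debug.Assert(BasicOperations.SameLanguage(a, expected));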


[36/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDoc.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDoc.cs b/src/Lucene.Net.Tests/Index/TestDoc.cs
new file mode 100644
index 0000000..bd65361
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDoc.cs
@@ -0,0 +1,277 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Support;
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using System.IO;
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Constants = Lucene.Net.Util.Constants;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using InfoStream = Lucene.Net.Util.InfoStream;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using TextField = TextField;
+    using TrackingDirectoryWrapper = Lucene.Net.Store.TrackingDirectoryWrapper;
+
+    /// <summary>
+    /// NUnit adaptation (originally JUnit) of an older test case, DocTest. </summary>
+    [TestFixture]
+    public class TestDoc : LuceneTestCase
+    {
+        private DirectoryInfo WorkDir;
+        private DirectoryInfo IndexDir;
+        private LinkedList<FileInfo> Files;
+
+        /// <summary>
+        /// Sets up the test case. This test case needs
+        /// a few text files created in the current working directory.
+        /// </summary>
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: setUp");
+            }
+            WorkDir = CreateTempDir("TestDoc");
+
+            IndexDir = CreateTempDir("testIndex");
+
+            Directory directory = NewFSDirectory(IndexDir);
+            directory.Dispose();
+
+            Files = new LinkedList<FileInfo>();
+            Files.AddLast(CreateOutput("test.txt", "this is the first test file"));
+
+            Files.AddLast(CreateOutput("test2.txt", "this is the second test file"));
+        }
+
+        private FileInfo CreateOutput(string name, string text)
+        {
+            //TextWriter fw = null;
+            StreamWriter pw = null;
+
+            try
+            {
+                FileInfo f = new FileInfo(Path.Combine(WorkDir.FullName, name));
+                if (f.Exists)
+                {
+                    f.Delete();
+                }
+
+                //fw = new StreamWriter(new FileOutputStream(f), IOUtils.CHARSET_UTF_8);
+                pw = new StreamWriter(File.Open(f.FullName, FileMode.OpenOrCreate));
+                pw.WriteLine(text);
+                return f;
+            }
+            finally
+            {
+                if (pw != null)
+                {
+                    pw.Dispose();
+                }
+                /*if (fw != null)
+                {
+                    fw.Dispose();
+                }*/
+            }
+        }
+
+        /// <summary>
+        /// This test executes a number of merges and compares the contents of
+        /// the segments created when using a compound file versus not using one.
+        ///
+        ///  TODO: the original test used to print the segment contents to System.out
+        ///        for visual validation. To have the same effect, a new method
+        ///        checkSegment(String name, ...) should be created that would
+        ///        assert various things about the segment.
+        /// </summary>
+        [Test]
+        public virtual void TestIndexAndMerge()
+        {
+            MemoryStream sw = new MemoryStream();
+            StreamWriter @out = new StreamWriter(sw);
+
+            Directory directory = NewFSDirectory(IndexDir, null);
+
+            MockDirectoryWrapper wrapper = directory as MockDirectoryWrapper;
+            if (wrapper != null)
+            {
+                // We create unreferenced files (we don't even write
+                // a segments file):
+                wrapper.AssertNoUnrefencedFilesOnClose = false;
+            }
+
+            IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(-1).SetMergePolicy(NewLogMergePolicy(10)));
+
+            SegmentCommitInfo si1 = IndexDoc(writer, "test.txt");
+            PrintSegment(@out, si1);
+
+            SegmentCommitInfo si2 = IndexDoc(writer, "test2.txt");
+            PrintSegment(@out, si2);
+            writer.Dispose();
+
+            SegmentCommitInfo siMerge = Merge(directory, si1, si2, "_merge", false);
+            PrintSegment(@out, siMerge);
+
+            SegmentCommitInfo siMerge2 = Merge(directory, si1, si2, "_merge2", false);
+            PrintSegment(@out, siMerge2);
+
+            SegmentCommitInfo siMerge3 = Merge(directory, siMerge, siMerge2, "_merge3", false);
+            PrintSegment(@out, siMerge3);
+
+            directory.Dispose();
+            @out.Dispose();
+            sw.Dispose();
+
+            // LUCENENET: MemoryStream.ToString() returns the type name, not the
+            // buffered text, so decode the captured bytes explicitly.
+            string multiFileOutput = Encoding.UTF8.GetString(sw.ToArray());
+            //Console.WriteLine(multiFileOutput);
+
+            sw = new MemoryStream();
+            @out = new StreamWriter(sw);
+
+            directory = NewFSDirectory(IndexDir, null);
+
+            wrapper = directory as MockDirectoryWrapper;
+            if (wrapper != null)
+            {
+                // We create unreferenced files (we don't even write
+                // a segments file):
+                wrapper.AssertNoUnrefencedFilesOnClose = false;
+            }
+
+            writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(-1).SetMergePolicy(NewLogMergePolicy(10)));
+
+            si1 = IndexDoc(writer, "test.txt");
+            PrintSegment(@out, si1);
+
+            si2 = IndexDoc(writer, "test2.txt");
+            PrintSegment(@out, si2);
+            writer.Dispose();
+
+            siMerge = Merge(directory, si1, si2, "_merge", true);
+            PrintSegment(@out, siMerge);
+
+            siMerge2 = Merge(directory, si1, si2, "_merge2", true);
+            PrintSegment(@out, siMerge2);
+
+            siMerge3 = Merge(directory, siMerge, siMerge2, "_merge3", true);
+            PrintSegment(@out, siMerge3);
+
+            directory.Dispose();
+            @out.Dispose();
+            sw.Dispose();
+            string singleFileOutput = Encoding.UTF8.GetString(sw.ToArray());
+
+            Assert.AreEqual(multiFileOutput, singleFileOutput);
+        }
+
+        private SegmentCommitInfo IndexDoc(IndexWriter writer, string fileName)
+        {
+            FileInfo file = new FileInfo(Path.Combine(WorkDir.FullName, fileName));
+            Document doc = new Document();
+            StreamReader @is = new StreamReader(File.Open(file.FullName, FileMode.Open));
+            doc.Add(new TextField("contents", @is));
+            writer.AddDocument(doc);
+            writer.Commit();
+            @is.Dispose();
+            return writer.NewestSegment();
+        }
+
+        private SegmentCommitInfo Merge(Directory dir, SegmentCommitInfo si1, SegmentCommitInfo si2, string merged, bool useCompoundFile)
+        {
+            IOContext context = NewIOContext(Random());
+            SegmentReader r1 = new SegmentReader(si1, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, context);
+            SegmentReader r2 = new SegmentReader(si2, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, context);
+
+            Codec codec = Codec.Default;
+            TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.Info.Dir);
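+            // -1 doc count: the merged segment's size is unknown until the
+            // merge actually runs; a second SegmentInfo with the real count
+            // (si1 + si2) is built below once Merge() completes.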
+            SegmentInfo si = new SegmentInfo(si1.Info.Dir, Constants.LUCENE_MAIN_VERSION, merged, -1, false, codec, null);
+
+            SegmentMerger merger = new SegmentMerger(Arrays.AsList<AtomicReader>(r1, r2), si, InfoStream.Default, trackingDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, CheckAbort.NONE, new FieldInfos.FieldNumbers(), context, true);
+
+            MergeState mergeState = merger.Merge();
+            r1.Dispose();
+            r2.Dispose();
+            SegmentInfo info = new SegmentInfo(si1.Info.Dir, Constants.LUCENE_MAIN_VERSION, merged, si1.Info.DocCount + si2.Info.DocCount, false, codec, null);
+            info.SetFiles(new HashSet<string>(trackingDir.CreatedFiles));
+
+            if (useCompoundFile)
+            {
+                ICollection<string> filesToDelete = IndexWriter.CreateCompoundFile(InfoStream.Default, dir, CheckAbort.NONE, info, NewIOContext(Random()));
+                info.UseCompoundFile = true;
+                foreach (String fileToDelete in filesToDelete)
+                {
+                    si1.Info.Dir.DeleteFile(fileToDelete);
+                }
+            }
+
+            return new SegmentCommitInfo(info, 0, -1L, -1L);
+        }
+
+        private void PrintSegment(StreamWriter @out, SegmentCommitInfo si)
+        {
+            SegmentReader reader = new SegmentReader(si, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
+
+            for (int i = 0; i < reader.NumDocs; i++)
+            {
+                @out.WriteLine(reader.Document(i));
+            }
+
+            Fields fields = reader.Fields;
+            foreach (string field in fields)
+            {
+                Terms terms = fields.GetTerms(field);
+                Assert.IsNotNull(terms);
+                TermsEnum tis = terms.GetIterator(null);
+                while (tis.Next() != null)
+                {
+                    @out.Write("  term=" + field + ":" + tis.Term);
+                    @out.WriteLine("    DF=" + tis.DocFreq);
+
+                    DocsAndPositionsEnum positions = tis.DocsAndPositions(reader.LiveDocs, null);
+
+                    while (positions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+                    {
+                        @out.Write(" doc=" + positions.DocID);
+                        @out.Write(" TF=" + positions.Freq);
+                        @out.Write(" pos=");
+                        @out.Write(positions.NextPosition());
+                        for (int j = 1; j < positions.Freq; j++)
+                        {
+                            @out.Write("," + positions.NextPosition());
+                        }
+                        @out.WriteLine("");
+                    }
+                }
+            }
+            reader.Dispose();
+        }
+    }
+}
\ No newline at end of file
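
A note on the output capture in TestIndexAndMerge above: MemoryStream.ToString()
returns the type name rather than the buffered text, which is why the test decodes
the raw bytes instead. A minimal stand-alone sketch of the pattern (plain .NET BCL,
no Lucene types involved):

    using System;
    using System.IO;
    using System.Text;

    var buffer = new MemoryStream();
    using (var writer = new StreamWriter(buffer)) // default: UTF-8 without BOM
    {
        writer.WriteLine("segment contents go here");
    } // disposing flushes; ToArray() still works on a closed MemoryStream

    // buffer.ToString() would yield "System.IO.MemoryStream", so decode instead:
    string captured = Encoding.UTF8.GetString(buffer.ToArray());
    Console.Write(captured);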

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDocCount.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDocCount.cs b/src/Lucene.Net.Tests/Index/TestDocCount.cs
new file mode 100644
index 0000000..3ac9a3c
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDocCount.cs
@@ -0,0 +1,101 @@
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using FixedBitSet = Lucene.Net.Util.FixedBitSet;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// Tests the Terms.DocCount statistic.
+    /// </summary>
+    [SuppressCodecs("Lucene3x")]
+    [TestFixture]
+    public class TestDocCount : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestSimple()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            int numDocs = AtLeast(100);
+            for (int i = 0; i < numDocs; i++)
+            {
+                iw.AddDocument(Doc());
+            }
+            IndexReader ir = iw.Reader;
+            VerifyCount(ir);
+            ir.Dispose();
+            iw.ForceMerge(1);
+            ir = iw.Reader;
+            VerifyCount(ir);
+            ir.Dispose();
+            iw.Dispose();
+            dir.Dispose();
+        }
+
+        private IEnumerable<IIndexableField> Doc()
+        {
+            Document doc = new Document();
+            int numFields = TestUtil.NextInt(Random(), 1, 10);
+            for (int i = 0; i < numFields; i++)
+            {
+                doc.Add(NewStringField("" + TestUtil.NextInt(Random(), 'a', 'z'), "" + TestUtil.NextInt(Random(), 'a', 'z'), Field.Store.NO));
+            }
+            return doc;
+        }
+
+        private void VerifyCount(IndexReader ir)
+        {
+            Fields fields = MultiFields.GetFields(ir);
+            if (fields == null)
+            {
+                return;
+            }
+            foreach (string field in fields)
+            {
+                Terms terms = fields.GetTerms(field);
+                if (terms == null)
+                {
+                    continue;
+                }
+                int docCount = terms.DocCount;
+                FixedBitSet visited = new FixedBitSet(ir.MaxDoc);
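+                // 'visited' accumulates every doc that has at least one term in
+                // this field; DocCount is defined as exactly that number of
+                // distinct documents, so the cardinalities must match at the end.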
+                TermsEnum te = terms.GetIterator(null);
+                while (te.Next() != null)
+                {
+                    DocsEnum de = TestUtil.Docs(Random(), te, null, null, DocsEnum.FLAG_NONE);
+                    while (de.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+                    {
+                        visited.Set(de.DocID);
+                    }
+                }
+                Assert.AreEqual(visited.Cardinality(), docCount);
+            }
+        }
+    }
+}
\ No newline at end of file
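
The invariant VerifyCount exercises above can be stated without FixedBitSet: a
field's Terms.DocCount equals the number of distinct documents appearing in the
union of all of its terms' posting lists. A hedged sketch of the same check with a
plain HashSet<int>, assuming the TermsEnum.Docs overload used implicitly by
TestUtil.Docs in the test:

    using System.Collections.Generic;
    using Lucene.Net.Index;
    using Lucene.Net.Search;

    static int DistinctDocsWithField(Terms terms)
    {
        var seen = new HashSet<int>();      // distinct docIDs across all terms
        TermsEnum te = terms.GetIterator(null);
        while (te.Next() != null)
        {
            DocsEnum de = te.Docs(null, null, DocsEnum.FLAG_NONE);
            int doc;
            while ((doc = de.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
            {
                seen.Add(doc);              // counted once, however many terms hit it
            }
        }
        return seen.Count;                  // must equal terms.DocCount
    }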

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs b/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs
new file mode 100644
index 0000000..0e498c8
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs
@@ -0,0 +1,148 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Support;
+using System;
+using System.Text;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using System.IO;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using PrintStreamInfoStream = Lucene.Net.Util.PrintStreamInfoStream;
+    using TextField = TextField;
+    using TokenFilter = Lucene.Net.Analysis.TokenFilter;
+    using Tokenizer = Lucene.Net.Analysis.Tokenizer;
+
+    /// <summary>
+    /// Tests writing to the info stream when an exception is thrown during field analysis.
+    /// </summary>
+    [TestFixture]
+    public class TestDocInverterPerFieldErrorInfo : LuceneTestCase
+    {
+        private static readonly FieldType StoredTextType = new FieldType(TextField.TYPE_NOT_STORED);
+
+        private class BadNews : Exception
+        {
+            internal BadNews(string message)
+                : base(message)
+            {
+            }
+        }
+
+        private class ThrowingAnalyzer : Analyzer
+        {
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader input)
+            {
+                Tokenizer tokenizer = new MockTokenizer(input);
+                if (fieldName.Equals("distinctiveFieldName"))
+                {
+                    TokenFilter tosser = new TokenFilterAnonymousInnerClassHelper(this, tokenizer);
+                    return new TokenStreamComponents(tokenizer, tosser);
+                }
+                else
+                {
+                    return new TokenStreamComponents(tokenizer);
+                }
+            }
+
+            private class TokenFilterAnonymousInnerClassHelper : TokenFilter
+            {
+                private readonly ThrowingAnalyzer OuterInstance;
+
+                public TokenFilterAnonymousInnerClassHelper(ThrowingAnalyzer outerInstance, Tokenizer tokenizer)
+                    : base(tokenizer)
+                {
+                    this.OuterInstance = outerInstance;
+                }
+
+                public sealed override bool IncrementToken()
+                {
+                    throw new BadNews("Something is icky.");
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestInfoStreamGetsFieldName()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer;
+            IndexWriterConfig c = new IndexWriterConfig(TEST_VERSION_CURRENT, new ThrowingAnalyzer());
+            ByteArrayOutputStream infoBytes = new ByteArrayOutputStream();
+            StreamWriter infoPrintStream = new StreamWriter(infoBytes, Encoding.UTF8);
+            PrintStreamInfoStream printStreamInfoStream = new PrintStreamInfoStream(infoPrintStream);
+            c.InfoStream = printStreamInfoStream;
+            writer = new IndexWriter(dir, c);
+            Document doc = new Document();
+            doc.Add(NewField("distinctiveFieldName", "aaa ", StoredTextType));
+            try
+            {
+                writer.AddDocument(doc);
+                Assert.Fail("Failed to fail.");
+            }
+            catch (BadNews)
+            {
+                infoPrintStream.Flush();
+                string infoStream = Encoding.UTF8.GetString(infoBytes.ToArray());
+                Assert.IsTrue(infoStream.Contains("distinctiveFieldName"));
+            }
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNoExtraNoise()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer;
+            IndexWriterConfig c = new IndexWriterConfig(TEST_VERSION_CURRENT, new ThrowingAnalyzer());
+            ByteArrayOutputStream infoBytes = new ByteArrayOutputStream();
+            StreamWriter infoPrintStream = new StreamWriter(infoBytes, Encoding.UTF8);
+            PrintStreamInfoStream printStreamInfoStream = new PrintStreamInfoStream(infoPrintStream);
+            c.InfoStream = printStreamInfoStream;
+            writer = new IndexWriter(dir, c);
+            Document doc = new Document();
+            doc.Add(NewField("boringFieldName", "aaa ", StoredTextType));
+            try
+            {
+                writer.AddDocument(doc);
+            }
+#pragma warning disable 168
+            catch (BadNews badNews)
+#pragma warning restore 168
+            {
+                Assert.Fail("Unwanted exception");
+            }
+            infoPrintStream.Flush();
+            string infoStream = Encoding.UTF8.GetString(infoBytes.ToArray());
+            Assert.IsFalse(infoStream.Contains("boringFieldName"));
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file
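
The capture wiring in both tests above generalizes to any test that needs to assert
on IndexWriter's internal logging. A trimmed sketch of just that wiring, using the
same types as the file (ByteArrayOutputStream is the Lucene.Net.Support shim); the
helper name and delegate shape here are illustrative, not part of the API:

    using System;
    using System.IO;
    using System.Text;
    using Lucene.Net.Index;
    using Lucene.Net.Support;
    using Lucene.Net.Util;

    static string CaptureInfoStream(IndexWriterConfig config, Action indexingWork)
    {
        var infoBytes = new ByteArrayOutputStream();
        var infoWriter = new StreamWriter(infoBytes, Encoding.UTF8);
        config.InfoStream = new PrintStreamInfoStream(infoWriter);

        indexingWork(); // open the writer from 'config' and index inside this action

        infoWriter.Flush();
        return Encoding.UTF8.GetString(infoBytes.ToArray());
    }

A test can then assert log.Contains("distinctiveFieldName") on the returned string,
as TestInfoStreamGetsFieldName does inline.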

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDocTermOrds.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDocTermOrds.cs b/src/Lucene.Net.Tests/Index/TestDocTermOrds.cs
new file mode 100644
index 0000000..1ea4d2b
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDocTermOrds.cs
@@ -0,0 +1,541 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Lucene.Net.Documents;
+using Lucene.Net.Search;
+
+namespace Lucene.Net.Index
+{
+    using Attributes;
+    using NUnit.Framework;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using Int32Field = Int32Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using PostingsFormat = Lucene.Net.Codecs.PostingsFormat;
+    using SeekStatus = Lucene.Net.Index.TermsEnum.SeekStatus;
+    using StringField = StringField;
+    using StringHelper = Lucene.Net.Util.StringHelper;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    // TODO:
+    //   - test w/ del docs
+    //   - test prefix
+    //   - test w/ cutoff
+    //   - crank docs way up so we get some merging sometimes
+    [TestFixture]
+    public class TestDocTermOrds : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestSimple()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            Document doc = new Document();
+            Field field = NewTextField("field", "", Field.Store.NO);
+            doc.Add(field);
+            field.SetStringValue("a b c");
+            w.AddDocument(doc);
+
+            field.SetStringValue("d e f");
+            w.AddDocument(doc);
+
+            field.SetStringValue("a f");
+            w.AddDocument(doc);
+
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            AtomicReader ar = SlowCompositeReaderWrapper.Wrap(r);
+            DocTermOrds dto = new DocTermOrds(ar, ar.LiveDocs, "field");
+            SortedSetDocValues iter = dto.GetIterator(ar);
+
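+            // Ords are assigned in term sort order across the whole field:
+            // a=0, b=1, c=2, d=3, e=4, f=5 -- hence the expected values below.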
+            iter.SetDocument(0);
+            Assert.AreEqual(0, iter.NextOrd());
+            Assert.AreEqual(1, iter.NextOrd());
+            Assert.AreEqual(2, iter.NextOrd());
+            Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, iter.NextOrd());
+
+            iter.SetDocument(1);
+            Assert.AreEqual(3, iter.NextOrd());
+            Assert.AreEqual(4, iter.NextOrd());
+            Assert.AreEqual(5, iter.NextOrd());
+            Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, iter.NextOrd());
+
+            iter.SetDocument(2);
+            Assert.AreEqual(0, iter.NextOrd());
+            Assert.AreEqual(5, iter.NextOrd());
+            Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, iter.NextOrd());
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestRandom()
+        {
+            Directory dir = NewDirectory();
+
+            int NUM_TERMS = AtLeast(20);
+            HashSet<BytesRef> terms = new HashSet<BytesRef>();
+            while (terms.Count < NUM_TERMS)
+            {
+                string s = TestUtil.RandomRealisticUnicodeString(Random());
+                //final String s = TestUtil.RandomSimpleString(random);
+                if (s.Length > 0)
+                {
+                    terms.Add(new BytesRef(s));
+                }
+            }
+            BytesRef[] termsArray = terms.ToArray(/*new BytesRef[terms.Count]*/);
+            Array.Sort(termsArray);
+
+            int NUM_DOCS = AtLeast(100);
+
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+
+            // Sometimes swap in codec that impls ord():
+            if (Random().Next(10) == 7)
+            {
+                // Make sure terms index has ords:
+                Codec codec = TestUtil.AlwaysPostingsFormat(PostingsFormat.ForName("Lucene41WithOrds"));
+                conf.SetCodec(codec);
+            }
+
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, conf);
+
+            int[][] idToOrds = new int[NUM_DOCS][];
+            HashSet<int?> ordsForDocSet = new HashSet<int?>();
+
+            for (int id = 0; id < NUM_DOCS; id++)
+            {
+                Document doc = new Document();
+
+                doc.Add(new Int32Field("id", id, Field.Store.NO));
+
+                int termCount = TestUtil.NextInt(Random(), 0, 20 * RANDOM_MULTIPLIER);
+                while (ordsForDocSet.Count < termCount)
+                {
+                    ordsForDocSet.Add(Random().Next(termsArray.Length));
+                }
+                int[] ordsForDoc = new int[termCount];
+                int upto = 0;
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: doc id=" + id);
+                }
+                foreach (int ord in ordsForDocSet)
+                {
+                    ordsForDoc[upto++] = ord;
+                    Field field = NewStringField("field", termsArray[ord].Utf8ToString(), Field.Store.NO);
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("  f=" + termsArray[ord].Utf8ToString());
+                    }
+                    doc.Add(field);
+                }
+                ordsForDocSet.Clear();
+                Array.Sort(ordsForDoc);
+                idToOrds[id] = ordsForDoc;
+                w.AddDocument(doc);
+            }
+
+            DirectoryReader r = w.Reader;
+            w.Dispose();
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: reader=" + r);
+            }
+
+            foreach (AtomicReaderContext ctx in r.Leaves)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: sub=" + ctx.Reader);
+                }
+                Verify((AtomicReader)ctx.Reader, idToOrds, termsArray, null);
+            }
+
+            // Also test top-level reader: its enum does not support
+            // ord, so this forces the OrdWrapper to run:
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: top reader");
+            }
+            AtomicReader slowR = SlowCompositeReaderWrapper.Wrap(r);
+            Verify(slowR, idToOrds, termsArray, null);
+
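+            // Evict this wrapper's entries from the FieldCache so later
+            // cache-sanity checks do not flag the composite-reader entry.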
+            FieldCache.DEFAULT.PurgeByCacheKey(slowR.CoreCacheKey);
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit has no Timeout attribute on .NET Core.
+        [Timeout(300000)]
+#endif
+        [Test, HasTimeout]
+        public virtual void TestRandomWithPrefix()
+        {
+            Directory dir = NewDirectory();
+
+            HashSet<string> prefixes = new HashSet<string>();
+            int numPrefix = TestUtil.NextInt(Random(), 2, 7);
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: use " + numPrefix + " prefixes");
+            }
+            while (prefixes.Count < numPrefix)
+            {
+                prefixes.Add(TestUtil.RandomRealisticUnicodeString(Random()));
+                //prefixes.Add(TestUtil.RandomSimpleString(random));
+            }
+            string[] prefixesArray = prefixes.ToArray(/*new string[prefixes.Count]*/);
+
+            int NUM_TERMS = AtLeast(20);
+            HashSet<BytesRef> terms = new HashSet<BytesRef>();
+            while (terms.Count < NUM_TERMS)
+            {
+                string s = prefixesArray[Random().Next(prefixesArray.Length)] + TestUtil.RandomRealisticUnicodeString(Random());
+                //final String s = prefixesArray[random.nextInt(prefixesArray.Length)] + TestUtil.RandomSimpleString(random);
+                if (s.Length > 0)
+                {
+                    terms.Add(new BytesRef(s));
+                }
+            }
+            BytesRef[] termsArray = terms.ToArray();
+            Array.Sort(termsArray);
+
+            int NUM_DOCS = AtLeast(100);
+
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+
+            // Sometimes swap in codec that impls ord():
+            if (Random().Next(10) == 7)
+            {
+                Codec codec = TestUtil.AlwaysPostingsFormat(PostingsFormat.ForName("Lucene41WithOrds"));
+                conf.SetCodec(codec);
+            }
+
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, conf);
+
+            int[][] idToOrds = new int[NUM_DOCS][];
+            HashSet<int?> ordsForDocSet = new HashSet<int?>();
+
+            for (int id = 0; id < NUM_DOCS; id++)
+            {
+                Document doc = new Document();
+
+                doc.Add(new Int32Field("id", id, Field.Store.NO));
+
+                int termCount = TestUtil.NextInt(Random(), 0, 20 * RANDOM_MULTIPLIER);
+                while (ordsForDocSet.Count < termCount)
+                {
+                    ordsForDocSet.Add(Random().Next(termsArray.Length));
+                }
+                int[] ordsForDoc = new int[termCount];
+                int upto = 0;
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: doc id=" + id);
+                }
+                foreach (int ord in ordsForDocSet)
+                {
+                    ordsForDoc[upto++] = ord;
+                    Field field = NewStringField("field", termsArray[ord].Utf8ToString(), Field.Store.NO);
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("  f=" + termsArray[ord].Utf8ToString());
+                    }
+                    doc.Add(field);
+                }
+                ordsForDocSet.Clear();
+                Array.Sort(ordsForDoc);
+                idToOrds[id] = ordsForDoc;
+                w.AddDocument(doc);
+            }
+
+            DirectoryReader r = w.Reader;
+            w.Dispose();
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: reader=" + r);
+            }
+
+            AtomicReader slowR = SlowCompositeReaderWrapper.Wrap(r);
+            foreach (string prefix in prefixesArray)
+            {
+                BytesRef prefixRef = prefix == null ? null : new BytesRef(prefix);
+
+                int[][] idToOrdsPrefix = new int[NUM_DOCS][];
+                for (int id = 0; id < NUM_DOCS; id++)
+                {
+                    int[] docOrds = idToOrds[id];
+                    IList<int?> newOrds = new List<int?>();
+                    foreach (int ord in idToOrds[id])
+                    {
+                        if (StringHelper.StartsWith(termsArray[ord], prefixRef))
+                        {
+                            newOrds.Add(ord);
+                        }
+                    }
+                    int[] newOrdsArray = new int[newOrds.Count];
+                    int upto = 0;
+                    foreach (int ord in newOrds)
+                    {
+                        newOrdsArray[upto++] = ord;
+                    }
+                    idToOrdsPrefix[id] = newOrdsArray;
+                }
+
+                foreach (AtomicReaderContext ctx in r.Leaves)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("\nTEST: sub=" + ctx.Reader);
+                    }
+                    Verify((AtomicReader)ctx.Reader, idToOrdsPrefix, termsArray, prefixRef);
+                }
+
+                // Also test top-level reader: its enum does not support
+                // ord, so this forces the OrdWrapper to run:
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: top reader");
+                }
+                Verify(slowR, idToOrdsPrefix, termsArray, prefixRef);
+            }
+
+            FieldCache.DEFAULT.PurgeByCacheKey(slowR.CoreCacheKey);
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        private void Verify(AtomicReader r, int[][] idToOrds, BytesRef[] termsArray, BytesRef prefixRef)
+        {
+            DocTermOrds dto = new DocTermOrds(r, r.LiveDocs, "field", prefixRef, int.MaxValue, TestUtil.NextInt(Random(), 2, 10));
+
+            FieldCache.Int32s docIDToID = FieldCache.DEFAULT.GetInt32s(r, "id", false);
+            /*
+              for(int docID=0;docID<subR.MaxDoc;docID++) {
+              System.out.println("  docID=" + docID + " id=" + docIDToID[docID]);
+              }
+            */
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: verify prefix=" + (prefixRef == null ? "null" : prefixRef.Utf8ToString()));
+                Console.WriteLine("TEST: all TERMS:");
+                TermsEnum allTE = MultiFields.GetTerms(r, "field").GetIterator(null);
+                int ord = 0;
+                while (allTE.Next() != null)
+                {
+                    Console.WriteLine("  ord=" + (ord++) + " term=" + allTE.Term.Utf8ToString());
+                }
+            }
+
+            //final TermsEnum te = subR.Fields.Terms("field").iterator();
+            TermsEnum te = dto.GetOrdTermsEnum(r);
+            if (dto.NumTerms == 0)
+            {
+                if (prefixRef == null)
+                {
+                    Assert.IsNull(MultiFields.GetTerms(r, "field"));
+                }
+                else
+                {
+                    Terms terms = MultiFields.GetTerms(r, "field");
+                    if (terms != null)
+                    {
+                        TermsEnum termsEnum = terms.GetIterator(null);
+                        TermsEnum.SeekStatus result = termsEnum.SeekCeil(prefixRef);
+                        if (result != TermsEnum.SeekStatus.END)
+                        {
+                            Assert.IsFalse(StringHelper.StartsWith(termsEnum.Term, prefixRef), "term=" + termsEnum.Term.Utf8ToString() + " matches prefix=" + prefixRef.Utf8ToString());
+                        }
+                        else
+                        {
+                            // ok
+                        }
+                    }
+                    else
+                    {
+                        // ok
+                    }
+                }
+                return;
+            }
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: TERMS:");
+                te.SeekExact(0);
+                while (true)
+                {
+                    Console.WriteLine("  ord=" + te.Ord + " term=" + te.Term.Utf8ToString());
+                    if (te.Next() == null)
+                    {
+                        break;
+                    }
+                }
+            }
+
+            SortedSetDocValues iter = dto.GetIterator(r);
+            for (int docID = 0; docID < r.MaxDoc; docID++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: docID=" + docID + " of " + r.MaxDoc + " (id=" + docIDToID.Get(docID) + ")");
+                }
+                iter.SetDocument(docID);
+                int[] answers = idToOrds[docIDToID.Get(docID)];
+                int upto = 0;
+                long ord;
+                while ((ord = iter.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
+                {
+                    te.SeekExact(ord);
+                    BytesRef expected = termsArray[answers[upto++]];
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("  exp=" + expected.Utf8ToString() + " actual=" + te.Term.Utf8ToString());
+                    }
+                    Assert.AreEqual(expected, te.Term, "expected=" + expected.Utf8ToString() + " actual=" + te.Term.Utf8ToString() + " ord=" + ord);
+                }
+                Assert.AreEqual(answers.Length, upto);
+            }
+        }
+
+        [Test]
+        public virtual void TestBackToTheFuture()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter iw = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null));
+
+            Document doc = new Document();
+            doc.Add(NewStringField("foo", "bar", Field.Store.NO));
+            iw.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(NewStringField("foo", "baz", Field.Store.NO));
+            iw.AddDocument(doc);
+
+            DirectoryReader r1 = DirectoryReader.Open(iw, true);
+
+            iw.DeleteDocuments(new Term("foo", "baz"));
+            DirectoryReader r2 = DirectoryReader.Open(iw, true);
+
+            FieldCache.DEFAULT.GetDocTermOrds(GetOnlySegmentReader(r2), "foo");
+
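+            // The entry built against the newer reader (r2, after the delete)
+            // must not leak into the older reader: r1 still sees both terms.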
+            SortedSetDocValues v = FieldCache.DEFAULT.GetDocTermOrds(GetOnlySegmentReader(r1), "foo");
+            Assert.AreEqual(2, v.ValueCount);
+            v.SetDocument(1);
+            Assert.AreEqual(1, v.NextOrd());
+
+            iw.Dispose();
+            r1.Dispose();
+            r2.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSortedTermsEnum()
+        {
+            Directory directory = NewDirectory();
+            Analyzer analyzer = new MockAnalyzer(Random());
+            IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            iwconfig.SetMergePolicy(NewLogMergePolicy());
+            RandomIndexWriter iwriter = new RandomIndexWriter(Random(), directory, iwconfig);
+
+            Document doc = new Document();
+            doc.Add(new StringField("field", "hello", Field.Store.NO));
+            iwriter.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(new StringField("field", "world", Field.Store.NO));
+            iwriter.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(new StringField("field", "beer", Field.Store.NO));
+            iwriter.AddDocument(doc);
+            iwriter.ForceMerge(1);
+
+            DirectoryReader ireader = iwriter.Reader;
+            iwriter.Dispose();
+
+            AtomicReader ar = GetOnlySegmentReader(ireader);
+            SortedSetDocValues dv = FieldCache.DEFAULT.GetDocTermOrds(ar, "field");
+            Assert.AreEqual(3, dv.ValueCount);
+
+            TermsEnum termsEnum = dv.GetTermsEnum();
+
+            // next()
+            Assert.AreEqual("beer", termsEnum.Next().Utf8ToString());
+            Assert.AreEqual(0, termsEnum.Ord);
+            Assert.AreEqual("hello", termsEnum.Next().Utf8ToString());
+            Assert.AreEqual(1, termsEnum.Ord);
+            Assert.AreEqual("world", termsEnum.Next().Utf8ToString());
+            Assert.AreEqual(2, termsEnum.Ord);
+
+            // seekCeil()
+            Assert.AreEqual(SeekStatus.NOT_FOUND, termsEnum.SeekCeil(new BytesRef("ha!")));
+            Assert.AreEqual("hello", termsEnum.Term.Utf8ToString());
+            Assert.AreEqual(1, termsEnum.Ord);
+            Assert.AreEqual(SeekStatus.FOUND, termsEnum.SeekCeil(new BytesRef("beer")));
+            Assert.AreEqual("beer", termsEnum.Term.Utf8ToString());
+            Assert.AreEqual(0, termsEnum.Ord);
+            Assert.AreEqual(SeekStatus.END, termsEnum.SeekCeil(new BytesRef("zzz")));
+
+            // seekExact()
+            Assert.IsTrue(termsEnum.SeekExact(new BytesRef("beer")));
+            Assert.AreEqual("beer", termsEnum.Term.Utf8ToString());
+            Assert.AreEqual(0, termsEnum.Ord);
+            Assert.IsTrue(termsEnum.SeekExact(new BytesRef("hello")));
+            Assert.AreEqual("hello", termsEnum.Term.Utf8ToString());
+            Assert.AreEqual(1, termsEnum.Ord);
+            Assert.IsTrue(termsEnum.SeekExact(new BytesRef("world")));
+            Assert.AreEqual("world", termsEnum.Term.Utf8ToString());
+            Assert.AreEqual(2, termsEnum.Ord);
+            Assert.IsFalse(termsEnum.SeekExact(new BytesRef("bogus")));
+
+            // seek(ord)
+            termsEnum.SeekExact(0);
+            Assert.AreEqual("beer", termsEnum.Term.Utf8ToString());
+            Assert.AreEqual(0, termsEnum.Ord);
+            termsEnum.SeekExact(1);
+            Assert.AreEqual("hello", termsEnum.Term.Utf8ToString());
+            Assert.AreEqual(1, termsEnum.Ord);
+            termsEnum.SeekExact(2);
+            Assert.AreEqual("world", termsEnum.Term.Utf8ToString());
+            Assert.AreEqual(2, termsEnum.Ord);
+            ireader.Dispose();
+            directory.Dispose();
+        }
+    }
+}
\ No newline at end of file
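
For reference, the per-document protocol exercised throughout TestDocTermOrds:
position the SortedSetDocValues on a document, then drain NextOrd() until
NO_MORE_ORDS, resolving each ordinal back to its term bytes. A hedged sketch;
LookupOrd's signature is assumed from the Lucene 4.x API rather than re-checked
against this commit:

    using System;
    using Lucene.Net.Index;
    using Lucene.Net.Util;

    static void DumpDocOrds(SortedSetDocValues dv, int docID)
    {
        dv.SetDocument(docID);              // ords below refer to this doc only
        var scratch = new BytesRef();
        long ord;
        while ((ord = dv.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
        {
            dv.LookupOrd(ord, scratch);     // resolve ordinal -> term bytes
            Console.WriteLine("doc=" + docID + " ord=" + ord + " term=" + scratch.Utf8ToString());
        }
    }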

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDocValuesFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDocValuesFormat.cs b/src/Lucene.Net.Tests/Index/TestDocValuesFormat.cs
new file mode 100644
index 0000000..628ddc0
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDocValuesFormat.cs
@@ -0,0 +1,546 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Codec = Lucene.Net.Codecs.Codec;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// Tests the codec configuration defined by LuceneTestCase randomly
+    /// (typically a mix across different fields).
+    /// </summary>
+    [SuppressCodecs("Lucene3x")]
+    public class TestDocValuesFormat : BaseDocValuesFormatTestCase
+    {
+        protected override Codec Codec
+        {
+            get
+            {
+                return Codec.Default;
+            }
+        }
+
+        protected internal override bool CodecAcceptsHugeBinaryValues(string field)
+        {
+            return TestUtil.FieldSupportsHugeBinaryDocValues(field);
+        }
+
+
+        #region BaseDocValuesFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestOneNumber()
+        {
+            base.TestOneNumber();
+        }
+
+        [Test]
+        public override void TestOneFloat()
+        {
+            base.TestOneFloat();
+        }
+
+        [Test]
+        public override void TestTwoNumbers()
+        {
+            base.TestTwoNumbers();
+        }
+
+        [Test]
+        public override void TestTwoBinaryValues()
+        {
+            base.TestTwoBinaryValues();
+        }
+
+        [Test]
+        public override void TestTwoFieldsMixed()
+        {
+            base.TestTwoFieldsMixed();
+        }
+
+        [Test]
+        public override void TestThreeFieldsMixed()
+        {
+            base.TestThreeFieldsMixed();
+        }
+
+        [Test]
+        public override void TestThreeFieldsMixed2()
+        {
+            base.TestThreeFieldsMixed2();
+        }
+
+        [Test]
+        public override void TestTwoDocumentsNumeric()
+        {
+            base.TestTwoDocumentsNumeric();
+        }
+
+        [Test]
+        public override void TestTwoDocumentsMerged()
+        {
+            base.TestTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestBigNumericRange()
+        {
+            base.TestBigNumericRange();
+        }
+
+        [Test]
+        public override void TestBigNumericRange2()
+        {
+            base.TestBigNumericRange2();
+        }
+
+        [Test]
+        public override void TestBytes()
+        {
+            base.TestBytes();
+        }
+
+        [Test]
+        public override void TestBytesTwoDocumentsMerged()
+        {
+            base.TestBytesTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestSortedBytes()
+        {
+            base.TestSortedBytes();
+        }
+
+        [Test]
+        public override void TestSortedBytesTwoDocuments()
+        {
+            base.TestSortedBytesTwoDocuments();
+        }
+
+        [Test]
+        public override void TestSortedBytesThreeDocuments()
+        {
+            base.TestSortedBytesThreeDocuments();
+        }
+
+        [Test]
+        public override void TestSortedBytesTwoDocumentsMerged()
+        {
+            base.TestSortedBytesTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestSortedMergeAwayAllValues()
+        {
+            base.TestSortedMergeAwayAllValues();
+        }
+
+        [Test]
+        public override void TestBytesWithNewline()
+        {
+            base.TestBytesWithNewline();
+        }
+
+        [Test]
+        public override void TestMissingSortedBytes()
+        {
+            base.TestMissingSortedBytes();
+        }
+
+        [Test]
+        public override void TestSortedTermsEnum()
+        {
+            base.TestSortedTermsEnum();
+        }
+
+        [Test]
+        public override void TestEmptySortedBytes()
+        {
+            base.TestEmptySortedBytes();
+        }
+
+        [Test]
+        public override void TestEmptyBytes()
+        {
+            base.TestEmptyBytes();
+        }
+
+        [Test]
+        public override void TestVeryLargeButLegalBytes()
+        {
+            base.TestVeryLargeButLegalBytes();
+        }
+
+        [Test]
+        public override void TestVeryLargeButLegalSortedBytes()
+        {
+            base.TestVeryLargeButLegalSortedBytes();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnBytes()
+        {
+            base.TestCodecUsesOwnBytes();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnSortedBytes()
+        {
+            base.TestCodecUsesOwnSortedBytes();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnBytesEachTime()
+        {
+            base.TestCodecUsesOwnBytesEachTime();
+        }
+
+        [Test]
+        public override void TestCodecUsesOwnSortedBytesEachTime()
+        {
+            base.TestCodecUsesOwnSortedBytesEachTime();
+        }
+
+        /*
+         * Simple test case to show how to use the API
+         */
+        [Test]
+        public override void TestDocValuesSimple()
+        {
+            base.TestDocValuesSimple();
+        }
+
+        [Test]
+        public override void TestRandomSortedBytes()
+        {
+            base.TestRandomSortedBytes();
+        }
+
+        [Test]
+        public override void TestBooleanNumericsVsStoredFields()
+        {
+            base.TestBooleanNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestByteNumericsVsStoredFields()
+        {
+            base.TestByteNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestByteMissingVsFieldCache()
+        {
+            base.TestByteMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestShortNumericsVsStoredFields()
+        {
+            base.TestShortNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestShortMissingVsFieldCache()
+        {
+            base.TestShortMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestIntNumericsVsStoredFields()
+        {
+            base.TestIntNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestIntMissingVsFieldCache()
+        {
+            base.TestIntMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestLongNumericsVsStoredFields()
+        {
+            base.TestLongNumericsVsStoredFields();
+        }
+
+        [Test]
+        public override void TestLongMissingVsFieldCache()
+        {
+            base.TestLongMissingVsFieldCache();
+        }
+
+        [Test]
+        public override void TestBinaryFixedLengthVsStoredFields()
+        {
+            base.TestBinaryFixedLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestBinaryVariableLengthVsStoredFields()
+        {
+            base.TestBinaryVariableLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedFixedLengthVsStoredFields()
+        {
+            base.TestSortedFixedLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedFixedLengthVsFieldCache()
+        {
+            base.TestSortedFixedLengthVsFieldCache();
+        }
+
+        [Test]
+        public override void TestSortedVariableLengthVsFieldCache()
+        {
+            base.TestSortedVariableLengthVsFieldCache();
+        }
+
+        [Test]
+        public override void TestSortedVariableLengthVsStoredFields()
+        {
+            base.TestSortedVariableLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetOneValue()
+        {
+            base.TestSortedSetOneValue();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoFields()
+        {
+            base.TestSortedSetTwoFields();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsMerged()
+        {
+            base.TestSortedSetTwoDocumentsMerged();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoValues()
+        {
+            base.TestSortedSetTwoValues();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoValuesUnordered()
+        {
+            base.TestSortedSetTwoValuesUnordered();
+        }
+
+        [Test]
+        public override void TestSortedSetThreeValuesTwoDocs()
+        {
+            base.TestSortedSetThreeValuesTwoDocs();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsLastMissing()
+        {
+            base.TestSortedSetTwoDocumentsLastMissing();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsLastMissingMerge()
+        {
+            base.TestSortedSetTwoDocumentsLastMissingMerge();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsFirstMissing()
+        {
+            base.TestSortedSetTwoDocumentsFirstMissing();
+        }
+
+        [Test]
+        public override void TestSortedSetTwoDocumentsFirstMissingMerge()
+        {
+            base.TestSortedSetTwoDocumentsFirstMissingMerge();
+        }
+
+        [Test]
+        public override void TestSortedSetMergeAwayAllValues()
+        {
+            base.TestSortedSetMergeAwayAllValues();
+        }
+
+        [Test]
+        public override void TestSortedSetTermsEnum()
+        {
+            base.TestSortedSetTermsEnum();
+        }
+
+        [Test]
+        public override void TestSortedSetFixedLengthVsStoredFields()
+        {
+            base.TestSortedSetFixedLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetVariableLengthVsStoredFields()
+        {
+            base.TestSortedSetVariableLengthVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetFixedLengthSingleValuedVsStoredFields()
+        {
+            base.TestSortedSetFixedLengthSingleValuedVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetVariableLengthSingleValuedVsStoredFields()
+        {
+            base.TestSortedSetVariableLengthSingleValuedVsStoredFields();
+        }
+
+        [Test]
+        public override void TestSortedSetFixedLengthVsUninvertedField()
+        {
+            base.TestSortedSetFixedLengthVsUninvertedField();
+        }
+
+        [Test]
+        public override void TestSortedSetVariableLengthVsUninvertedField()
+        {
+            base.TestSortedSetVariableLengthVsUninvertedField();
+        }
+
+        [Test]
+        public override void TestGCDCompression()
+        {
+            base.TestGCDCompression();
+        }
+
+        [Test]
+        public override void TestZeros()
+        {
+            base.TestZeros();
+        }
+
+        [Test]
+        public override void TestZeroOrMin()
+        {
+            base.TestZeroOrMin();
+        }
+
+        [Test]
+        public override void TestTwoNumbersOneMissing()
+        {
+            base.TestTwoNumbersOneMissing();
+        }
+
+        [Test]
+        public override void TestTwoNumbersOneMissingWithMerging()
+        {
+            base.TestTwoNumbersOneMissingWithMerging();
+        }
+
+        [Test]
+        public override void TestThreeNumbersOneMissingWithMerging()
+        {
+            base.TestThreeNumbersOneMissingWithMerging();
+        }
+
+        [Test]
+        public override void TestTwoBytesOneMissing()
+        {
+            base.TestTwoBytesOneMissing();
+        }
+
+        [Test]
+        public override void TestTwoBytesOneMissingWithMerging()
+        {
+            base.TestTwoBytesOneMissingWithMerging();
+        }
+
+        [Test]
+        public override void TestThreeBytesOneMissingWithMerging()
+        {
+            base.TestThreeBytesOneMissingWithMerging();
+        }
+
+        // LUCENE-4853
+        [Test]
+        public override void TestHugeBinaryValues()
+        {
+            base.TestHugeBinaryValues();
+        }
+
+        // TODO: get this out of here and into the deprecated codecs (4.0, 4.2)
+        [Test]
+        public override void TestHugeBinaryValueLimit()
+        {
+            base.TestHugeBinaryValueLimit();
+        }
+
+        /// <summary>
+        /// Tests dv against stored fields with threads (binary/numeric/sorted, no missing)
+        /// </summary>
+        [Test]
+        public override void TestThreads()
+        {
+            base.TestThreads();
+        }
+
+        /// <summary>
+        /// Tests dv against stored fields with threads (all types + missing)
+        /// </summary>
+        [Test]
+        public override void TestThreads2()
+        {
+            base.TestThreads2();
+        }
+
+        // LUCENE-5218
+        [Test]
+        public override void TestEmptyBinaryValueOnPageSizes()
+        {
+            base.TestEmptyBinaryValueOnPageSizes();
+        }
+
+        #endregion
+
+        #region BaseIndexFileFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
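+        // The pattern is simply to redeclare each inherited test with a [Test]
+        // attribute and delegate to the base implementation; NUnit then discovers
+        // the test on this concrete fixture.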
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            base.TestMergeStability();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file


[63/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Renamed namespace from Codecs.IntBlock to Codecs.MockIntBlock (as per the original)

Posted by ni...@apache.org.
Lucene.Net.TestFramework: Renamed namespace from Codecs.IntBlock to Codecs.MockIntBlock (as per the original)


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/63b45ce0
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/63b45ce0
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/63b45ce0

Branch: refs/heads/api-work
Commit: 63b45ce00f3d9a8ccd3603b1f43ad842dacbaf24
Parents: 7c9f572
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 26 03:29:02 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:17:59 2017 +0700

----------------------------------------------------------------------
 .../Codecs/MockIntBlock/MockFixedIntBlockPostingsFormat.cs        | 3 ++-
 .../Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs     | 3 ++-
 .../IntBlock/TestFixedIntBlockPostingsFormat.cs                   | 3 ++-
 src/Lucene.Net.Tests.Codecs/IntBlock/TestIntBlockCodec.cs         | 3 ++-
 .../IntBlock/TestVariableIntBlockPostingsFormat.cs                | 3 ++-
 5 files changed, 10 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/63b45ce0/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockFixedIntBlockPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockFixedIntBlockPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockFixedIntBlockPostingsFormat.cs
index 0f78113..31e8731 100644
--- a/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockFixedIntBlockPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockFixedIntBlockPostingsFormat.cs
@@ -1,10 +1,11 @@
 \ufeffusing Lucene.Net.Codecs.BlockTerms;
+using Lucene.Net.Codecs.IntBlock;
 using Lucene.Net.Codecs.Sep;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
 using Lucene.Net.Util;
 
-namespace Lucene.Net.Codecs.IntBlock
+namespace Lucene.Net.Codecs.MockIntBlock
 {
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/63b45ce0/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs
index 6cb6990..b152a10 100644
--- a/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/MockIntBlock/MockVariableIntBlockPostingsFormat.cs
@@ -1,11 +1,12 @@
 \ufeffusing Lucene.Net.Codecs.BlockTerms;
+using Lucene.Net.Codecs.IntBlock;
 using Lucene.Net.Codecs.Sep;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
 using Lucene.Net.Util;
 using System.Diagnostics;
 
-namespace Lucene.Net.Codecs.IntBlock
+namespace Lucene.Net.Codecs.MockIntBlock
 {
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/63b45ce0/src/Lucene.Net.Tests.Codecs/IntBlock/TestFixedIntBlockPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Codecs/IntBlock/TestFixedIntBlockPostingsFormat.cs b/src/Lucene.Net.Tests.Codecs/IntBlock/TestFixedIntBlockPostingsFormat.cs
index 26b2684..4aa7219 100644
--- a/src/Lucene.Net.Tests.Codecs/IntBlock/TestFixedIntBlockPostingsFormat.cs
+++ b/src/Lucene.Net.Tests.Codecs/IntBlock/TestFixedIntBlockPostingsFormat.cs
@@ -1,4 +1,5 @@
-\ufeffusing Lucene.Net.Index;
+\ufeffusing Lucene.Net.Codecs.MockIntBlock;
+using Lucene.Net.Index;
 using Lucene.Net.Util;
 using NUnit.Framework;
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/63b45ce0/src/Lucene.Net.Tests.Codecs/IntBlock/TestIntBlockCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Codecs/IntBlock/TestIntBlockCodec.cs b/src/Lucene.Net.Tests.Codecs/IntBlock/TestIntBlockCodec.cs
index 582a684..6429ae1 100644
--- a/src/Lucene.Net.Tests.Codecs/IntBlock/TestIntBlockCodec.cs
+++ b/src/Lucene.Net.Tests.Codecs/IntBlock/TestIntBlockCodec.cs
@@ -1,4 +1,5 @@
-\ufeffusing Lucene.Net.Codecs.Sep;
+\ufeffusing Lucene.Net.Codecs.MockIntBlock;
+using Lucene.Net.Codecs.Sep;
 using Lucene.Net.Store;
 using Lucene.Net.Util;
 using NUnit.Framework;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/63b45ce0/src/Lucene.Net.Tests.Codecs/IntBlock/TestVariableIntBlockPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Codecs/IntBlock/TestVariableIntBlockPostingsFormat.cs b/src/Lucene.Net.Tests.Codecs/IntBlock/TestVariableIntBlockPostingsFormat.cs
index c255c90..23bc8fd 100644
--- a/src/Lucene.Net.Tests.Codecs/IntBlock/TestVariableIntBlockPostingsFormat.cs
+++ b/src/Lucene.Net.Tests.Codecs/IntBlock/TestVariableIntBlockPostingsFormat.cs
@@ -1,4 +1,5 @@
-\ufeffusing Lucene.Net.Index;
+\ufeffusing Lucene.Net.Codecs.MockIntBlock;
+using Lucene.Net.Index;
 using Lucene.Net.Util;
 using NUnit.Framework;
 


[10/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Spans/TestSpans.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Spans/TestSpans.cs b/src/Lucene.Net.Tests/Search/Spans/TestSpans.cs
new file mode 100644
index 0000000..651bfcc
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Spans/TestSpans.cs
@@ -0,0 +1,571 @@
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search.Spans
+{
+    using Lucene.Net.Index;
+    using NUnit.Framework;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexReaderContext = Lucene.Net.Index.IndexReaderContext;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using ReaderUtil = Lucene.Net.Index.ReaderUtil;
+    using Similarity = Lucene.Net.Search.Similarities.Similarity;
+    using Term = Lucene.Net.Index.Term;
+
+    [TestFixture]
+    public class TestSpans : LuceneTestCase
+    {
+        private IndexSearcher Searcher;
+        private IndexReader Reader;
+        private Directory Directory;
+
+        public const string field = "field";
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            for (int i = 0; i < DocFields.Length; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField(field, DocFields[i], Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            Reader = writer.Reader;
+            writer.Dispose();
+            Searcher = NewSearcher(Reader);
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Directory.Dispose();
+            base.TearDown();
+        }
+
+        private string[] DocFields = new string[] { "w1 w2 w3 w4 w5", "w1 w3 w2 w3", "w1 xx w2 yy w3", "w1 w3 xx w2 yy w3", "u2 u2 u1", "u2 xx u2 u1", "u2 u2 xx u1", "u2 xx u2 yy u1", "u2 xx u1 u2", "u2 u1 xx u2", "u1 u2 xx u2", "t1 t2 t1 t3 t2 t3", "s2 s1 s1 xx xx s2 xx s2 xx s1 xx xx xx xx xx s2 xx" };
+
+        public virtual SpanTermQuery MakeSpanTermQuery(string text)
+        {
+            return new SpanTermQuery(new Term(field, text));
+        }
+
+        private void CheckHits(Query query, int[] results)
+        {
+            Search.CheckHits.DoCheckHits(Random(), query, field, Searcher, results, Similarity);
+        }
+
+        private void OrderedSlopTest3SQ(SpanQuery q1, SpanQuery q2, SpanQuery q3, int slop, int[] expectedDocs)
+        {
+            bool ordered = true;
+            SpanNearQuery snq = new SpanNearQuery(new SpanQuery[] { q1, q2, q3 }, slop, ordered);
+            CheckHits(snq, expectedDocs);
+        }
+
+        public virtual void OrderedSlopTest3(int slop, int[] expectedDocs)
+        {
+            OrderedSlopTest3SQ(MakeSpanTermQuery("w1"), MakeSpanTermQuery("w2"), MakeSpanTermQuery("w3"), slop, expectedDocs);
+        }
+
+        public virtual void OrderedSlopTest3Equal(int slop, int[] expectedDocs)
+        {
+            OrderedSlopTest3SQ(MakeSpanTermQuery("w1"), MakeSpanTermQuery("w3"), MakeSpanTermQuery("w3"), slop, expectedDocs);
+        }
+
+        public virtual void OrderedSlopTest1Equal(int slop, int[] expectedDocs)
+        {
+            OrderedSlopTest3SQ(MakeSpanTermQuery("u2"), MakeSpanTermQuery("u2"), MakeSpanTermQuery("u1"), slop, expectedDocs);
+        }
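+
+        // Illustrative sketch (not part of the original test): with ordered = true,
+        // SpanNearQuery matches only when the clauses occur in the given order with
+        // at most 'slop' positions between them. Doc 1 above ("w1 w3 w2 w3") matches
+        // w1..w2..w3 only once slop >= 1, because the extra w3 sits between w1 and w2.
+        [Test]
+        public virtual void TestSpanNearOrderedSlopSketch()
+        {
+            SpanNearQuery snq = new SpanNearQuery(
+                new SpanQuery[] { MakeSpanTermQuery("w1"), MakeSpanTermQuery("w2"), MakeSpanTermQuery("w3") },
+                1,     // slop: at most one extra position between the ordered clauses
+                true); // ordered
+            CheckHits(snq, new int[] { 0, 1 });
+        }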
+
+        [Test]
+        public virtual void TestSpanNearOrdered01()
+        {
+            OrderedSlopTest3(0, new int[] { 0 });
+        }
+
+        [Test]
+        public virtual void TestSpanNearOrdered02()
+        {
+            OrderedSlopTest3(1, new int[] { 0, 1 });
+        }
+
+        [Test]
+        public virtual void TestSpanNearOrdered03()
+        {
+            OrderedSlopTest3(2, new int[] { 0, 1, 2 });
+        }
+
+        [Test]
+        public virtual void TestSpanNearOrdered04()
+        {
+            OrderedSlopTest3(3, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSpanNearOrdered05()
+        {
+            OrderedSlopTest3(4, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSpanNearOrderedEqual01()
+        {
+            OrderedSlopTest3Equal(0, new int[] { });
+        }
+
+        [Test]
+        public virtual void TestSpanNearOrderedEqual02()
+        {
+            OrderedSlopTest3Equal(1, new int[] { 1 });
+        }
+
+        [Test]
+        public virtual void TestSpanNearOrderedEqual03()
+        {
+            OrderedSlopTest3Equal(2, new int[] { 1 });
+        }
+
+        [Test]
+        public virtual void TestSpanNearOrderedEqual04()
+        {
+            OrderedSlopTest3Equal(3, new int[] { 1, 3 });
+        }
+
+        [Test]
+        public virtual void TestSpanNearOrderedEqual11()
+        {
+            OrderedSlopTest1Equal(0, new int[] { 4 });
+        }
+
+        [Test]
+        public virtual void TestSpanNearOrderedEqual12()
+        {
+            OrderedSlopTest1Equal(0, new int[] { 4 });
+        }
+
+        [Test]
+        public virtual void TestSpanNearOrderedEqual13()
+        {
+            OrderedSlopTest1Equal(1, new int[] { 4, 5, 6 });
+        }
+
+        [Test]
+        public virtual void TestSpanNearOrderedEqual14()
+        {
+            OrderedSlopTest1Equal(2, new int[] { 4, 5, 6, 7 });
+        }
+
+        [Test]
+        public virtual void TestSpanNearOrderedEqual15()
+        {
+            OrderedSlopTest1Equal(3, new int[] { 4, 5, 6, 7 });
+        }
+
+        [Test]
+        public virtual void TestSpanNearOrderedOverlap()
+        {
+            bool ordered = true;
+            int slop = 1;
+            SpanNearQuery snq = new SpanNearQuery(new SpanQuery[] { MakeSpanTermQuery("t1"), MakeSpanTermQuery("t2"), MakeSpanTermQuery("t3") }, slop, ordered);
+            Spans spans = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, snq);
+
+            Assert.IsTrue(spans.Next(), "first range");
+            Assert.AreEqual(11, spans.Doc, "first doc");
+            Assert.AreEqual(0, spans.Start, "first start");
+            Assert.AreEqual(4, spans.End, "first end");
+
+            Assert.IsTrue(spans.Next(), "second range");
+            Assert.AreEqual(11, spans.Doc, "second doc");
+            Assert.AreEqual(2, spans.Start, "second start");
+            Assert.AreEqual(6, spans.End, "second end");
+
+            Assert.IsFalse(spans.Next(), "third range");
+        }
+
+        [Test]
+        public virtual void TestSpanNearUnOrdered()
+        {
+            //See http://www.gossamer-threads.com/lists/lucene/java-dev/52270 for discussion about this test
+            SpanNearQuery snq;
+            snq = new SpanNearQuery(new SpanQuery[] { MakeSpanTermQuery("u1"), MakeSpanTermQuery("u2") }, 0, false);
+            Spans spans = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, snq);
+            Assert.IsTrue(spans.Next(), "Does not have next and it should");
+            Assert.AreEqual(4, spans.Doc, "doc");
+            Assert.AreEqual(1, spans.Start, "start");
+            Assert.AreEqual(3, spans.End, "end");
+
+            Assert.IsTrue(spans.Next(), "Does not have next and it should");
+            Assert.AreEqual(5, spans.Doc, "doc");
+            Assert.AreEqual(2, spans.Start, "start");
+            Assert.AreEqual(4, spans.End, "end");
+
+            Assert.IsTrue(spans.Next(), "Does not have next and it should");
+            Assert.AreEqual(8, spans.Doc, "doc");
+            Assert.AreEqual(2, spans.Start, "start");
+            Assert.AreEqual(4, spans.End, "end");
+
+            Assert.IsTrue(spans.Next(), "Does not have next and it should");
+            Assert.AreEqual(9, spans.Doc, "doc");
+            Assert.AreEqual(0, spans.Start, "start");
+            Assert.AreEqual(2, spans.End, "end");
+
+            Assert.IsTrue(spans.Next(), "Does not have next and it should");
+            Assert.AreEqual(10, spans.Doc, "doc");
+            Assert.AreEqual(0, spans.Start, "start");
+            Assert.AreEqual(2, spans.End, "end");
+            Assert.IsFalse(spans.Next(), "Has next and it shouldn't: " + spans.Doc);
+
+            SpanNearQuery u1u2 = new SpanNearQuery(new SpanQuery[] { MakeSpanTermQuery("u1"), MakeSpanTermQuery("u2") }, 0, false);
+            snq = new SpanNearQuery(new SpanQuery[] { u1u2, MakeSpanTermQuery("u2") }, 1, false);
+            spans = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, snq);
+            Assert.IsTrue(spans.Next(), "Does not have next and it should");
+            Assert.AreEqual(4, spans.Doc, "doc");
+            Assert.AreEqual(0, spans.Start, "start");
+            Assert.AreEqual(3, spans.End, "end");
+
+            Assert.IsTrue(spans.Next(), "Does not have next and it should");
+            //unordered spans can be subsets
+            Assert.AreEqual(4, spans.Doc, "doc");
+            Assert.AreEqual(1, spans.Start, "start");
+            Assert.AreEqual(3, spans.End, "end");
+
+            Assert.IsTrue(spans.Next(), "Does not have next and it should");
+            Assert.AreEqual(5, spans.Doc, "doc");
+            Assert.AreEqual(0, spans.Start, "start");
+            Assert.AreEqual(4, spans.End, "end");
+
+            Assert.IsTrue(spans.Next(), "Does not have next and it should");
+            Assert.AreEqual(5, spans.Doc, "doc");
+            Assert.AreEqual(2, spans.Start, "start");
+            Assert.AreEqual(4, spans.End, "end");
+
+            Assert.IsTrue(spans.Next(), "Does not have next and it should");
+            Assert.AreEqual(8, spans.Doc, "doc");
+            Assert.AreEqual(0, spans.Start, "start");
+            Assert.AreEqual(4, spans.End, "end");
+
+            Assert.IsTrue(spans.Next(), "Does not have next and it should");
+            Assert.AreEqual(8, spans.Doc, "doc");
+            Assert.AreEqual(2, spans.Start, "start");
+            Assert.AreEqual(4, spans.End, "end");
+
+            Assert.IsTrue(spans.Next(), "Does not have next and it should");
+            Assert.AreEqual(9, spans.Doc, "doc");
+            Assert.AreEqual(0, spans.Start, "start");
+            Assert.AreEqual(2, spans.End, "end");
+
+            Assert.IsTrue(spans.Next(), "Does not have next and it should");
+            Assert.AreEqual(9, spans.Doc, "doc");
+            Assert.AreEqual(0, spans.Start, "start");
+            Assert.AreEqual(4, spans.End, "end");
+
+            Assert.IsTrue(spans.Next(), "Does not have next and it should");
+            Assert.AreEqual(10, spans.Doc, "doc");
+            Assert.AreEqual(0, spans.Start, "start");
+            Assert.AreEqual(2, spans.End, "end");
+
+            Assert.IsFalse(spans.Next(), "Has next and it shouldn't");
+        }
+
+        private Spans OrSpans(string[] terms)
+        {
+            SpanQuery[] sqa = new SpanQuery[terms.Length];
+            for (int i = 0; i < terms.Length; i++)
+            {
+                sqa[i] = MakeSpanTermQuery(terms[i]);
+            }
+            return MultiSpansWrapper.Wrap(Searcher.TopReaderContext, new SpanOrQuery(sqa));
+        }
+
+        private void TstNextSpans(Spans spans, int doc, int start, int end)
+        {
+            Assert.IsTrue(spans.Next(), "next");
+            Assert.AreEqual(doc, spans.Doc, "doc");
+            Assert.AreEqual(start, spans.Start, "start");
+            Assert.AreEqual(end, spans.End, "end");
+        }
+
+        [Test]
+        public virtual void TestSpanOrEmpty()
+        {
+            Spans spans = OrSpans(new string[0]);
+            Assert.IsFalse(spans.Next(), "empty next");
+
+            SpanOrQuery a = new SpanOrQuery();
+            SpanOrQuery b = new SpanOrQuery();
+            Assert.IsTrue(a.Equals(b), "empty should equal");
+        }
+
+        [Test]
+        public virtual void TestSpanOrSingle()
+        {
+            Spans spans = OrSpans(new string[] { "w5" });
+            TstNextSpans(spans, 0, 4, 5);
+            Assert.IsFalse(spans.Next(), "final next");
+        }
+
+        [Test]
+        public virtual void TestSpanOrMovesForward()
+        {
+            Spans spans = OrSpans(new string[] { "w1", "xx" });
+
+            spans.Next();
+            int doc = spans.Doc;
+            Assert.AreEqual(0, doc);
+
+            spans.SkipTo(0);
+            doc = spans.Doc;
+
+            // LUCENE-1583:
+            // according to Spans, a skipTo to the same doc or less
+            // should still call next() on the underlying Spans
+            Assert.AreEqual(1, doc);
+        }
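+
+        // Illustrative sketch (not part of the original test) of the LUCENE-1583
+        // contract noted above: a SkipTo whose target is at or before the current
+        // doc must still advance the enumeration, exactly as if Next() were called.
+        [Test]
+        public virtual void TestSpanOrSkipToSameTargetAdvancesSketch()
+        {
+            Spans spans = OrSpans(new string[] { "w1", "xx" });
+            Assert.IsTrue(spans.SkipTo(0), "first skipTo");  // positions on doc 0
+            Assert.AreEqual(0, spans.Doc);
+            Assert.IsTrue(spans.SkipTo(0), "second skipTo"); // must advance, not stall
+            Assert.AreEqual(1, spans.Doc);
+        }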
+
+        [Test]
+        public virtual void TestSpanOrDouble()
+        {
+            Spans spans = OrSpans(new string[] { "w5", "yy" });
+            TstNextSpans(spans, 0, 4, 5);
+            TstNextSpans(spans, 2, 3, 4);
+            TstNextSpans(spans, 3, 4, 5);
+            TstNextSpans(spans, 7, 3, 4);
+            Assert.IsFalse(spans.Next(), "final next");
+        }
+
+        [Test]
+        public virtual void TestSpanOrDoubleSkip()
+        {
+            Spans spans = OrSpans(new string[] { "w5", "yy" });
+            Assert.IsTrue(spans.SkipTo(3), "initial skipTo");
+            Assert.AreEqual(3, spans.Doc, "doc");
+            Assert.AreEqual(4, spans.Start, "start");
+            Assert.AreEqual(5, spans.End, "end");
+            TstNextSpans(spans, 7, 3, 4);
+            Assert.IsFalse(spans.Next(), "final next");
+        }
+
+        [Test]
+        public virtual void TestSpanOrUnused()
+        {
+            Spans spans = OrSpans(new string[] { "w5", "unusedTerm", "yy" });
+            TstNextSpans(spans, 0, 4, 5);
+            TstNextSpans(spans, 2, 3, 4);
+            TstNextSpans(spans, 3, 4, 5);
+            TstNextSpans(spans, 7, 3, 4);
+            Assert.IsFalse(spans.Next(), "final next");
+        }
+
+        [Test]
+        public virtual void TestSpanOrTripleSameDoc()
+        {
+            Spans spans = OrSpans(new string[] { "t1", "t2", "t3" });
+            TstNextSpans(spans, 11, 0, 1);
+            TstNextSpans(spans, 11, 1, 2);
+            TstNextSpans(spans, 11, 2, 3);
+            TstNextSpans(spans, 11, 3, 4);
+            TstNextSpans(spans, 11, 4, 5);
+            TstNextSpans(spans, 11, 5, 6);
+            Assert.IsFalse(spans.Next(), "final next");
+        }
+
+        [Test]
+        public virtual void TestSpanScorerZeroSloppyFreq()
+        {
+            bool ordered = true;
+            int slop = 1;
+            IndexReaderContext topReaderContext = Searcher.TopReaderContext;
+            IList<AtomicReaderContext> leaves = topReaderContext.Leaves;
+            int subIndex = ReaderUtil.SubIndex(11, leaves);
+            for (int i = 0, c = leaves.Count; i < c; i++)
+            {
+                AtomicReaderContext ctx = leaves[i];
+
+                Similarity sim = new DefaultSimilarityAnonymousInnerClassHelper(this);
+
+                Similarity oldSim = Searcher.Similarity;
+                Scorer spanScorer;
+                try
+                {
+                    Searcher.Similarity = sim;
+                    SpanNearQuery snq = new SpanNearQuery(new SpanQuery[] { MakeSpanTermQuery("t1"), MakeSpanTermQuery("t2") }, slop, ordered);
+
+                    spanScorer = Searcher.CreateNormalizedWeight(snq).GetScorer(ctx, ((AtomicReader)ctx.Reader).LiveDocs);
+                }
+                finally
+                {
+                    Searcher.Similarity = oldSim;
+                }
+                if (i == subIndex)
+                {
+                    Assert.IsTrue(spanScorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS, "first doc");
+                    Assert.AreEqual(spanScorer.DocID + ctx.DocBase, 11, "first doc number");
+                    float score = spanScorer.GetScore();
+                    Assert.IsTrue(score == 0.0f, "first doc score should be zero, " + score);
+                }
+                else
+                {
+                    Assert.IsTrue(spanScorer.NextDoc() == DocIdSetIterator.NO_MORE_DOCS, "no second doc");
+                }
+            }
+        }
+
+        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        {
+            private readonly TestSpans OuterInstance;
+
+            public DefaultSimilarityAnonymousInnerClassHelper(TestSpans outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override float SloppyFreq(int distance)
+            {
+                return 0.0f;
+            }
+        }
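+
+        // Note: the named nested class above stands in for Java's anonymous
+        // DefaultSimilarity subclass in the original test. Because SloppyFreq always
+        // returns 0, every sloppy span match contributes nothing to the score, which
+        // is why TestSpanScorerZeroSloppyFreq expects exactly 0.0f.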
+
+        // LUCENE-1404
+        private void AddDoc(IndexWriter writer, string id, string text)
+        {
+            Document doc = new Document();
+            doc.Add(NewStringField("id", id, Field.Store.YES));
+            doc.Add(NewTextField("text", text, Field.Store.YES));
+            writer.AddDocument(doc);
+        }
+
+        // LUCENE-1404
+        private int HitCount(IndexSearcher searcher, string word)
+        {
+            return searcher.Search(new TermQuery(new Term("text", word)), 10).TotalHits;
+        }
+
+        // LUCENE-1404
+        private SpanQuery CreateSpan(string value)
+        {
+            return new SpanTermQuery(new Term("text", value));
+        }
+
+        // LUCENE-1404
+        private SpanQuery CreateSpan(int slop, bool ordered, SpanQuery[] clauses)
+        {
+            return new SpanNearQuery(clauses, slop, ordered);
+        }
+
+        // LUCENE-1404
+        private SpanQuery CreateSpan(int slop, bool ordered, string term1, string term2)
+        {
+            return CreateSpan(slop, ordered, new SpanQuery[] { CreateSpan(term1), CreateSpan(term2) });
+        }
+
+        // LUCENE-1404
+        [Test]
+        public virtual void TestNPESpanQuery()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            // Add documents
+            AddDoc(writer, "1", "the big dogs went running to the market");
+            AddDoc(writer, "2", "the cat chased the mouse, then the cat ate the mouse quickly");
+
+            // Commit
+            writer.Dispose();
+
+            // Get searcher
+            IndexReader reader = DirectoryReader.Open(dir);
+            IndexSearcher searcher = NewSearcher(reader);
+
+            // Control (make sure docs indexed)
+            Assert.AreEqual(2, HitCount(searcher, "the"));
+            Assert.AreEqual(1, HitCount(searcher, "cat"));
+            Assert.AreEqual(1, HitCount(searcher, "dogs"));
+            Assert.AreEqual(0, HitCount(searcher, "rabbit"));
+
+            // this formerly threw an exception (it shouldn't; LUCENE-1404)
+            Assert.AreEqual(1, searcher.Search(CreateSpan(0, true, new SpanQuery[] { CreateSpan(4, false, "chased", "cat"), CreateSpan("ate") }), 10).TotalHits);
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSpanNots()
+        {
+            Assert.AreEqual(0, SpanCount("s2", "s2", 0, 0), "SpanNotIncludeExcludeSame1");
+            Assert.AreEqual(0, SpanCount("s2", "s2", 10, 10), "SpanNotIncludeExcludeSame2");
+
+            //focus on behind
+            Assert.AreEqual(1, SpanCount("s2", "s1", 6, 0), "SpanNotS2NotS1_6_0");
+            Assert.AreEqual(2, SpanCount("s2", "s1", 5, 0), "SpanNotS2NotS1_5_0");
+            Assert.AreEqual(3, SpanCount("s2", "s1", 3, 0), "SpanNotS2NotS1_3_0");
+            Assert.AreEqual(4, SpanCount("s2", "s1", 2, 0), "SpanNotS2NotS1_2_0");
+            Assert.AreEqual(4, SpanCount("s2", "s1", 0, 0), "SpanNotS2NotS1_0_0");
+
+            //focus on both
+            Assert.AreEqual(2, SpanCount("s2", "s1", 3, 1), "SpanNotS2NotS1_3_1");
+            Assert.AreEqual(3, SpanCount("s2", "s1", 2, 1), "SpanNotS2NotS1_2_1");
+            Assert.AreEqual(3, SpanCount("s2", "s1", 1, 1), "SpanNotS2NotS1_1_1");
+            Assert.AreEqual(0, SpanCount("s2", "s1", 10, 10), "SpanNotS2NotS1_10_10");
+
+            //focus on ahead
+            Assert.AreEqual(0, SpanCount("s1", "s2", 10, 10), "SpanNotS1NotS2_10_10");
+            Assert.AreEqual(3, SpanCount("s1", "s2", 0, 1), "SpanNotS1NotS2_0_1");
+            Assert.AreEqual(3, SpanCount("s1", "s2", 0, 2), "SpanNotS1NotS2_0_2");
+            Assert.AreEqual(2, SpanCount("s1", "s2", 0, 3), "SpanNotS1NotS2_0_3");
+            Assert.AreEqual(1, SpanCount("s1", "s2", 0, 4), "SpanNotS1NotS2_0_4");
+            Assert.AreEqual(0, SpanCount("s1", "s2", 0, 8), "SpanNotS1NotS2_0_8");
+
+            //exclude doesn't exist
+            Assert.AreEqual(3, SpanCount("s1", "s3", 8, 8), "SpanNotS1NotS3_8_8");
+
+            //include doesn't exist
+            Assert.AreEqual(0, SpanCount("s3", "s1", 8, 8), "SpanNotS3NotS1_8_8");
+        }
+
+        private int SpanCount(string include, string exclude, int pre, int post)
+        {
+            SpanTermQuery iq = new SpanTermQuery(new Term(field, include));
+            SpanTermQuery eq = new SpanTermQuery(new Term(field, exclude));
+            SpanNotQuery snq = new SpanNotQuery(iq, eq, pre, post);
+            Spans spans = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, snq);
+
+            int i = 0;
+            while (spans.Next())
+            {
+                i++;
+            }
+            return i;
+        }
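+
+        // Illustrative note (not part of the original test): SpanNotQuery(iq, eq, pre, post)
+        // keeps a span of 'iq' only when no span of 'eq' overlaps the window stretching
+        // 'pre' positions before it and 'post' positions after it. That is why widening
+        // 'pre' from 0 to 6 in TestSpanNots shrinks the s2-not-s1 count from 4 to 1:
+        // more s2 occurrences acquire a nearby preceding s1.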
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Spans/TestSpansAdvanced.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Spans/TestSpansAdvanced.cs b/src/Lucene.Net.Tests/Search/Spans/TestSpansAdvanced.cs
new file mode 100644
index 0000000..fc32eaa
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Spans/TestSpansAdvanced.cs
@@ -0,0 +1,181 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search.Spans
+{
+    using Lucene.Net.Search;
+    using NUnit.Framework;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenFilter = Lucene.Net.Analysis.MockTokenFilter;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Tests the span query bug in Lucene. It demonstrates that SpanTermQuery instances
+    /// don't work correctly in a BooleanQuery.
+    /// </summary>
+    [TestFixture]
+    public class TestSpansAdvanced : LuceneTestCase
+    {
+        // location to the index
+        protected internal Directory MDirectory;
+
+        protected internal IndexReader Reader;
+        protected internal IndexSearcher Searcher;
+
+        // field names in the index
+        private const string FIELD_ID = "ID";
+
+        protected internal const string FIELD_TEXT = "TEXT";
+
+        /// <summary>
+        /// Initializes the tests by adding 4 identical documents to the index.
+        /// </summary>
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            // create test index
+            MDirectory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), MDirectory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET)).SetMergePolicy(NewLogMergePolicy()).SetSimilarity(new DefaultSimilarity()));
+            AddDocument(writer, "1", "I think it should work.");
+            AddDocument(writer, "2", "I think it should work.");
+            AddDocument(writer, "3", "I think it should work.");
+            AddDocument(writer, "4", "I think it should work.");
+            Reader = writer.Reader;
+            writer.Dispose();
+            Searcher = NewSearcher(Reader);
+            Searcher.Similarity = new DefaultSimilarity();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            if (Reader != null)
+            {
+                Reader.Dispose();
+            }
+
+            if (MDirectory != null)
+            {
+                MDirectory.Dispose();
+                MDirectory = null;
+            }
+            base.TearDown();
+        }
+
+        /// <summary>
+        /// Adds the document to the index.
+        /// </summary>
+        /// <param name="writer"> the Lucene index writer </param>
+        /// <param name="id"> the unique id of the document </param>
+        /// <param name="text"> the text of the document </param>
+        protected internal virtual void AddDocument(RandomIndexWriter writer, string id, string text)
+        {
+            Document document = new Document();
+            document.Add(NewStringField(FIELD_ID, id, Field.Store.YES));
+            document.Add(NewTextField(FIELD_TEXT, text, Field.Store.YES));
+            writer.AddDocument(document);
+        }
+
+        /// <summary>
+        /// Tests two span queries.
+        /// </summary>
+        [Test]
+        public virtual void TestBooleanQueryWithSpanQueries()
+        {
+            DoTestBooleanQueryWithSpanQueries(Searcher, 0.3884282f);
+        }
+
+        /// <summary>
+        /// Tests two span queries.
+        /// </summary>
+        protected internal virtual void DoTestBooleanQueryWithSpanQueries(IndexSearcher s, float expectedScore)
+        {
+            Query spanQuery = new SpanTermQuery(new Term(FIELD_TEXT, "work"));
+            BooleanQuery query = new BooleanQuery();
+            query.Add(spanQuery, Occur.MUST);
+            query.Add(spanQuery, Occur.MUST);
+            string[] expectedIds = new string[] { "1", "2", "3", "4" };
+            float[] expectedScores = new float[] { expectedScore, expectedScore, expectedScore, expectedScore };
+            AssertHits(s, query, "two span queries", expectedIds, expectedScores);
+        }
+
+        /// <summary>
+        /// Checks to see if the hits are what we expected.
+        /// 
+        /// LUCENENET specific
+        /// Is non-static because it depends on the non-static variable, <see cref="LuceneTestCase.Similarity"/>
+        /// </summary>
+        /// <param name="query"> the query to execute </param>
+        /// <param name="description"> the description of the search </param>
+        /// <param name="expectedIds"> the expected document ids of the hits </param>
+        /// <param name="expectedScores"> the expected scores of the hits </param>
+        protected internal void AssertHits(IndexSearcher s, Query query, string description, string[] expectedIds, float[] expectedScores)
+        {
+            QueryUtils.Check(Random(), query, s, Similarity);
+
+            const float tolerance = 1e-5f;
+
+            // Hits hits = searcher.Search(query);
+            // hits normalizes and throws things off if one score is greater than 1.0
+            TopDocs topdocs = s.Search(query, null, 10000);
+
+            /*
+            // display the hits
+            Console.WriteLine(topdocs.TotalHits + " hits for search: \"" + description + '\"');
+            for (int i = 0; i < topdocs.ScoreDocs.Length; i++)
+            {
+                Console.WriteLine("  " + FIELD_ID + ':' + s.Doc(topdocs.ScoreDocs[i].Doc).Get(FIELD_ID)
+                    + " (score:" + topdocs.ScoreDocs[i].Score + ')');
+            }
+            */
+
+            // did we get the hits we expected
+            Assert.AreEqual(expectedIds.Length, topdocs.TotalHits);
+            for (int i = 0; i < topdocs.TotalHits; i++)
+            {
+                // Console.WriteLine(i + " exp: " + expectedIds[i]);
+                // Console.WriteLine(i + " field: " + s.Doc(topdocs.ScoreDocs[i].Doc).Get(FIELD_ID));
+
+                int id = topdocs.ScoreDocs[i].Doc;
+                float score = topdocs.ScoreDocs[i].Score;
+                Document doc = s.Doc(id);
+                Assert.AreEqual(expectedIds[i], doc.Get(FIELD_ID));
+                bool scoreEq = Math.Abs(expectedScores[i] - score) < tolerance;
+                if (!scoreEq)
+                {
+                    Console.WriteLine(i + " warning, expected score: " + expectedScores[i] + ", actual " + score);
+                    Console.WriteLine(s.Explain(query, id));
+                }
+                Assert.AreEqual(expectedScores[i], score, tolerance);
+                Assert.AreEqual(s.Explain(query, id).Value, score, tolerance);
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Spans/TestSpansAdvanced2.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Spans/TestSpansAdvanced2.cs b/src/Lucene.Net.Tests/Search/Spans/TestSpansAdvanced2.cs
new file mode 100644
index 0000000..6565720
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Spans/TestSpansAdvanced2.cs
@@ -0,0 +1,124 @@
+namespace Lucene.Net.Search.Spans
+{
+    using Lucene.Net.Search;
+    using NUnit.Framework;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenFilter = Lucene.Net.Analysis.MockTokenFilter;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using OpenMode = Lucene.Net.Index.OpenMode;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Some expanded tests to make sure my patch doesn't break other SpanTermQuery
+    /// functionality.
+    /// </summary>
+    [TestFixture]
+    public class TestSpansAdvanced2 : TestSpansAdvanced
+    {
+        internal IndexSearcher Searcher2;
+        internal IndexReader Reader2;
+
+        /// <summary>
+        /// Initializes the tests by adding documents to the index.
+        /// </summary>
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+
+            // create test index
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), MDirectory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET)).SetOpenMode(OpenMode.APPEND).SetMergePolicy(NewLogMergePolicy()).SetSimilarity(new DefaultSimilarity()));
+            AddDocument(writer, "A", "Should we, could we, would we?");
+            AddDocument(writer, "B", "It should.  Should it?");
+            AddDocument(writer, "C", "It shouldn't.");
+            AddDocument(writer, "D", "Should we, should we, should we.");
+            Reader2 = writer.Reader;
+            writer.Dispose();
+
+            // re-open the searcher since we added more docs
+            Searcher2 = NewSearcher(Reader2);
+            Searcher2.Similarity = new DefaultSimilarity();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader2.Dispose();
+            base.TearDown();
+        }
+
+        /// <summary>
+        /// Verifies that the index has the correct number of documents: the four added
+        /// by the base class plus the four appended here (OpenMode.APPEND reuses MDirectory).
+        /// </summary>
+        [Test]
+        public virtual void TestVerifyIndex()
+        {
+            IndexReader reader = DirectoryReader.Open(MDirectory);
+            Assert.AreEqual(8, reader.NumDocs);
+            reader.Dispose();
+        }
+
+        /// <summary>
+        /// Tests a single span query that matches multiple documents.
+        /// </summary>
+        [Test]
+        public virtual void TestSingleSpanQuery()
+        {
+            Query spanQuery = new SpanTermQuery(new Term(FIELD_TEXT, "should"));
+            string[] expectedIds = new string[] { "B", "D", "1", "2", "3", "4", "A" };
+            float[] expectedScores = new float[] { 0.625f, 0.45927936f, 0.35355338f, 0.35355338f, 0.35355338f, 0.35355338f, 0.26516503f };
+            AssertHits(Searcher2, spanQuery, "single span query", expectedIds, expectedScores);
+        }
+
+        /// <summary>
+        /// Tests a boolean query combining two different span queries.
+        /// </summary>
+        [Test]
+        public virtual void TestMultipleDifferentSpanQueries()
+        {
+            Query spanQuery1 = new SpanTermQuery(new Term(FIELD_TEXT, "should"));
+            Query spanQuery2 = new SpanTermQuery(new Term(FIELD_TEXT, "we"));
+            BooleanQuery query = new BooleanQuery();
+            query.Add(spanQuery1, Occur.MUST);
+            query.Add(spanQuery2, Occur.MUST);
+            string[] expectedIds = new string[] { "D", "A" };
+            // these values were pre LUCENE-413
+            // final float[] expectedScores = new float[] { 0.93163157f, 0.20698164f };
+            float[] expectedScores = new float[] { 1.0191123f, 0.93163157f };
+            AssertHits(Searcher2, query, "multiple different span queries", expectedIds, expectedScores);
+        }
+
+        /// <summary>
+        /// Tests two span queries.
+        /// </summary>
+        [Test]
+        public override void TestBooleanQueryWithSpanQueries()
+        {
+            DoTestBooleanQueryWithSpanQueries(Searcher2, 0.73500174f);
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestAutomatonQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestAutomatonQuery.cs b/src/Lucene.Net.Tests/Search/TestAutomatonQuery.cs
new file mode 100644
index 0000000..0eeb5f9
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestAutomatonQuery.cs
@@ -0,0 +1,278 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Support;
+using System;
+using System.Threading;
+
+namespace Lucene.Net.Search
+{
+    using Attributes;
+    using NUnit.Framework;
+    using Automaton = Lucene.Net.Util.Automaton.Automaton;
+    using AutomatonTestUtil = Lucene.Net.Util.Automaton.AutomatonTestUtil;
+    using BasicAutomata = Lucene.Net.Util.Automaton.BasicAutomata;
+    using BasicOperations = Lucene.Net.Util.Automaton.BasicOperations;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MultiFields = Lucene.Net.Index.MultiFields;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using SingleTermsEnum = Lucene.Net.Index.SingleTermsEnum;
+    using Term = Lucene.Net.Index.Term;
+    using Terms = Lucene.Net.Index.Terms;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestAutomatonQuery : LuceneTestCase
+    {
+        private Directory Directory;
+        private IndexReader Reader;
+        private IndexSearcher Searcher;
+
+        private readonly string FN = "field";
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, Similarity, TimeZone);
+            Document doc = new Document();
+            Field titleField = NewTextField("title", "some title", Field.Store.NO);
+            Field field = NewTextField(FN, "this is document one 2345", Field.Store.NO);
+            Field footerField = NewTextField("footer", "a footer", Field.Store.NO);
+            doc.Add(titleField);
+            doc.Add(field);
+            doc.Add(footerField);
+            writer.AddDocument(doc);
+            field.SetStringValue("some text from doc two a short piece 5678.91");
+            writer.AddDocument(doc);
+            field.SetStringValue("doc three has some different stuff" + " with numbers 1234 5678.9 and letter b");
+            writer.AddDocument(doc);
+            Reader = writer.Reader;
+            Searcher = NewSearcher(Reader);
+            writer.Dispose();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Directory.Dispose();
+            base.TearDown();
+        }
+
+        private Term NewTerm(string value)
+        {
+            return new Term(FN, value);
+        }
+
+        private int AutomatonQueryNrHits(AutomatonQuery query)
+        {
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: run aq=" + query);
+            }
+            return Searcher.Search(query, 5).TotalHits;
+        }
+
+        private void AssertAutomatonHits(int expected, Automaton automaton)
+        {
+            AutomatonQuery query = new AutomatonQuery(NewTerm("bogus"), automaton);
+
+            query.MultiTermRewriteMethod = MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE;
+            Assert.AreEqual(expected, AutomatonQueryNrHits(query));
+
+            query.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
+            Assert.AreEqual(expected, AutomatonQueryNrHits(query));
+
+            query.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE;
+            Assert.AreEqual(expected, AutomatonQueryNrHits(query));
+
+            query.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT;
+            Assert.AreEqual(expected, AutomatonQueryNrHits(query));
+        }
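+
+        // Illustrative note (not part of the original test): the four assignments above
+        // cover the standard MultiTermQuery rewrite strategies, and a correct
+        // AutomatonQuery must return the same hit count under each of them. A
+        // hypothetical single-strategy check, equivalent to one arm of AssertAutomatonHits:
+        //
+        //   AutomatonQuery q = new AutomatonQuery(NewTerm("bogus"), BasicAutomata.MakeString("doc"));
+        //   q.MultiTermRewriteMethod = MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE;
+        //   Assert.AreEqual(2, AutomatonQueryNrHits(q)); // "doc" occurs in two of the three docs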
+
+        /// <summary>
+        /// Test some very simple automata.
+        /// </summary>
+        [Test]
+        public virtual void TestBasicAutomata()
+        {
+            AssertAutomatonHits(0, BasicAutomata.MakeEmpty());
+            AssertAutomatonHits(0, BasicAutomata.MakeEmptyString());
+            AssertAutomatonHits(2, BasicAutomata.MakeAnyChar());
+            AssertAutomatonHits(3, BasicAutomata.MakeAnyString());
+            AssertAutomatonHits(2, BasicAutomata.MakeString("doc"));
+            AssertAutomatonHits(1, BasicAutomata.MakeChar('a'));
+            AssertAutomatonHits(2, BasicAutomata.MakeCharRange('a', 'b'));
+            AssertAutomatonHits(2, BasicAutomata.MakeInterval(1233, 2346, 0));
+            AssertAutomatonHits(1, BasicAutomata.MakeInterval(0, 2000, 0));
+            AssertAutomatonHits(2, BasicOperations.Union(BasicAutomata.MakeChar('a'), BasicAutomata.MakeChar('b')));
+            AssertAutomatonHits(0, BasicOperations.Intersection(BasicAutomata.MakeChar('a'), BasicAutomata.MakeChar('b')));
+            AssertAutomatonHits(1, BasicOperations.Minus(BasicAutomata.MakeCharRange('a', 'b'), BasicAutomata.MakeChar('a')));
+        }
+
+        /// <summary>
+        /// Test that a nondeterministic automaton works correctly
+        /// (it should be determinized).
+        /// </summary>
+        [Test]
+        public virtual void TestNFA()
+        {
+            // accept this or three, the union is an NFA (two transitions for 't' from
+            // initial state)
+            Automaton nfa = BasicOperations.Union(BasicAutomata.MakeString("this"), BasicAutomata.MakeString("three"));
+            AssertAutomatonHits(2, nfa);
+        }
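+
+        // Editorial sketch (assuming the Lucene 4.x automaton API): the query machinery
+        // determinizes the NFA before term enumeration; the same can be done explicitly:
+        //
+        //   BasicOperations.Determinize(nfa); // in-place subset construction
+        //   // nfa.IsDeterministic is now true; it still accepts exactly {"this", "three"}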
+
+        [Test]
+        public virtual void TestEquals()
+        {
+            AutomatonQuery a1 = new AutomatonQuery(NewTerm("foobar"), BasicAutomata.MakeString("foobar"));
+            // reference to a1
+            AutomatonQuery a2 = a1;
+            // same as a1 (accepts the same language, same term)
+            AutomatonQuery a3 = new AutomatonQuery(NewTerm("foobar"), BasicOperations.Concatenate(BasicAutomata.MakeString("foo"), BasicAutomata.MakeString("bar")));
+            // different from a1 (same term, but different language)
+            AutomatonQuery a4 = new AutomatonQuery(NewTerm("foobar"), BasicAutomata.MakeString("different"));
+            // different from a1 (different term, same language)
+            AutomatonQuery a5 = new AutomatonQuery(NewTerm("blah"), BasicAutomata.MakeString("foobar"));
+
+            Assert.AreEqual(a1.GetHashCode(), a2.GetHashCode());
+            Assert.AreEqual(a1, a2);
+
+            Assert.AreEqual(a1.GetHashCode(), a3.GetHashCode());
+            Assert.AreEqual(a1, a3);
+
+            // different class
+            AutomatonQuery w1 = new WildcardQuery(NewTerm("foobar"));
+            // different class
+            AutomatonQuery w2 = new RegexpQuery(NewTerm("foobar"));
+
+            Assert.IsFalse(a1.Equals(w1));
+            Assert.IsFalse(a1.Equals(w2));
+            Assert.IsFalse(w1.Equals(w2));
+            Assert.IsFalse(a1.Equals(a4));
+            Assert.IsFalse(a1.Equals(a5));
+            Assert.IsFalse(a1.Equals(null));
+        }
+
+        /// <summary>
+        /// Test that rewriting to a single term works as expected and preserves
+        /// MultiTermQuery semantics.
+        /// </summary>
+        [Test]
+        public virtual void TestRewriteSingleTerm()
+        {
+            AutomatonQuery aq = new AutomatonQuery(NewTerm("bogus"), BasicAutomata.MakeString("piece"));
+            Terms terms = MultiFields.GetTerms(Searcher.IndexReader, FN);
+            Assert.IsTrue(aq.GetTermsEnum(terms) is SingleTermsEnum);
+            Assert.AreEqual(1, AutomatonQueryNrHits(aq));
+        }
+
+        /// <summary>
+        /// Test that rewriting to a prefix query works as expected and preserves
+        /// MultiTermQuery semantics.
+        /// </summary>
+        [Test]
+        public virtual void TestRewritePrefix()
+        {
+            Automaton pfx = BasicAutomata.MakeString("do");
+            pfx.ExpandSingleton(); // expand singleton representation for testing
+            Automaton prefixAutomaton = BasicOperations.Concatenate(pfx, BasicAutomata.MakeAnyString());
+            AutomatonQuery aq = new AutomatonQuery(NewTerm("bogus"), prefixAutomaton);
+            Terms terms = MultiFields.GetTerms(Searcher.IndexReader, FN);
+
+            var en = aq.GetTermsEnum(terms);
+            Assert.IsTrue(en is PrefixTermsEnum, "Expected type PrefixTermsEnum but was {0}", en.GetType().Name);
+            Assert.AreEqual(3, AutomatonQueryNrHits(aq));
+        }
+
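+        // Editorial sketch (not asserted by the original test): the "do" + AnyString
+        // automaton matches exactly the terms a PrefixQuery on "do" would, so this
+        // should find the same three documents:
+        //
+        //   PrefixQuery pq = new PrefixQuery(NewTerm("do"));
+        //   Assert.AreEqual(3, Searcher.Search(pq, 5).TotalHits);
+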
+        /// <summary>
+        /// Test handling of the empty language
+        /// </summary>
+        [Test]
+        public virtual void TestEmptyOptimization()
+        {
+            AutomatonQuery aq = new AutomatonQuery(NewTerm("bogus"), BasicAutomata.MakeEmpty());
+            // not yet available: Assert.IsTrue(aq.getEnum(searcher.getIndexReader())
+            // instanceof EmptyTermEnum);
+            Terms terms = MultiFields.GetTerms(Searcher.IndexReader, FN);
+            Assert.AreSame(TermsEnum.EMPTY, aq.GetTermsEnum(terms));
+            Assert.AreEqual(0, AutomatonQueryNrHits(aq));
+        }
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit has no Timeout attribute on .NET Core.
+        [Timeout(40000)]
+#endif
+        [Test, LongRunningTest, HasTimeout]
+        public virtual void TestHashCodeWithThreads()
+        {
+            AutomatonQuery[] queries = new AutomatonQuery[1000];
+            for (int i = 0; i < queries.Length; i++)
+            {
+                queries[i] = new AutomatonQuery(new Term("bogus", "bogus"), AutomatonTestUtil.RandomAutomaton(Random()));
+            }
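+            // CountdownEvent(1) stands in for Java's CountDownLatch: every worker blocks
+            // in Wait() until the main thread calls Signal() once, so all threads hammer
+            // GetHashCode() concurrently and any race gets a fair chance to surface.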
+            CountdownEvent startingGun = new CountdownEvent(1);
+            int numThreads = TestUtil.NextInt(Random(), 2, 5);
+            ThreadClass[] threads = new ThreadClass[numThreads];
+            for (int threadID = 0; threadID < numThreads; threadID++)
+            {
+                ThreadClass thread = new ThreadAnonymousInnerClassHelper(this, queries, startingGun);
+                threads[threadID] = thread;
+                thread.Start();
+            }
+            startingGun.Signal();
+            foreach (ThreadClass thread in threads)
+            {
+                thread.Join();
+            }
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestAutomatonQuery OuterInstance;
+
+            private AutomatonQuery[] Queries;
+            private CountdownEvent StartingGun;
+
+            public ThreadAnonymousInnerClassHelper(TestAutomatonQuery outerInstance, AutomatonQuery[] queries, CountdownEvent startingGun)
+            {
+                this.OuterInstance = outerInstance;
+                this.Queries = queries;
+                this.StartingGun = startingGun;
+            }
+
+            public override void Run()
+            {
+                StartingGun.Wait();
+                for (int i = 0; i < Queries.Length; i++)
+                {
+                    Queries[i].GetHashCode();
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestAutomatonQueryUnicode.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestAutomatonQueryUnicode.cs b/src/Lucene.Net.Tests/Search/TestAutomatonQueryUnicode.cs
new file mode 100644
index 0000000..207f243
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestAutomatonQueryUnicode.cs
@@ -0,0 +1,139 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using Automaton = Lucene.Net.Util.Automaton.Automaton;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using RegExp = Lucene.Net.Util.Automaton.RegExp;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Test the automaton query for several Unicode corner cases,
+    /// specifically enumerating strings/indexes containing supplementary characters,
+    /// and the differences between UTF-8/UTF-32 and UTF-16 binary sort order.
+    /// </summary>
+    [TestFixture]
+    public class TestAutomatonQueryUnicode : LuceneTestCase
+    {
+        private IndexReader Reader;
+        private IndexSearcher Searcher;
+        private Directory Directory;
+
+        private readonly string FN = "field";
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, Similarity, TimeZone);
+            Document doc = new Document();
+            Field titleField = NewTextField("title", "some title", Field.Store.NO);
+            Field field = NewTextField(FN, "", Field.Store.NO);
+            Field footerField = NewTextField("footer", "a footer", Field.Store.NO);
+            doc.Add(titleField);
+            doc.Add(field);
+            doc.Add(footerField);
+            field.SetStringValue("\uD866\uDF05abcdef");
+            writer.AddDocument(doc);
+            field.SetStringValue("\uD866\uDF06ghijkl");
+            writer.AddDocument(doc);
+            // this sorts before the previous two in UTF-8/UTF-32, but after in UTF-16!!!
+            field.SetStringValue("\uFB94mnopqr");
+            writer.AddDocument(doc);
+            field.SetStringValue("\uFB95stuvwx"); // this one too.
+            writer.AddDocument(doc);
+            field.SetStringValue("a\uFFFCbc");
+            writer.AddDocument(doc);
+            field.SetStringValue("a\uFFFDbc");
+            writer.AddDocument(doc);
+            field.SetStringValue("a\uFFFEbc");
+            writer.AddDocument(doc);
+            field.SetStringValue("a\uFB94bc");
+            writer.AddDocument(doc);
+            field.SetStringValue("bacadaba");
+            writer.AddDocument(doc);
+            field.SetStringValue("\uFFFD");
+            writer.AddDocument(doc);
+            field.SetStringValue("\uFFFD\uD866\uDF05");
+            writer.AddDocument(doc);
+            field.SetStringValue("\uFFFD\uFFFD");
+            writer.AddDocument(doc);
+            Reader = writer.Reader;
+            Searcher = NewSearcher(Reader);
+            writer.Dispose();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Directory.Dispose();
+            base.TearDown();
+        }
+
+        private Term NewTerm(string value)
+        {
+            return new Term(FN, value);
+        }
+
+        private int AutomatonQueryNrHits(AutomatonQuery query)
+        {
+            return Searcher.Search(query, 5).TotalHits;
+        }
+
+        private void AssertAutomatonHits(int expected, Automaton automaton)
+        {
+            AutomatonQuery query = new AutomatonQuery(NewTerm("bogus"), automaton);
+
+            query.MultiTermRewriteMethod = MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE;
+            Assert.AreEqual(expected, AutomatonQueryNrHits(query));
+
+            query.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
+            Assert.AreEqual(expected, AutomatonQueryNrHits(query));
+
+            query.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE;
+            Assert.AreEqual(expected, AutomatonQueryNrHits(query));
+
+            query.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT;
+            Assert.AreEqual(expected, AutomatonQueryNrHits(query));
+        }
+
+        /// <summary>
+        /// Test that AutomatonQuery interacts with Lucene's sort order correctly.
+        ///
+        /// This expression matches anything starting either with a character from
+        /// the Arabic Presentation Forms block or with a supplementary character.
+        /// </summary>
+        [Test]
+        public virtual void TestSortOrder()
+        {
+            Automaton a = (new RegExp("((\uD866\uDF05)|\uFB94).*")).ToAutomaton();
+            AssertAutomatonHits(2, a);
+        }
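+
+        // Worked example (LUCENENET editorial note): compare U+FB94 with the
+        // supplementary character U+29B05 ("\uD866\uDF05" in UTF-16):
+        //   code point order (UTF-8/UTF-32): 0xFB94  <  0x29B05 -> "\uFB94..." sorts first
+        //   UTF-16 code unit order:          0xFB94  >  0xD866  -> "\uFB94..." sorts last
+        // Lucene's term dictionary compares UTF-8 bytes, so enumeration must follow the
+        // first ordering even though .NET string comparison follows the second.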
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestBoolean2.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestBoolean2.cs b/src/Lucene.Net.Tests/Search/TestBoolean2.cs
new file mode 100644
index 0000000..514f560
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestBoolean2.cs
@@ -0,0 +1,423 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Similarity = Lucene.Net.Search.Similarities.Similarity;
+    using Term = Lucene.Net.Index.Term;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// Test BooleanQuery2 against BooleanQuery by overriding the standard query parser.
+    /// This also tests the scoring order of BooleanQuery.
+    /// </summary>
+    [TestFixture]
+    public class TestBoolean2 : LuceneTestCase
+    {
+        private static IndexSearcher Searcher;
+        private static IndexSearcher BigSearcher;
+        private static IndexReader Reader;
+        private static IndexReader LittleReader;
+        private static int NUM_EXTRA_DOCS = 6000;
+
+        public const string field = "field";
+        private static Directory Directory;
+        private static Directory Dir2;
+        private static int MulFactor;
+
+        /// <summary>
+        /// LUCENENET specific: non-static because NewIndexWriterConfig is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            for (int i = 0; i < DocFields.Length; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField(field, DocFields[i], Field.Store.NO));
+                writer.AddDocument(doc);
+            }
+            writer.Dispose();
+            LittleReader = DirectoryReader.Open(Directory);
+            Searcher = NewSearcher(LittleReader);
+            // this is intentionally using the baseline sim, because it compares against bigSearcher (which uses a random one)
+            Searcher.Similarity = new DefaultSimilarity();
+
+            // Make big index
+            Dir2 = new MockDirectoryWrapper(Random(), new RAMDirectory(Directory, IOContext.DEFAULT));
+
+            // First multiply small test index:
+            MulFactor = 1;
+            int docCount = 0;
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: now copy index...");
+            }
+            do
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: cycle...");
+                }
+                Directory copy = new MockDirectoryWrapper(Random(), new RAMDirectory(Dir2, IOContext.DEFAULT));
+                RandomIndexWriter w = new RandomIndexWriter(Random(), Dir2, Similarity, TimeZone);
+                w.AddIndexes(copy);
+                docCount = w.MaxDoc;
+                w.Dispose();
+                MulFactor *= 2;
+            } while (docCount < 3000);
+
+            RandomIndexWriter riw = new RandomIndexWriter(Random(), Dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(TestUtil.NextInt(Random(), 50, 1000)));
+            Document doc_ = new Document();
+            doc_.Add(NewTextField("field2", "xxx", Field.Store.NO));
+            for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++)
+            {
+                riw.AddDocument(doc_);
+            }
+            doc_ = new Document();
+            doc_.Add(NewTextField("field2", "big bad bug", Field.Store.NO));
+            for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++)
+            {
+                riw.AddDocument(doc_);
+            }
+            Reader = riw.Reader;
+            BigSearcher = NewSearcher(Reader);
+            riw.Dispose();
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            Reader.Dispose();
+            LittleReader.Dispose();
+            Dir2.Dispose();
+            Directory.Dispose();
+            Searcher = null;
+            Reader = null;
+            LittleReader = null;
+            Dir2 = null;
+            Directory = null;
+            BigSearcher = null;
+        }
+
+        private static string[] DocFields = new string[] { "w1 w2 w3 w4 w5", "w1 w3 w2 w3", "w1 xx w2 yy w3", "w1 w3 xx w2 yy w3" };
+
+        public virtual void QueriesTest(Query query, int[] expDocNrs)
+        {
+            TopScoreDocCollector collector = TopScoreDocCollector.Create(1000, false);
+            Searcher.Search(query, null, collector);
+            ScoreDoc[] hits1 = collector.GetTopDocs().ScoreDocs;
+
+            collector = TopScoreDocCollector.Create(1000, true);
+            Searcher.Search(query, null, collector);
+            ScoreDoc[] hits2 = collector.GetTopDocs().ScoreDocs;
+
+            Assert.AreEqual(MulFactor * collector.TotalHits, BigSearcher.Search(query, 1).TotalHits);
+
+            CheckHits.CheckHitsQuery(query, hits1, hits2, expDocNrs);
+        }
+
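+        // Editorial note on the invariant asserted above: the big index is the small one
+        // copied MulFactor times (plus field2-only docs that cannot match these queries),
+        // so BigSearcher must report exactly MulFactor * TotalHits for every query; e.g.
+        // with a hypothetical MulFactor of 8, a query matching 2 docs must yield 16 hits.
+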
+        [Test]
+        public virtual void TestQueries01()
+        {
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(field, "w3")), Occur.MUST);
+            query.Add(new TermQuery(new Term(field, "xx")), Occur.MUST);
+            int[] expDocNrs = new int[] { 2, 3 };
+            QueriesTest(query, expDocNrs);
+        }
+
+        [Test]
+        public virtual void TestQueries02()
+        {
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(field, "w3")), Occur.MUST);
+            query.Add(new TermQuery(new Term(field, "xx")), Occur.SHOULD);
+            int[] expDocNrs = new int[] { 2, 3, 1, 0 };
+            QueriesTest(query, expDocNrs);
+        }
+
+        [Test]
+        public virtual void TestQueries03()
+        {
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(field, "w3")), Occur.SHOULD);
+            query.Add(new TermQuery(new Term(field, "xx")), Occur.SHOULD);
+            int[] expDocNrs = new int[] { 2, 3, 1, 0 };
+            QueriesTest(query, expDocNrs);
+        }
+
+        [Test]
+        public virtual void TestQueries04()
+        {
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(field, "w3")), Occur.SHOULD);
+            query.Add(new TermQuery(new Term(field, "xx")), Occur.MUST_NOT);
+            int[] expDocNrs = new int[] { 1, 0 };
+            QueriesTest(query, expDocNrs);
+        }
+
+        [Test]
+        public virtual void TestQueries05()
+        {
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(field, "w3")), Occur.MUST);
+            query.Add(new TermQuery(new Term(field, "xx")), Occur.MUST_NOT);
+            int[] expDocNrs = new int[] { 1, 0 };
+            QueriesTest(query, expDocNrs);
+        }
+
+        [Test]
+        public virtual void TestQueries06()
+        {
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(field, "w3")), Occur.MUST);
+            query.Add(new TermQuery(new Term(field, "xx")), Occur.MUST_NOT);
+            query.Add(new TermQuery(new Term(field, "w5")), Occur.MUST_NOT);
+            int[] expDocNrs = new int[] { 1 };
+            QueriesTest(query, expDocNrs);
+        }
+
+        [Test]
+        public virtual void TestQueries07()
+        {
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(field, "w3")), Occur.MUST_NOT);
+            query.Add(new TermQuery(new Term(field, "xx")), Occur.MUST_NOT);
+            query.Add(new TermQuery(new Term(field, "w5")), Occur.MUST_NOT);
+            int[] expDocNrs = new int[] { };
+            QueriesTest(query, expDocNrs);
+        }
+
+        [Test]
+        public virtual void TestQueries08()
+        {
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(field, "w3")), Occur.MUST);
+            query.Add(new TermQuery(new Term(field, "xx")), Occur.SHOULD);
+            query.Add(new TermQuery(new Term(field, "w5")), Occur.MUST_NOT);
+            int[] expDocNrs = new int[] { 2, 3, 1 };
+            QueriesTest(query, expDocNrs);
+        }
+
+        [Test]
+        public virtual void TestQueries09()
+        {
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(field, "w3")), Occur.MUST);
+            query.Add(new TermQuery(new Term(field, "xx")), Occur.MUST);
+            query.Add(new TermQuery(new Term(field, "w2")), Occur.MUST);
+            query.Add(new TermQuery(new Term(field, "zz")), Occur.SHOULD);
+            int[] expDocNrs = new int[] { 2, 3 };
+            QueriesTest(query, expDocNrs);
+        }
+
+        [Test]
+        public virtual void TestQueries10()
+        {
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(field, "w3")), Occur.MUST);
+            query.Add(new TermQuery(new Term(field, "xx")), Occur.MUST);
+            query.Add(new TermQuery(new Term(field, "w2")), Occur.MUST);
+            query.Add(new TermQuery(new Term(field, "zz")), Occur.SHOULD);
+
+            int[] expDocNrs = new int[] { 2, 3 };
+            Similarity oldSimilarity = Searcher.Similarity;
+            try
+            {
+                Searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper(this);
+                QueriesTest(query, expDocNrs);
+            }
+            finally
+            {
+                Searcher.Similarity = oldSimilarity;
+            }
+        }
+
+        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        {
+            private readonly TestBoolean2 OuterInstance;
+
+            public DefaultSimilarityAnonymousInnerClassHelper(TestBoolean2 outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
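+            // Returning overlap / (maxOverlap - 1) instead of the standard
+            // overlap / maxOverlap changes document scores but not the set of matching
+            // docs, so the expected doc numbers in TestQueries10 remain valid.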
+            public override float Coord(int overlap, int maxOverlap)
+            {
+                return overlap / ((float)maxOverlap - 1);
+            }
+        }
+
+        [Test]
+        public virtual void TestRandomQueries()
+        {
+            string[] vals = new string[] { "w1", "w2", "w3", "w4", "w5", "xx", "yy", "zzz" };
+
+            int tot = 0;
+
+            BooleanQuery q1 = null;
+            try
+            {
+                // increase number of iterations for more complete testing
+                int num = AtLeast(20);
+                for (int i = 0; i < num; i++)
+                {
+                    int level = Random().Next(3);
+                    q1 = RandBoolQuery(new Random(Random().Next()), Random().NextBoolean(), level, field, vals, null);
+
+                    // Can't sort by relevance since floating point numbers may not quite
+                    // match up.
+                    Sort sort = Sort.INDEXORDER;
+
+                    QueryUtils.Check(Random(), q1, Searcher, Similarity); // baseline sim
+                    try
+                    {
+                        // a little hackish, QueryUtils.check is too costly to do on bigSearcher in this loop.
+                        Searcher.Similarity = BigSearcher.Similarity; // random sim
+                        QueryUtils.Check(Random(), q1, Searcher, Similarity);
+                    }
+                    finally
+                    {
+                        Searcher.Similarity = new DefaultSimilarity(); // restore
+                    }
+
+                    TopFieldCollector collector = TopFieldCollector.Create(sort, 1000, false, true, true, true);
+
+                    Searcher.Search(q1, null, collector);
+                    ScoreDoc[] hits1 = collector.GetTopDocs().ScoreDocs;
+
+                    collector = TopFieldCollector.Create(sort, 1000, false, true, true, false);
+
+                    Searcher.Search(q1, null, collector);
+                    ScoreDoc[] hits2 = collector.GetTopDocs().ScoreDocs;
+                    tot += hits2.Length;
+                    CheckHits.CheckEqual(q1, hits1, hits2);
+
+                    BooleanQuery q3 = new BooleanQuery();
+                    q3.Add(q1, Occur.SHOULD);
+                    q3.Add(new PrefixQuery(new Term("field2", "b")), Occur.SHOULD);
+                    TopDocs hits4 = BigSearcher.Search(q3, 1);
+                    Assert.AreEqual(MulFactor * collector.TotalHits + NUM_EXTRA_DOCS / 2, hits4.TotalHits);
+                }
+            }
+            catch (Exception)
+            {
+                // For easier debugging
+                Console.WriteLine("failed query: " + q1);
+                throw;
+            }
+
+            // System.out.println("Total hits:"+tot);
+        }
+
+        // used to set properties on, or otherwise modify, every BooleanQuery
+        // generated from RandBoolQuery.
+        public interface Callback
+        {
+            void PostCreate(BooleanQuery q);
+        }
+
+        // Random rnd is passed in so that the exact same random query may be created
+        // more than once.
+        public static BooleanQuery RandBoolQuery(Random rnd, bool allowMust, int level, string field, string[] vals, Callback cb)
+        {
+            BooleanQuery current = new BooleanQuery(rnd.Next() < 0);
+            for (int i = 0; i < rnd.Next(vals.Length) + 1; i++)
+            {
+                int qType = 0; // term query
+                if (level > 0)
+                {
+                    qType = rnd.Next(10);
+                }
+                Query q;
+                if (qType < 3)
+                {
+                    q = new TermQuery(new Term(field, vals[rnd.Next(vals.Length)]));
+                }
+                else if (qType < 4)
+                {
+                    Term t1 = new Term(field, vals[rnd.Next(vals.Length)]);
+                    Term t2 = new Term(field, vals[rnd.Next(vals.Length)]);
+                    PhraseQuery pq = new PhraseQuery();
+                    pq.Add(t1);
+                    pq.Add(t2);
+                    pq.Slop = 10; // increase possibility of matching
+                    q = pq;
+                }
+                else if (qType < 7)
+                {
+                    q = new WildcardQuery(new Term(field, "w*"));
+                }
+                else
+                {
+                    q = RandBoolQuery(rnd, allowMust, level - 1, field, vals, cb);
+                }
+
+                int r = rnd.Next(10);
+                Occur occur;
+                if (r < 2)
+                {
+                    occur = Occur.MUST_NOT;
+                }
+                else if (r < 5)
+                {
+                    if (allowMust)
+                    {
+                        occur = Occur.MUST;
+                    }
+                    else
+                    {
+                        occur = Occur.SHOULD;
+                    }
+                }
+                else
+                {
+                    occur = Occur.SHOULD;
+                }
+
+                current.Add(q, occur);
+            }
+            if (cb != null)
+            {
+                cb.PostCreate(current);
+            }
+            return current;
+        }
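+
+        // Usage sketch (illustrative, not part of the original test): because the Random
+        // is passed in, two identically seeded calls reproduce the same query tree; given
+        // some string[] vals:
+        //
+        //   int seed = Random().Next();
+        //   BooleanQuery a = RandBoolQuery(new Random(seed), true, 2, field, vals, null);
+        //   BooleanQuery b = RandBoolQuery(new Random(seed), true, 2, field, vals, null);
+        //   // a and b are structurally identical, so a.Equals(b) holds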
+    }
+}
\ No newline at end of file


[62/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Renamed Codecs\lucene42\ to Codecs\Lucene42\

Posted by ni...@apache.org.
Lucene.Net.TestFramework: Renamed Codecs\lucene42\ to Codecs\Lucene42\


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/9e2f4c5b
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/9e2f4c5b
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/9e2f4c5b

Branch: refs/heads/api-work
Commit: 9e2f4c5b96e5d3c4cce0e38da1609f7b03a3dedb
Parents: 9138d1b
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 26 03:17:42 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:17:58 2017 +0700

----------------------------------------------------------------------
 .../Lucene42/Lucene42DocValuesConsumer.cs       | 469 +++++++++++++++++++
 .../Codecs/Lucene42/Lucene42FieldInfosWriter.cs | 145 ++++++
 .../Codecs/Lucene42/Lucene42RWCodec.cs          |  99 ++++
 .../Lucene42/Lucene42RWDocValuesFormat.cs       |  67 +++
 .../lucene42/Lucene42DocValuesConsumer.cs       | 469 -------------------
 .../Codecs/lucene42/Lucene42FieldInfosWriter.cs | 145 ------
 .../Codecs/lucene42/Lucene42RWCodec.cs          |  99 ----
 .../lucene42/Lucene42RWDocValuesFormat.cs       |  67 ---
 .../Lucene.Net.TestFramework.csproj             |   8 +-
 9 files changed, 784 insertions(+), 784 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9e2f4c5b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs
new file mode 100644
index 0000000..7441346
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42DocValuesConsumer.cs
@@ -0,0 +1,469 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.IO;
+using System.Linq;
+
+namespace Lucene.Net.Codecs.Lucene42
+{
+    using Lucene.Net.Util.Fst;
+    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
+    using BlockPackedWriter = Lucene.Net.Util.Packed.BlockPackedWriter;
+    using ByteArrayDataOutput = Lucene.Net.Store.ByteArrayDataOutput;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using FormatAndBits = Lucene.Net.Util.Packed.PackedInt32s.FormatAndBits;
+    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using INPUT_TYPE = Lucene.Net.Util.Fst.FST.INPUT_TYPE;
+    using Int32sRef = Lucene.Net.Util.Int32sRef;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using MathUtil = Lucene.Net.Util.MathUtil;
+    using MonotonicBlockPackedWriter = Lucene.Net.Util.Packed.MonotonicBlockPackedWriter;
+    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
+    using PositiveInt32Outputs = Lucene.Net.Util.Fst.PositiveInt32Outputs;
+    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
+    using Util = Lucene.Net.Util.Fst.Util;
+
+    // Constants are taken from Lucene42DocValuesProducer.
+
+    /// <summary>
+    /// Writer for <see cref="Lucene42DocValuesFormat"/>.
+    /// </summary>
+#pragma warning disable 612, 618
+    internal class Lucene42DocValuesConsumer : DocValuesConsumer
+    {
+        internal readonly IndexOutput Data, Meta;
+        internal readonly int MaxDoc;
+        internal readonly float AcceptableOverheadRatio;
+
+        internal Lucene42DocValuesConsumer(SegmentWriteState state, string dataCodec, string dataExtension, string metaCodec, string metaExtension, float acceptableOverheadRatio)
+        {
+            this.AcceptableOverheadRatio = acceptableOverheadRatio;
+            MaxDoc = state.SegmentInfo.DocCount;
+            bool success = false;
+            try
+            {
+                string dataName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, dataExtension);
+                Data = state.Directory.CreateOutput(dataName, state.Context);
+                // this writer writes the format 4.2 did!
+                CodecUtil.WriteHeader(Data, dataCodec, Lucene42DocValuesProducer.VERSION_GCD_COMPRESSION);
+                string metaName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, metaExtension);
+                Meta = state.Directory.CreateOutput(metaName, state.Context);
+                CodecUtil.WriteHeader(Meta, metaCodec, Lucene42DocValuesProducer.VERSION_GCD_COMPRESSION);
+                success = true;
+            }
+            finally
+            {
+                if (!success)
+                {
+                    IOUtils.CloseWhileHandlingException(this);
+                }
+            }
+        }
+
+        public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
+        {
+            AddNumericField(field, values, true);
+        }
+
+        internal virtual void AddNumericField(FieldInfo field, IEnumerable<long?> values, bool optimizeStorage)
+        {
+            Meta.WriteVInt32(field.Number);
+            Meta.WriteByte((byte)Lucene42DocValuesProducer.NUMBER);
+            Meta.WriteInt64(Data.FilePointer);
+            long minValue = long.MaxValue;
+            long maxValue = long.MinValue;
+            long gcd = 0;
+            // TODO: more efficient?
+            HashSet<long> uniqueValues = null;
+            if (optimizeStorage)
+            {
+                uniqueValues = new HashSet<long>();
+
+                long count = 0;
+                foreach (long? nv in values)
+                {
+                    // TODO: support this as MemoryDVFormat (and be smart about missing maybe)
+                    long v = nv == null ? 0 : (long)nv;
+
+                    if (gcd != 1)
+                    {
+                        if (v < long.MinValue / 2 || v > long.MaxValue / 2)
+                        {
+                            // in that case v - minValue might overflow and make the GCD computation return
+                            // wrong results. Since these extreme values are unlikely, we just discard
+                            // GCD computation for them
+                            gcd = 1;
+                        } // minValue needs to be set first
+                        else if (count != 0)
+                        {
+                            gcd = MathUtil.Gcd(gcd, v - minValue);
+                        }
+                    }
+
+                    minValue = Math.Min(minValue, v);
+                    maxValue = Math.Max(maxValue, v);
+
+                    if (uniqueValues != null)
+                    {
+                        if (uniqueValues.Add(v))
+                        {
+                            if (uniqueValues.Count > 256)
+                            {
+                                uniqueValues = null;
+                            }
+                        }
+                    }
+
+                    ++count;
+                }
+                Debug.Assert(count == MaxDoc);
+            }
+
+            if (uniqueValues != null)
+            {
+                // small number of unique values
+                int bitsPerValue = PackedInt32s.BitsRequired(uniqueValues.Count - 1);
+                FormatAndBits formatAndBits = PackedInt32s.FastestFormatAndBits(MaxDoc, bitsPerValue, AcceptableOverheadRatio);
+                if (formatAndBits.BitsPerValue == 8 && minValue >= sbyte.MinValue && maxValue <= sbyte.MaxValue)
+                {
+                    Meta.WriteByte((byte)Lucene42DocValuesProducer.UNCOMPRESSED); // uncompressed
+                    foreach (long? nv in values)
+                    {
+                        Data.WriteByte(nv == null ? (byte)0 : (byte)nv);
+                    }
+                }
+                else
+                {
+                    Meta.WriteByte((byte)Lucene42DocValuesProducer.TABLE_COMPRESSED); // table-compressed
+                    long[] decode = uniqueValues.ToArray(/*new long?[uniqueValues.Count]*/);
+                    var encode = new Dictionary<long, int>();
+                    Data.WriteVInt32(decode.Length);
+                    for (int i = 0; i < decode.Length; i++)
+                    {
+                        Data.WriteInt64(decode[i]);
+                        encode[decode[i]] = i;
+                    }
+
+                    Meta.WriteVInt32(PackedInt32s.VERSION_CURRENT);
+                    Data.WriteVInt32(formatAndBits.Format.Id);
+                    Data.WriteVInt32(formatAndBits.BitsPerValue);
+
+                    PackedInt32s.Writer writer = PackedInt32s.GetWriterNoHeader(Data, formatAndBits.Format, MaxDoc, formatAndBits.BitsPerValue, PackedInt32s.DEFAULT_BUFFER_SIZE);
+                    foreach (long? nv in values)
+                    {
+                        writer.Add(encode[nv == null ? 0 : (long)nv]);
+                    }
+                    writer.Finish();
+                }
+            }
+            else if (gcd != 0 && gcd != 1)
+            {
+                Meta.WriteByte((byte)Lucene42DocValuesProducer.GCD_COMPRESSED);
+                Meta.WriteVInt32(PackedInt32s.VERSION_CURRENT);
+                Data.WriteInt64(minValue);
+                Data.WriteInt64(gcd);
+                Data.WriteVInt32(Lucene42DocValuesProducer.BLOCK_SIZE);
+
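+                // Worked example (editorial): values {100, 130, 190} give minValue = 100
+                // and gcd = 30, so the writer stores (v - 100) / 30 = {0, 1, 3}; a reader
+                // can reconstruct v = stored * gcd + minValue from just 2 bits per value.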
+                BlockPackedWriter writer = new BlockPackedWriter(Data, Lucene42DocValuesProducer.BLOCK_SIZE);
+                foreach (long? nv in values)
+                {
+                    long value = nv == null ? 0 : (long)nv;
+                    writer.Add((value - minValue) / gcd);
+                }
+                writer.Finish();
+            }
+            else
+            {
+                Meta.WriteByte((byte)Lucene42DocValuesProducer.DELTA_COMPRESSED); // delta-compressed
+
+                Meta.WriteVInt32(PackedInt32s.VERSION_CURRENT);
+                Data.WriteVInt32(Lucene42DocValuesProducer.BLOCK_SIZE);
+
+                BlockPackedWriter writer = new BlockPackedWriter(Data, Lucene42DocValuesProducer.BLOCK_SIZE);
+                foreach (long? nv in values)
+                {
+                    writer.Add(nv == null ? 0 : (long)nv);
+                }
+                writer.Finish();
+            }
+        }
+
+        protected override void Dispose(bool disposing)
+        {
+            if (disposing)
+            {
+                bool success = false;
+                try
+                {
+                    if (Meta != null)
+                    {
+                        Meta.WriteVInt32(-1); // write EOF marker
+                    }
+                    success = true;
+                }
+                finally
+                {
+                    if (success)
+                    {
+                        IOUtils.Close(Data, Meta);
+                    }
+                    else
+                    {
+                        IOUtils.CloseWhileHandlingException(Data, Meta);
+                    }
+                }
+            }
+        }
+
+        public override void AddBinaryField(FieldInfo field, IEnumerable<BytesRef> values)
+        {
+            // write the byte[] data
+            Meta.WriteVInt32(field.Number);
+            Meta.WriteByte((byte)Lucene42DocValuesProducer.BYTES);
+            int minLength = int.MaxValue;
+            int maxLength = int.MinValue;
+            long startFP = Data.FilePointer;
+            foreach (BytesRef v in values)
+            {
+                int length = v == null ? 0 : v.Length;
+                if (length > Lucene42DocValuesFormat.MAX_BINARY_FIELD_LENGTH)
+                {
+                    throw new System.ArgumentException("DocValuesField \"" + field.Name + "\" is too large, must be <= " + Lucene42DocValuesFormat.MAX_BINARY_FIELD_LENGTH);
+                }
+                minLength = Math.Min(minLength, length);
+                maxLength = Math.Max(maxLength, length);
+                if (v != null)
+                {
+                    Data.WriteBytes(v.Bytes, v.Offset, v.Length);
+                }
+            }
+            Meta.WriteInt64(startFP);
+            Meta.WriteInt64(Data.FilePointer - startFP);
+            Meta.WriteVInt32(minLength);
+            Meta.WriteVInt32(maxLength);
+
+            // if minLength == maxLength, it's a fixed-length byte[]; we are done (the addresses are implicit)
+            // otherwise, we need to record the length fields...
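+            // (editorial example: values with lengths {3, 0, 5} produce monotonic end
+            // addresses {3, 3, 8}, from which each slice's offset and length are derived)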
+            if (minLength != maxLength)
+            {
+                Meta.WriteVInt32(PackedInt32s.VERSION_CURRENT);
+                Meta.WriteVInt32(Lucene42DocValuesProducer.BLOCK_SIZE);
+
+                MonotonicBlockPackedWriter writer = new MonotonicBlockPackedWriter(Data, Lucene42DocValuesProducer.BLOCK_SIZE);
+                long addr = 0;
+                foreach (BytesRef v in values)
+                {
+                    if (v != null)
+                    {
+                        addr += v.Length;
+                    }
+                    writer.Add(addr);
+                }
+                writer.Finish();
+            }
+        }
+
+        private void WriteFST(FieldInfo field, IEnumerable<BytesRef> values)
+        {
+            Meta.WriteVInt32(field.Number);
+            Meta.WriteByte((byte)Lucene42DocValuesProducer.FST);
+            Meta.WriteInt64(Data.FilePointer);
+            PositiveInt32Outputs outputs = PositiveInt32Outputs.Singleton;
+            Builder<long?> builder = new Builder<long?>(INPUT_TYPE.BYTE1, outputs);
+            Int32sRef scratch = new Int32sRef();
+            long ord = 0;
+            foreach (BytesRef v in values)
+            {
+                builder.Add(Util.ToInt32sRef(v, scratch), ord);
+                ord++;
+            }
+
+            var fst = builder.Finish();
+            if (fst != null)
+            {
+                fst.Save(Data);
+            }
+            Meta.WriteVInt64(ord);
+        }
+
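+        // Editorial sketch of what WriteFST produces: an FST mapping each value's bytes
+        // to its ordinal, e.g. for the sorted values {"bar", "baz", "foo"}:
+        //
+        //   "bar" -> 0, "baz" -> 1, "foo" -> 2
+        //
+        // Shared prefixes ("ba") are stored once, which is why values arrive in sorted order.
+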
+        public override void AddSortedField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd)
+        {
+            // three cases for simulating the old writer:
+            // 1. no missing
+            // 2. missing (and empty string in use): remap ord=-1 -> ord=0
+            // 3. missing (and empty string not in use): remap all ords +1, insert empty string into values
+            bool anyMissing = false;
+            foreach (long? n in docToOrd)
+            {
+                if (n.Value == -1)
+                {
+                    anyMissing = true;
+                    break;
+                }
+            }
+
+            bool hasEmptyString = false;
+            foreach (BytesRef b in values)
+            {
+                hasEmptyString = b.Length == 0;
+                break;
+            }
+
+            if (!anyMissing)
+            {
+                // nothing to do
+            }
+            else if (hasEmptyString)
+            {
+                docToOrd = MissingOrdRemapper.MapMissingToOrd0(docToOrd);
+            }
+            else
+            {
+                docToOrd = MissingOrdRemapper.MapAllOrds(docToOrd);
+                values = MissingOrdRemapper.InsertEmptyValue(values);
+            }
+
+            // write the ordinals as numerics
+            AddNumericField(field, docToOrd, false);
+
+            // write the values as FST
+            WriteFST(field, values);
+        }
+
+        // note: this might not be the most efficient... but it's fairly simple
+        public override void AddSortedSetField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
+        {
+            // write the ordinals as a binary field
+            AddBinaryField(field, new IterableAnonymousInnerClassHelper(this, docToOrdCount, ords));
+
+            // write the values as FST
+            WriteFST(field, values);
+        }
+
+        private class IterableAnonymousInnerClassHelper : IEnumerable<BytesRef>
+        {
+            private readonly Lucene42DocValuesConsumer OuterInstance;
+
+            private IEnumerable<long?> DocToOrdCount;
+            private IEnumerable<long?> Ords;
+
+            public IterableAnonymousInnerClassHelper(Lucene42DocValuesConsumer outerInstance, IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
+            {
+                this.OuterInstance = outerInstance;
+                this.DocToOrdCount = docToOrdCount;
+                this.Ords = ords;
+            }
+
+            public IEnumerator<BytesRef> GetEnumerator()
+            {
+                return new SortedSetIterator(DocToOrdCount.GetEnumerator(), Ords.GetEnumerator());
+            }
+
+            System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
+            {
+                return GetEnumerator();
+            }
+        }
+
+        // per-document vint-encoded byte[]
+        internal class SortedSetIterator : IEnumerator<BytesRef>
+        {
+            internal byte[] Buffer = new byte[10];
+            internal ByteArrayDataOutput @out = new ByteArrayDataOutput();
+            internal BytesRef @ref = new BytesRef();
+
+            internal readonly IEnumerator<long?> Counts;
+            internal readonly IEnumerator<long?> Ords;
+
+            internal SortedSetIterator(IEnumerator<long?> counts, IEnumerator<long?> ords)
+            {
+                this.Counts = counts;
+                this.Ords = ords;
+            }
+
+            public bool MoveNext()
+            {
+                if (!Counts.MoveNext())
+                {
+                    return false;
+                }
+
+                int count = (int)Counts.Current;
+                int maxSize = count * 9; // worst case: each vint-encoded delta takes at most 9 bytes
+                if (maxSize > Buffer.Length)
+                {
+                    Buffer = ArrayUtil.Grow(Buffer, maxSize);
+                }
+
+                try
+                {
+                    EncodeValues(count);
+                }
+                catch (IOException bogus)
+                {
+                    throw new Exception(bogus.Message, bogus);
+                }
+
+                @ref.Bytes = Buffer;
+                @ref.Offset = 0;
+                @ref.Length = @out.Position;
+
+                return true;
+            }
+
+            public BytesRef Current
+            {
+                get { return @ref; }
+            }
+
+            object System.Collections.IEnumerator.Current
+            {
+                get { return Current; }
+            }
+
+            // encodes count values to buffer
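+            // (editorial example: ords {3, 5, 9} for one document are written as the
+            // vint deltas 3, 2, 4, each ord being encoded relative to the previous one)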
+            internal virtual void EncodeValues(int count)
+            {
+                @out.Reset(Buffer);
+                long lastOrd = 0;
+                for (int i = 0; i < count; i++)
+                {
+                    Ords.MoveNext();
+                    long ord = Ords.Current.Value;
+                    @out.WriteVInt64(ord - lastOrd);
+                    lastOrd = ord;
+                }
+            }
+
+            public void Reset()
+            {
+                throw new NotImplementedException();
+            }
+
+            public void Dispose()
+            {
+            }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9e2f4c5b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42FieldInfosWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42FieldInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42FieldInfosWriter.cs
new file mode 100644
index 0000000..acdae7b
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42FieldInfosWriter.cs
@@ -0,0 +1,145 @@
+using System;
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Lucene42
+{
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using DocValuesType = Lucene.Net.Index.DocValuesType;
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using FieldInfos = Lucene.Net.Index.FieldInfos;
+    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IndexOptions = Lucene.Net.Index.IndexOptions;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+
+    /// <summary>
+    /// Lucene 4.2 FieldInfos writer.
+    /// </summary>
+    /// <seealso cref="Lucene42FieldInfosFormat"/>
+    /// <remarks>@lucene.experimental</remarks>
+    [Obsolete]
+    public sealed class Lucene42FieldInfosWriter : FieldInfosWriter
+    {
+        /// <summary>
+        /// Sole constructor. </summary>
+        public Lucene42FieldInfosWriter()
+        {
+        }
+
+        public override void Write(Directory directory, string segmentName, string segmentSuffix, FieldInfos infos, IOContext context)
+        {
+            string fileName = IndexFileNames.SegmentFileName(segmentName, "", Lucene42FieldInfosFormat.EXTENSION);
+            IndexOutput output = directory.CreateOutput(fileName, context);
+            bool success = false;
+            try
+            {
+                CodecUtil.WriteHeader(output, Lucene42FieldInfosFormat.CODEC_NAME, Lucene42FieldInfosFormat.FORMAT_CURRENT);
+                output.WriteVInt32(infos.Count);
+                foreach (FieldInfo fi in infos)
+                {
+                    IndexOptions? indexOptions = fi.IndexOptions;
+                    sbyte bits = 0x0;
+                    if (fi.HasVectors)
+                    {
+                        bits |= Lucene42FieldInfosFormat.STORE_TERMVECTOR;
+                    }
+                    if (fi.OmitsNorms)
+                    {
+                        bits |= Lucene42FieldInfosFormat.OMIT_NORMS;
+                    }
+                    if (fi.HasPayloads)
+                    {
+                        bits |= Lucene42FieldInfosFormat.STORE_PAYLOADS;
+                    }
+                    if (fi.IsIndexed)
+                    {
+                        bits |= Lucene42FieldInfosFormat.IS_INDEXED;
+                        Debug.Assert(indexOptions >= IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !fi.HasPayloads);
+                        if (indexOptions == IndexOptions.DOCS_ONLY)
+                        {
+                            bits |= Lucene42FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS;
+                        }
+                        else if (indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
+                        {
+                            bits |= Lucene42FieldInfosFormat.STORE_OFFSETS_IN_POSTINGS;
+                        }
+                        else if (indexOptions == IndexOptions.DOCS_AND_FREQS)
+                        {
+                            bits |= Lucene42FieldInfosFormat.OMIT_POSITIONS;
+                        }
+                    }
+                    output.WriteString(fi.Name);
+                    output.WriteVInt32(fi.Number);
+                    output.WriteByte((byte)bits);
+
+                    // pack the DV types in one byte
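+                    // (editorial example: NUMERIC doc values (1) with NUMERIC norms (1)
+                    // pack as (1 << 4) | 1 = 0x11, one nibble per type)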
+                    var dv = DocValuesByte(fi.DocValuesType);
+                    var nrm = DocValuesByte(fi.NormType);
+                    Debug.Assert((dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
+                    var val = unchecked((sbyte)(0xff & ((nrm << 4) | dv)));
+                    output.WriteByte((byte)val);
+                    output.WriteStringStringMap(fi.Attributes);
+                }
+                success = true;
+            }
+            finally
+            {
+                if (success)
+                {
+                    output.Dispose();
+                }
+                else
+                {
+                    IOUtils.CloseWhileHandlingException(output);
+                }
+            }
+        }
+
+        private static sbyte DocValuesByte(DocValuesType? type)
+        {
+            if (type == null)
+            {
+                return 0;
+            }
+            else if (type == DocValuesType.NUMERIC)
+            {
+                return 1;
+            }
+            else if (type == DocValuesType.BINARY)
+            {
+                return 2;
+            }
+            else if (type == DocValuesType.SORTED)
+            {
+                return 3;
+            }
+            else if (type == DocValuesType.SORTED_SET)
+            {
+                return 4;
+            }
+            else
+            {
+                throw new InvalidOperationException();
+            }
+        }
+    }
+}
\ No newline at end of file
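
The writer above folds the per-field boolean flags into one byte, then packs the
two 4-bit type codes into a second byte: norms in the high nibble, doc values in
the low nibble. A minimal standalone sketch of that packing and its inverse, using
hypothetical PackTypes/UnpackTypes names that are not part of the codec:

    using System.Diagnostics;

    internal static class DvByteSketch
    {
        // Pack two 4-bit codes into one byte: norms high, doc values low.
        internal static byte PackTypes(int dv, int nrm)
        {
            Debug.Assert((dv & ~0xF) == 0 && (nrm & ~0xF) == 0);
            return (byte)((nrm << 4) | dv);
        }

        // Inverse of PackTypes; mirrors what a reader of this format does
        // with the packed byte.
        internal static void UnpackTypes(byte b, out int dv, out int nrm)
        {
            dv = b & 0xF;
            nrm = (b >> 4) & 0xF;
        }
    }

With the codes produced by DocValuesByte (0 = none, 1 = NUMERIC, 2 = BINARY,
3 = SORTED, 4 = SORTED_SET), a field with numeric doc values and numeric norms
packs to 0x11.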

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9e2f4c5b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWCodec.cs
new file mode 100644
index 0000000..39e3b66
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWCodec.cs
@@ -0,0 +1,99 @@
+namespace Lucene.Net.Codecs.Lucene42
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /// <summary>
+    /// Read-write version of <seealso cref="Lucene42Codec"/> for testing.
+    /// </summary>
+#pragma warning disable 612, 618
+    public class Lucene42RWCodec : Lucene42Codec
+    {
+        private readonly DocValuesFormat Dv;
+        private readonly NormsFormat Norms = new Lucene42NormsFormat();
+        private readonly FieldInfosFormat fieldInfosFormat;
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Creates the codec with OldFormatImpersonationIsActive = true.
+        /// </summary>
+        /// <remarks>
+        /// Added so that SPIClassIterator can locate this Codec.  The iterator
+        /// only recognises classes that have parameterless constructors.
+        /// </remarks>
+        public Lucene42RWCodec()
+            : this(true)
+        { }
+
+        /// <param name="oldFormatImpersonationIsActive">
+        /// LUCENENET specific
+        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
+        /// </param>
+        public Lucene42RWCodec(bool oldFormatImpersonationIsActive) : base()
+        {
+            Dv = new Lucene42RWDocValuesFormat(oldFormatImpersonationIsActive);
+            fieldInfosFormat = new Lucene42FieldInfosFormatAnonymousInnerClassHelper(oldFormatImpersonationIsActive);
+        }
+
+        private class Lucene42FieldInfosFormatAnonymousInnerClassHelper : Lucene42FieldInfosFormat
+        {
+            private readonly bool _oldFormatImpersonationIsActive;
+
+            /// <param name="oldFormatImpersonationIsActive">
+            /// LUCENENET specific
+            /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
+            /// </param>
+            public Lucene42FieldInfosFormatAnonymousInnerClassHelper(bool oldFormatImpersonationIsActive) : base()
+            {
+                _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
+            }
+
+            public override FieldInfosWriter FieldInfosWriter
+            {
+                get
+                {
+                    if (!_oldFormatImpersonationIsActive)
+                    {
+                        return base.FieldInfosWriter;
+                    }
+                    else
+                    {
+                        return new Lucene42FieldInfosWriter();
+                    }
+                }
+            }
+        }
+
+        public override DocValuesFormat GetDocValuesFormatForField(string field)
+        {
+            return Dv;
+        }
+
+        public override NormsFormat NormsFormat
+        {
+            get { return Norms; }
+        }
+
+        public override FieldInfosFormat FieldInfosFormat
+        {
+            get { return fieldInfosFormat; }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file
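
A read-write codec like this exists so tests can still write segments in the old
4.2 on-disk format. A hedged sketch of how a test might install it for one run;
Codec.Default is the real Lucene.NET switch, while the try/finally scaffolding
and the ellipsis are illustrative only:

    using Lucene.Net.Codecs;
    using Lucene.Net.Codecs.Lucene42;

    Codec saved = Codec.Default;
    try
    {
        // Newly flushed segments now impersonate the 4.2 format.
        Codec.Default = new Lucene42RWCodec(oldFormatImpersonationIsActive: true);
        // ... open an IndexWriter, add documents, commit ...
    }
    finally
    {
        Codec.Default = saved; // restore so later tests are unaffected
    }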

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9e2f4c5b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWDocValuesFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWDocValuesFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWDocValuesFormat.cs
new file mode 100644
index 0000000..1a29fe6
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene42/Lucene42RWDocValuesFormat.cs
@@ -0,0 +1,67 @@
+namespace Lucene.Net.Codecs.Lucene42
+{
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
+
+    /// <summary>
+    /// Read-write version of <seealso cref="Lucene42DocValuesFormat"/> for testing.
+    /// </summary>
+#pragma warning disable 612, 618
+    public class Lucene42RWDocValuesFormat : Lucene42DocValuesFormat
+    {
+        private readonly bool _oldFormatImpersonationIsActive;
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Creates the codec with OldFormatImpersonationIsActive = true.
+        /// </summary>
+        /// <remarks>
+        /// Added so that SPIClassIterator can locate this Codec.  The iterator
+        /// only recognises classes that have parameterless constructors.
+        /// </remarks>
+        public Lucene42RWDocValuesFormat()
+            : this(true)
+        { }
+
+        /// <param name="oldFormatImpersonationIsActive">
+        /// LUCENENET specific
+        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
+        /// </param>
+        public Lucene42RWDocValuesFormat(bool oldFormatImpersonationIsActive) : base()
+        {
+            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
+        }
+
+        public override DocValuesConsumer FieldsConsumer(SegmentWriteState state)
+        {
+            if (!_oldFormatImpersonationIsActive)
+            {
+                return base.FieldsConsumer(state);
+            }
+            else
+            {
+                // note: we choose DEFAULT here (it's reasonably fast, and for small bpv has tiny waste)
+                return new Lucene42DocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, METADATA_CODEC, METADATA_EXTENSION, m_acceptableOverheadRatio);
+            }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file
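
The m_acceptableOverheadRatio forwarded here is the knob PackedInt32s uses to
trade space against decode speed when it later picks a packed format.
FastestFormatAndBits is the same call the Lucene42DocValuesConsumer in the next
hunk makes; the numbers in this hedged example are arbitrary, and
PackedInt32s.DEFAULT is assumed to be the ratio the "we choose DEFAULT" comment
refers to:

    using System;
    using Lucene.Net.Util.Packed;

    // For 1000 values that need 5 bits each, ask which packed format the
    // DEFAULT overhead ratio selects; it may round bits per value up for speed.
    var fab = PackedInt32s.FastestFormatAndBits(1000, 5, PackedInt32s.DEFAULT);
    Console.WriteLine(fab.Format + " @ " + fab.BitsPerValue + " bits/value");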

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9e2f4c5b/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42DocValuesConsumer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42DocValuesConsumer.cs b/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42DocValuesConsumer.cs
deleted file mode 100644
index 7441346..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42DocValuesConsumer.cs
+++ /dev/null
@@ -1,469 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.IO;
-using System.Linq;
-
-namespace Lucene.Net.Codecs.Lucene42
-{
-    using Lucene.Net.Util.Fst;
-    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
-    using BlockPackedWriter = Lucene.Net.Util.Packed.BlockPackedWriter;
-    using ByteArrayDataOutput = Lucene.Net.Store.ByteArrayDataOutput;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FormatAndBits = Lucene.Net.Util.Packed.PackedInt32s.FormatAndBits;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using INPUT_TYPE = Lucene.Net.Util.Fst.FST.INPUT_TYPE;
-    using Int32sRef = Lucene.Net.Util.Int32sRef;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using MathUtil = Lucene.Net.Util.MathUtil;
-    using MonotonicBlockPackedWriter = Lucene.Net.Util.Packed.MonotonicBlockPackedWriter;
-    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
-    using PositiveInt32Outputs = Lucene.Net.Util.Fst.PositiveInt32Outputs;
-    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
-    using Util = Lucene.Net.Util.Fst.Util;
-
-    //   Constants use Lucene42DocValuesProducer.
-
-    /// <summary>
-    /// Writer for <seealso cref="Lucene42DocValuesFormat"/>
-    /// </summary>
-#pragma warning disable 612, 618
-    internal class Lucene42DocValuesConsumer : DocValuesConsumer
-    {
-        internal readonly IndexOutput Data, Meta;
-        internal readonly int MaxDoc;
-        internal readonly float AcceptableOverheadRatio;
-
-        internal Lucene42DocValuesConsumer(SegmentWriteState state, string dataCodec, string dataExtension, string metaCodec, string metaExtension, float acceptableOverheadRatio)
-        {
-            this.AcceptableOverheadRatio = acceptableOverheadRatio;
-            MaxDoc = state.SegmentInfo.DocCount;
-            bool success = false;
-            try
-            {
-                string dataName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, dataExtension);
-                Data = state.Directory.CreateOutput(dataName, state.Context);
-                // this writer writes the format 4.2 did!
-                CodecUtil.WriteHeader(Data, dataCodec, Lucene42DocValuesProducer.VERSION_GCD_COMPRESSION);
-                string metaName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, metaExtension);
-                Meta = state.Directory.CreateOutput(metaName, state.Context);
-                CodecUtil.WriteHeader(Meta, metaCodec, Lucene42DocValuesProducer.VERSION_GCD_COMPRESSION);
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    IOUtils.CloseWhileHandlingException(this);
-                }
-            }
-        }
-
-        public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
-        {
-            AddNumericField(field, values, true);
-        }
-
-        internal virtual void AddNumericField(FieldInfo field, IEnumerable<long?> values, bool optimizeStorage)
-        {
-            Meta.WriteVInt32(field.Number);
-            Meta.WriteByte((byte)Lucene42DocValuesProducer.NUMBER);
-            Meta.WriteInt64(Data.FilePointer);
-            long minValue = long.MaxValue;
-            long maxValue = long.MinValue;
-            long gcd = 0;
-            // TODO: more efficient?
-            HashSet<long> uniqueValues = null;
-            if (optimizeStorage)
-            {
-                uniqueValues = new HashSet<long>();
-
-                long count = 0;
-                foreach (long? nv in values)
-                {
-                    // TODO: support this as MemoryDVFormat (and be smart about missing maybe)
-                    long v = nv == null ? 0 : (long)nv;
-
-                    if (gcd != 1)
-                    {
-                        if (v < long.MinValue / 2 || v > long.MaxValue / 2)
-                        {
-                            // in that case v - minValue might overflow and make the GCD computation return
-                            // wrong results. Since these extreme values are unlikely, we just discard
-                            // GCD computation for them
-                            gcd = 1;
-                        } // minValue needs to be set first
-                        else if (count != 0)
-                        {
-                            gcd = MathUtil.Gcd(gcd, v - minValue);
-                        }
-                    }
-
-                    minValue = Math.Min(minValue, v);
-                    maxValue = Math.Max(maxValue, v);
-
-                    if (uniqueValues != null)
-                    {
-                        if (uniqueValues.Add(v))
-                        {
-                            if (uniqueValues.Count > 256)
-                            {
-                                uniqueValues = null;
-                            }
-                        }
-                    }
-
-                    ++count;
-                }
-                Debug.Assert(count == MaxDoc);
-            }
-
-            if (uniqueValues != null)
-            {
-                // small number of unique values
-                int bitsPerValue = PackedInt32s.BitsRequired(uniqueValues.Count - 1);
-                FormatAndBits formatAndBits = PackedInt32s.FastestFormatAndBits(MaxDoc, bitsPerValue, AcceptableOverheadRatio);
-                if (formatAndBits.BitsPerValue == 8 && minValue >= sbyte.MinValue && maxValue <= sbyte.MaxValue)
-                {
-                    Meta.WriteByte((byte)Lucene42DocValuesProducer.UNCOMPRESSED); // uncompressed
-                    foreach (long? nv in values)
-                    {
-                        Data.WriteByte(nv == null ? (byte)0 : (byte)nv);
-                    }
-                }
-                else
-                {
-                    Meta.WriteByte((byte)Lucene42DocValuesProducer.TABLE_COMPRESSED); // table-compressed
-                    long[] decode = uniqueValues.ToArray(/*new long?[uniqueValues.Count]*/);
-                    var encode = new Dictionary<long, int>();
-                    Data.WriteVInt32(decode.Length);
-                    for (int i = 0; i < decode.Length; i++)
-                    {
-                        Data.WriteInt64(decode[i]);
-                        encode[decode[i]] = i;
-                    }
-
-                    Meta.WriteVInt32(PackedInt32s.VERSION_CURRENT);
-                    Data.WriteVInt32(formatAndBits.Format.Id);
-                    Data.WriteVInt32(formatAndBits.BitsPerValue);
-
-                    PackedInt32s.Writer writer = PackedInt32s.GetWriterNoHeader(Data, formatAndBits.Format, MaxDoc, formatAndBits.BitsPerValue, PackedInt32s.DEFAULT_BUFFER_SIZE);
-                    foreach (long? nv in values)
-                    {
-                        writer.Add(encode[nv == null ? 0 : (long)nv]);
-                    }
-                    writer.Finish();
-                }
-            }
-            else if (gcd != 0 && gcd != 1)
-            {
-                Meta.WriteByte((byte)Lucene42DocValuesProducer.GCD_COMPRESSED);
-                Meta.WriteVInt32(PackedInt32s.VERSION_CURRENT);
-                Data.WriteInt64(minValue);
-                Data.WriteInt64(gcd);
-                Data.WriteVInt32(Lucene42DocValuesProducer.BLOCK_SIZE);
-
-                BlockPackedWriter writer = new BlockPackedWriter(Data, Lucene42DocValuesProducer.BLOCK_SIZE);
-                foreach (long? nv in values)
-                {
-                    long value = nv == null ? 0 : (long)nv;
-                    writer.Add((value - minValue) / gcd);
-                }
-                writer.Finish();
-            }
-            else
-            {
-                Meta.WriteByte((byte)Lucene42DocValuesProducer.DELTA_COMPRESSED); // delta-compressed
-
-                Meta.WriteVInt32(PackedInt32s.VERSION_CURRENT);
-                Data.WriteVInt32(Lucene42DocValuesProducer.BLOCK_SIZE);
-
-                BlockPackedWriter writer = new BlockPackedWriter(Data, Lucene42DocValuesProducer.BLOCK_SIZE);
-                foreach (long? nv in values)
-                {
-                    writer.Add(nv == null ? 0 : (long)nv);
-                }
-                writer.Finish();
-            }
-        }
-
-        protected override void Dispose(bool disposing)
-        {
-            if (disposing)
-            {
-                bool success = false;
-                try
-                {
-                    if (Meta != null)
-                    {
-                        Meta.WriteVInt32(-1); // write EOF marker
-                    }
-                    success = true;
-                }
-                finally
-                {
-                    if (success)
-                    {
-                        IOUtils.Close(Data, Meta);
-                    }
-                    else
-                    {
-                        IOUtils.CloseWhileHandlingException(Data, Meta);
-                    }
-                }
-            }
-        }
-
-        public override void AddBinaryField(FieldInfo field, IEnumerable<BytesRef> values)
-        {
-            // write the byte[] data
-            Meta.WriteVInt32(field.Number);
-            Meta.WriteByte((byte)Lucene42DocValuesProducer.BYTES);
-            int minLength = int.MaxValue;
-            int maxLength = int.MinValue;
-            long startFP = Data.FilePointer;
-            foreach (BytesRef v in values)
-            {
-                int length = v == null ? 0 : v.Length;
-                if (length > Lucene42DocValuesFormat.MAX_BINARY_FIELD_LENGTH)
-                {
-                    throw new System.ArgumentException("DocValuesField \"" + field.Name + "\" is too large, must be <= " + Lucene42DocValuesFormat.MAX_BINARY_FIELD_LENGTH);
-                }
-                minLength = Math.Min(minLength, length);
-                maxLength = Math.Max(maxLength, length);
-                if (v != null)
-                {
-                    Data.WriteBytes(v.Bytes, v.Offset, v.Length);
-                }
-            }
-            Meta.WriteInt64(startFP);
-            Meta.WriteInt64(Data.FilePointer - startFP);
-            Meta.WriteVInt32(minLength);
-            Meta.WriteVInt32(maxLength);
-
-            // if minLength == maxLength, its a fixed-length byte[], we are done (the addresses are implicit)
-            // otherwise, we need to record the length fields...
-            if (minLength != maxLength)
-            {
-                Meta.WriteVInt32(PackedInt32s.VERSION_CURRENT);
-                Meta.WriteVInt32(Lucene42DocValuesProducer.BLOCK_SIZE);
-
-                MonotonicBlockPackedWriter writer = new MonotonicBlockPackedWriter(Data, Lucene42DocValuesProducer.BLOCK_SIZE);
-                long addr = 0;
-                foreach (BytesRef v in values)
-                {
-                    if (v != null)
-                    {
-                        addr += v.Length;
-                    }
-                    writer.Add(addr);
-                }
-                writer.Finish();
-            }
-        }
-
-        private void WriteFST(FieldInfo field, IEnumerable<BytesRef> values)
-        {
-            Meta.WriteVInt32(field.Number);
-            Meta.WriteByte((byte)Lucene42DocValuesProducer.FST);
-            Meta.WriteInt64(Data.FilePointer);
-            PositiveInt32Outputs outputs = PositiveInt32Outputs.Singleton;
-            Builder<long?> builder = new Builder<long?>(INPUT_TYPE.BYTE1, outputs);
-            Int32sRef scratch = new Int32sRef();
-            long ord = 0;
-            foreach (BytesRef v in values)
-            {
-                builder.Add(Util.ToInt32sRef(v, scratch), ord);
-                ord++;
-            }
-
-            var fst = builder.Finish();
-            if (fst != null)
-            {
-                fst.Save(Data);
-            }
-            Meta.WriteVInt64(ord);
-        }
-
-        public override void AddSortedField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd)
-        {
-            // three cases for simulating the old writer:
-            // 1. no missing
-            // 2. missing (and empty string in use): remap ord=-1 -> ord=0
-            // 3. missing (and empty string not in use): remap all ords +1, insert empty string into values
-            bool anyMissing = false;
-            foreach (long? n in docToOrd)
-            {
-                if (n.Value == -1)
-                {
-                    anyMissing = true;
-                    break;
-                }
-            }
-
-            bool hasEmptyString = false;
-            foreach (BytesRef b in values)
-            {
-                hasEmptyString = b.Length == 0;
-                break;
-            }
-
-            if (!anyMissing)
-            {
-                // nothing to do
-            }
-            else if (hasEmptyString)
-            {
-                docToOrd = MissingOrdRemapper.MapMissingToOrd0(docToOrd);
-            }
-            else
-            {
-                docToOrd = MissingOrdRemapper.MapAllOrds(docToOrd);
-                values = MissingOrdRemapper.InsertEmptyValue(values);
-            }
-
-            // write the ordinals as numerics
-            AddNumericField(field, docToOrd, false);
-
-            // write the values as FST
-            WriteFST(field, values);
-        }
-
-        // note: this might not be the most efficient... but its fairly simple
-        public override void AddSortedSetField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
-        {
-            // write the ordinals as a binary field
-            AddBinaryField(field, new IterableAnonymousInnerClassHelper(this, docToOrdCount, ords));
-
-            // write the values as FST
-            WriteFST(field, values);
-        }
-
-        private class IterableAnonymousInnerClassHelper : IEnumerable<BytesRef>
-        {
-            private readonly Lucene42DocValuesConsumer OuterInstance;
-
-            private IEnumerable<long?> DocToOrdCount;
-            private IEnumerable<long?> Ords;
-
-            public IterableAnonymousInnerClassHelper(Lucene42DocValuesConsumer outerInstance, IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
-            {
-                this.OuterInstance = outerInstance;
-                this.DocToOrdCount = docToOrdCount;
-                this.Ords = ords;
-            }
-
-            public IEnumerator<BytesRef> GetEnumerator()
-            {
-                return new SortedSetIterator(DocToOrdCount.GetEnumerator(), Ords.GetEnumerator());
-            }
-
-            System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
-            {
-                return GetEnumerator();
-            }
-        }
-
-        // per-document vint-encoded byte[]
-        internal class SortedSetIterator : IEnumerator<BytesRef>
-        {
-            internal byte[] Buffer = new byte[10];
-            internal ByteArrayDataOutput @out = new ByteArrayDataOutput();
-            internal BytesRef @ref = new BytesRef();
-
-            internal readonly IEnumerator<long?> Counts;
-            internal readonly IEnumerator<long?> Ords;
-
-            internal SortedSetIterator(IEnumerator<long?> counts, IEnumerator<long?> ords)
-            {
-                this.Counts = counts;
-                this.Ords = ords;
-            }
-
-            public bool MoveNext()
-            {
-                if (!Counts.MoveNext())
-                {
-                    return false;
-                }
-
-                int count = (int)Counts.Current;
-                int maxSize = count * 9; //worst case
-                if (maxSize > Buffer.Length)
-                {
-                    Buffer = ArrayUtil.Grow(Buffer, maxSize);
-                }
-
-                try
-                {
-                    EncodeValues(count);
-                }
-                catch (IOException bogus)
-                {
-                    throw new Exception(bogus.Message, bogus);
-                }
-
-                @ref.Bytes = Buffer;
-                @ref.Offset = 0;
-                @ref.Length = @out.Position;
-
-                return true;
-            }
-
-            public BytesRef Current
-            {
-                get { return @ref; }
-            }
-
-            object System.Collections.IEnumerator.Current
-            {
-                get { return Current; }
-            }
-
-            // encodes count values to buffer
-            internal virtual void EncodeValues(int count)
-            {
-                @out.Reset(Buffer);
-                long lastOrd = 0;
-                for (int i = 0; i < count; i++)
-                {
-                    Ords.MoveNext();
-                    long ord = Ords.Current.Value;
-                    @out.WriteVInt64(ord - lastOrd);
-                    lastOrd = ord;
-                }
-            }
-
-            public void Reset()
-            {
-                throw new NotImplementedException();
-            }
-
-            public void Dispose()
-            {
-            }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file
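
AddNumericField above chooses among four encodings: uncompressed single bytes,
a table of at most 256 distinct values, GCD compression when every value minus
the minimum shares a common divisor, and plain delta-compressed blocks otherwise.
A self-contained sketch of the GCD detection, written as an equivalent two-pass
version of the consumer's single-pass loop; GcdSketch and CommonDivisor are
illustrative names, and BigInteger.GreatestCommonDivisor stands in for MathUtil.Gcd:

    using System;
    using System.Numerics;

    internal static class GcdSketch
    {
        // Returns the divisor shared by (v - min) across all values:
        // 0 if every value is identical, 1 if GCD compression cannot help.
        internal static long CommonDivisor(long[] values)
        {
            long min = long.MaxValue;
            foreach (long v in values)
            {
                // Extreme values could make v - min overflow, so the
                // consumer gives up on GCD compression; we do the same.
                if (v < long.MinValue / 2 || v > long.MaxValue / 2) return 1;
                min = Math.Min(min, v);
            }
            long gcd = 0;
            foreach (long v in values)
            {
                gcd = (long)BigInteger.GreatestCommonDivisor(gcd, v - min);
                if (gcd == 1) break; // cannot improve, stop early
            }
            return gcd;
        }
    }

When the divisor ends up 2 or more, each value is stored as (v - min) / gcd,
which is exactly what the BlockPackedWriter branch in the consumer writes.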

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9e2f4c5b/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42FieldInfosWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42FieldInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42FieldInfosWriter.cs
deleted file mode 100644
index acdae7b..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42FieldInfosWriter.cs
+++ /dev/null
@@ -1,145 +0,0 @@
-using System;
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Lucene42
-{
-    using Directory = Lucene.Net.Store.Directory;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using DocValuesType = Lucene.Net.Index.DocValuesType;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using IndexOptions = Lucene.Net.Index.IndexOptions;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-
-    /// <summary>
-    /// Lucene 4.2 FieldInfos writer.
-    /// </summary>
-    /// <seealso> cref= Lucene42FieldInfosFormat
-    /// @lucene.experimental </seealso>
-    [Obsolete]
-    public sealed class Lucene42FieldInfosWriter : FieldInfosWriter
-    {
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene42FieldInfosWriter()
-        {
-        }
-
-        public override void Write(Directory directory, string segmentName, string segmentSuffix, FieldInfos infos, IOContext context)
-        {
-            string fileName = IndexFileNames.SegmentFileName(segmentName, "", Lucene42FieldInfosFormat.EXTENSION);
-            IndexOutput output = directory.CreateOutput(fileName, context);
-            bool success = false;
-            try
-            {
-                CodecUtil.WriteHeader(output, Lucene42FieldInfosFormat.CODEC_NAME, Lucene42FieldInfosFormat.FORMAT_CURRENT);
-                output.WriteVInt32(infos.Count);
-                foreach (FieldInfo fi in infos)
-                {
-                    IndexOptions? indexOptions = fi.IndexOptions;
-                    sbyte bits = 0x0;
-                    if (fi.HasVectors)
-                    {
-                        bits |= Lucene42FieldInfosFormat.STORE_TERMVECTOR;
-                    }
-                    if (fi.OmitsNorms)
-                    {
-                        bits |= Lucene42FieldInfosFormat.OMIT_NORMS;
-                    }
-                    if (fi.HasPayloads)
-                    {
-                        bits |= Lucene42FieldInfosFormat.STORE_PAYLOADS;
-                    }
-                    if (fi.IsIndexed)
-                    {
-                        bits |= Lucene42FieldInfosFormat.IS_INDEXED;
-                        Debug.Assert(indexOptions >= IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !fi.HasPayloads);
-                        if (indexOptions == IndexOptions.DOCS_ONLY)
-                        {
-                            bits |= Lucene42FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS;
-                        }
-                        else if (indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
-                        {
-                            bits |= Lucene42FieldInfosFormat.STORE_OFFSETS_IN_POSTINGS;
-                        }
-                        else if (indexOptions == IndexOptions.DOCS_AND_FREQS)
-                        {
-                            bits |= Lucene42FieldInfosFormat.OMIT_POSITIONS;
-                        }
-                    }
-                    output.WriteString(fi.Name);
-                    output.WriteVInt32(fi.Number);
-                    output.WriteByte((byte)bits);
-
-                    // pack the DV types in one byte
-                    var dv = DocValuesByte(fi.DocValuesType);
-                    var nrm = DocValuesByte(fi.NormType);
-                    Debug.Assert((dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
-                    var val = unchecked((sbyte)(0xff & ((nrm << 4) | dv)));
-                    output.WriteByte((byte)val);
-                    output.WriteStringStringMap(fi.Attributes);
-                }
-                success = true;
-            }
-            finally
-            {
-                if (success)
-                {
-                    output.Dispose();
-                }
-                else
-                {
-                    IOUtils.CloseWhileHandlingException(output);
-                }
-            }
-        }
-
-        private static sbyte DocValuesByte(DocValuesType? type)
-        {
-            if (type == null)
-            {
-                return 0;
-            }
-            else if (type == DocValuesType.NUMERIC)
-            {
-                return 1;
-            }
-            else if (type == DocValuesType.BINARY)
-            {
-                return 2;
-            }
-            else if (type == DocValuesType.SORTED)
-            {
-                return 3;
-            }
-            else if (type == DocValuesType.SORTED_SET)
-            {
-                return 4;
-            }
-            else
-            {
-                throw new InvalidOperationException();
-            }
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9e2f4c5b/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42RWCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42RWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42RWCodec.cs
deleted file mode 100644
index 39e3b66..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42RWCodec.cs
+++ /dev/null
@@ -1,99 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene42
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-
-    /// <summary>
-    /// Read-write version of <seealso cref="Lucene42Codec"/> for testing.
-    /// </summary>
-#pragma warning disable 612, 618
-    public class Lucene42RWCodec : Lucene42Codec
-    {
-        private readonly DocValuesFormat Dv;
-        private readonly NormsFormat Norms = new Lucene42NormsFormat();
-        private readonly FieldInfosFormat fieldInfosFormat;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public Lucene42RWCodec()
-            : this(true)
-        { }
-
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-        /// </param>
-        public Lucene42RWCodec(bool oldFormatImpersonationIsActive) : base()
-        {
-            Dv = new Lucene42RWDocValuesFormat(oldFormatImpersonationIsActive);
-            fieldInfosFormat = new Lucene42FieldInfosFormatAnonymousInnerClassHelper(oldFormatImpersonationIsActive);
-        }
-
-        private class Lucene42FieldInfosFormatAnonymousInnerClassHelper : Lucene42FieldInfosFormat
-        {
-            private readonly bool _oldFormatImpersonationIsActive;
-
-            /// <param name="oldFormatImpersonationIsActive">
-            /// LUCENENET specific
-            /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-            /// </param>
-            public Lucene42FieldInfosFormatAnonymousInnerClassHelper(bool oldFormatImpersonationIsActive) : base()
-            {
-                _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-            }
-
-            public override FieldInfosWriter FieldInfosWriter
-            {
-                get
-                {
-                    if (!_oldFormatImpersonationIsActive)
-                    {
-                        return base.FieldInfosWriter;
-                    }
-                    else
-                    {
-                        return new Lucene42FieldInfosWriter();
-                    }
-                }
-            }
-        }
-
-        public override DocValuesFormat GetDocValuesFormatForField(string field)
-        {
-            return Dv;
-        }
-
-        public override NormsFormat NormsFormat
-        {
-            get { return Norms; }
-        }
-
-        public override FieldInfosFormat FieldInfosFormat
-        {
-            get { return fieldInfosFormat; }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9e2f4c5b/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42RWDocValuesFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42RWDocValuesFormat.cs b/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42RWDocValuesFormat.cs
deleted file mode 100644
index 1a29fe6..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42RWDocValuesFormat.cs
+++ /dev/null
@@ -1,67 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene42
-{
-    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
-
-    /// <summary>
-    /// Read-write version of <seealso cref="Lucene42DocValuesFormat"/> for testing.
-    /// </summary>
-#pragma warning disable 612, 618
-    public class Lucene42RWDocValuesFormat : Lucene42DocValuesFormat
-    {
-        private readonly bool _oldFormatImpersonationIsActive;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public Lucene42RWDocValuesFormat()
-            : this(true)
-        { }
-
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-        /// </param>
-        public Lucene42RWDocValuesFormat(bool oldFormatImpersonationIsActive) : base()
-        {
-            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-        }
-
-        public override DocValuesConsumer FieldsConsumer(SegmentWriteState state)
-        {
-            if (!_oldFormatImpersonationIsActive)
-            {
-                return base.FieldsConsumer(state);
-            }
-            else
-            {
-                // note: we choose DEFAULT here (its reasonably fast, and for small bpv has tiny waste)
-                return new Lucene42DocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, METADATA_CODEC, METADATA_EXTENSION, m_acceptableOverheadRatio);
-            }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/9e2f4c5b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
index 64cf3e5..c7ac221 100644
--- a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
+++ b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
@@ -232,16 +232,16 @@
     <Compile Include="Codecs\Lucene41\Lucene41RWCodec.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene42\Lucene42DocValuesConsumer.cs">
+    <Compile Include="Codecs\Lucene42\Lucene42DocValuesConsumer.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene42\Lucene42FieldInfosWriter.cs">
+    <Compile Include="Codecs\Lucene42\Lucene42FieldInfosWriter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene42\Lucene42RWCodec.cs">
+    <Compile Include="Codecs\Lucene42\Lucene42RWCodec.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene42\Lucene42RWDocValuesFormat.cs">
+    <Compile Include="Codecs\Lucene42\Lucene42RWDocValuesFormat.cs">
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Codecs\lucene45\Lucene45RWCodec.cs">


[21/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
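
The test below exercises payloads, the per-position byte arrays a TokenStream can
attach to postings. As background, a minimal hedged sketch of a TokenFilter that
attaches the same payload to every token: IPayloadAttribute is the real attribute
interface, m_input is assumed to be TokenFilter's protected input field, and
FixedPayloadFilter is an illustrative name, not the PayloadFilter this test defines:

    using Lucene.Net.Analysis;
    using Lucene.Net.Analysis.TokenAttributes;
    using Lucene.Net.Util;

    internal sealed class FixedPayloadFilter : TokenFilter
    {
        private readonly IPayloadAttribute payloadAtt;
        private readonly BytesRef payload;

        public FixedPayloadFilter(TokenStream input, byte[] data)
            : base(input)
        {
            payloadAtt = AddAttribute<IPayloadAttribute>();
            payload = new BytesRef(data);
        }

        public override bool IncrementToken()
        {
            if (!m_input.IncrementToken()) return false;
            payloadAtt.Payload = payload; // same bytes at every position
            return true;
        }
    }
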
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestPayloads.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestPayloads.cs b/src/Lucene.Net.Tests/Index/TestPayloads.cs
new file mode 100644
index 0000000..5c106d9
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestPayloads.cs
@@ -0,0 +1,738 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Text;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Analysis;
+    using Lucene.Net.Support;
+    using Lucene.Net.Util;
+    using NUnit.Framework;
+    using System.IO;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using PayloadAttribute = Lucene.Net.Analysis.TokenAttributes.PayloadAttribute;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestPayloads : LuceneTestCase
+    {
+        // Simple tests of the payloads
+        [Test]
+        public virtual void TestPayload()
+        {
+            BytesRef payload = new BytesRef("this is a test!");
+            Assert.AreEqual(payload.Length, "this is a test!".Length, "Wrong payload length.");
+
+            BytesRef clone = (BytesRef)payload.Clone();
+            Assert.AreEqual(payload.Length, clone.Length);
+            for (int i = 0; i < payload.Length; i++)
+            {
+                Assert.AreEqual(payload.Bytes[i + payload.Offset], clone.Bytes[i + clone.Offset]);
+            }
+        }
+
+        // Tests whether the DocumentWriter and SegmentMerger correctly enable the
+        // payload bit in the FieldInfo
+        [Test]
+        public virtual void TestPayloadFieldBit()
+        {
+            Directory ram = NewDirectory();
+            PayloadAnalyzer analyzer = new PayloadAnalyzer();
+            IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
+            Document d = new Document();
+            // this field won't have any payloads
+            d.Add(NewTextField("f1", "this field has no payloads", Field.Store.NO));
+            // this field will have payloads in all docs, however not for all term positions,
+            // so this field is used to check if the DocumentWriter correctly enables the payloads bit
+            // even if only some term positions have payloads
+            d.Add(NewTextField("f2", "this field has payloads in all docs", Field.Store.NO));
+            d.Add(NewTextField("f2", "this field has payloads in all docs NO PAYLOAD", Field.Store.NO));
+            // this field is used to verify if the SegmentMerger enables payloads for a field if it has payloads
+            // enabled in only some documents
+            d.Add(NewTextField("f3", "this field has payloads in some docs", Field.Store.NO));
+            // only add payload data for field f2
+#pragma warning disable 612, 618
+            analyzer.SetPayloadData("f2", "somedata".GetBytes(IOUtils.CHARSET_UTF_8), 0, 1);
+#pragma warning restore 612, 618
+            writer.AddDocument(d);
+            // flush
+            writer.Dispose();
+
+            SegmentReader reader = GetOnlySegmentReader(DirectoryReader.Open(ram));
+            FieldInfos fi = reader.FieldInfos;
+            Assert.IsFalse(fi.FieldInfo("f1").HasPayloads, "Payload field bit should not be set.");
+            Assert.IsTrue(fi.FieldInfo("f2").HasPayloads, "Payload field bit should be set.");
+            Assert.IsFalse(fi.FieldInfo("f3").HasPayloads, "Payload field bit should not be set.");
+            reader.Dispose();
+
+            // now we add another document which has payloads for field f3 and verify if the SegmentMerger
+            // enabled payloads for that field
+            analyzer = new PayloadAnalyzer(); // Clear payload state for each field
+            writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetOpenMode(OpenMode.CREATE));
+            d = new Document();
+            d.Add(NewTextField("f1", "this field has no payloads", Field.Store.NO));
+            d.Add(NewTextField("f2", "this field has payloads in all docs", Field.Store.NO));
+            d.Add(NewTextField("f2", "this field has payloads in all docs", Field.Store.NO));
+            d.Add(NewTextField("f3", "this field has payloads in some docs", Field.Store.NO));
+            // add payload data for field f2 and f3
+#pragma warning disable 612, 618
+            analyzer.SetPayloadData("f2", "somedata".GetBytes(IOUtils.CHARSET_UTF_8), 0, 1);
+            analyzer.SetPayloadData("f3", "somedata".GetBytes(IOUtils.CHARSET_UTF_8), 0, 3);
+#pragma warning restore 612, 618
+            writer.AddDocument(d);
+
+            // force merge
+            writer.ForceMerge(1);
+            // flush
+            writer.Dispose();
+
+            reader = GetOnlySegmentReader(DirectoryReader.Open(ram));
+            fi = reader.FieldInfos;
+            Assert.IsFalse(fi.FieldInfo("f1").HasPayloads, "Payload field bit should not be set.");
+            Assert.IsTrue(fi.FieldInfo("f2").HasPayloads, "Payload field bit should be set.");
+            Assert.IsTrue(fi.FieldInfo("f3").HasPayloads, "Payload field bit should be set.");
+            reader.Dispose();
+            ram.Dispose();
+        }
+
+        // Tests if payloads are correctly stored and loaded (NewDirectory() picks the Directory implementation)
+        [Test]
+        public virtual void TestPayloadsEncoding()
+        {
+            Directory dir = NewDirectory();
+            PerformTest(dir);
+            dir.Dispose();
+        }
+
+        // builds an index with payloads in the given Directory and performs
+        // different tests to verify the payload encoding
+        private void PerformTest(Directory dir)
+        {
+            PayloadAnalyzer analyzer = new PayloadAnalyzer();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetOpenMode(OpenMode.CREATE).SetMergePolicy(NewLogMergePolicy()));
+
+            // should be in sync with value in TermInfosWriter
+            const int skipInterval = 16;
+
+            const int numTerms = 5;
+            const string fieldName = "f1";
+
+            int numDocs = skipInterval + 1;
+            // create content for the test documents with just a few terms
+            Term[] terms = GenerateTerms(fieldName, numTerms);
+            StringBuilder sb = new StringBuilder();
+            for (int i = 0; i < terms.Length; i++)
+            {
+                sb.Append(terms[i].Text());
+                sb.Append(" ");
+            }
+            string content = sb.ToString();
+
+            int payloadDataLength = numTerms * numDocs * 2 + numTerms * numDocs * (numDocs - 1) / 2;
+            var payloadData = GenerateRandomData(payloadDataLength);
+
+            Document d = new Document();
+            d.Add(NewTextField(fieldName, content, Field.Store.NO));
+            // add the same document multiple times to have the same payload lengths for all
+            // occurrences within two consecutive skip intervals
+            int offset = 0;
+            for (int i = 0; i < 2 * numDocs; i++)
+            {
+                analyzer = new PayloadAnalyzer(fieldName, payloadData, offset, 1);
+                offset += numTerms;
+                writer.AddDocument(d, analyzer);
+            }
+
+            // make sure we create more than one segment to test merging
+            writer.Commit();
+
+            // now we make sure to have different payload lengths at the next skip point
+            for (int i = 0; i < numDocs; i++)
+            {
+                analyzer = new PayloadAnalyzer(fieldName, payloadData, offset, i);
+                offset += i * numTerms;
+                writer.AddDocument(d, analyzer);
+            }
+
+            writer.ForceMerge(1);
+            // flush
+            writer.Dispose();
+
+            /*
+             * Verify the index
+             * first we test if all payloads are stored correctly
+             */
+            IndexReader reader = DirectoryReader.Open(dir);
+
+            var verifyPayloadData = new byte[payloadDataLength];
+            offset = 0;
+            var tps = new DocsAndPositionsEnum[numTerms];
+            for (int i = 0; i < numTerms; i++)
+            {
+                tps[i] = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), terms[i].Field, new BytesRef(terms[i].Text()));
+            }
+
+            while (tps[0].NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                for (int i = 1; i < numTerms; i++)
+                {
+                    tps[i].NextDoc();
+                }
+                int freq = tps[0].Freq;
+
+                for (int i = 0; i < freq; i++)
+                {
+                    for (int j = 0; j < numTerms; j++)
+                    {
+                        tps[j].NextPosition();
+                        BytesRef br = tps[j].GetPayload();
+                        if (br != null)
+                        {
+                            Array.Copy(br.Bytes, br.Offset, verifyPayloadData, offset, br.Length);
+                            offset += br.Length;
+                        }
+                    }
+                }
+            }
+
+            AssertByteArrayEquals(payloadData, verifyPayloadData);
+
+            /*
+             *  test lazy skipping
+             */
+            DocsAndPositionsEnum tp = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), terms[0].Field, new BytesRef(terms[0].Text()));
+            tp.NextDoc();
+            tp.NextPosition();
+            // NOTE: prior rev of this test was failing to first
+            // call next here:
+            tp.NextDoc();
+            // now we don't read this payload
+            tp.NextPosition();
+            BytesRef payload = tp.GetPayload();
+            Assert.AreEqual(1, payload.Length, "Wrong payload length.");
+            Assert.AreEqual(payload.Bytes[payload.Offset], payloadData[numTerms]);
+            tp.NextDoc();
+            tp.NextPosition();
+
+            // we don't read this payload and skip to a different document
+            tp.Advance(5);
+            tp.NextPosition();
+            payload = tp.GetPayload();
+            Assert.AreEqual(1, payload.Length, "Wrong payload length.");
+            Assert.AreEqual(payload.Bytes[payload.Offset], payloadData[5 * numTerms]);
+
+            /*
+             * Test different lengths at skip points
+             */
+            tp = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), terms[1].Field, new BytesRef(terms[1].Text()));
+            tp.NextDoc();
+            tp.NextPosition();
+            Assert.AreEqual(1, tp.GetPayload().Length, "Wrong payload length.");
+            tp.Advance(skipInterval - 1);
+            tp.NextPosition();
+            Assert.AreEqual(1, tp.GetPayload().Length, "Wrong payload length.");
+            tp.Advance(2 * skipInterval - 1);
+            tp.NextPosition();
+            Assert.AreEqual(1, tp.GetPayload().Length, "Wrong payload length.");
+            tp.Advance(3 * skipInterval - 1);
+            tp.NextPosition();
+            Assert.AreEqual(3 * skipInterval - 2 * numDocs - 1, tp.GetPayload().Length, "Wrong payload length.");
+
+            reader.Dispose();
+
+            // test long payload
+            analyzer = new PayloadAnalyzer();
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetOpenMode(OpenMode.CREATE));
+            string singleTerm = "lucene";
+
+            d = new Document();
+            d.Add(NewTextField(fieldName, singleTerm, Field.Store.NO));
+            // add a payload whose length is greater than the buffer size of BufferedIndexOutput
+            payloadData = GenerateRandomData(2000);
+            analyzer.SetPayloadData(fieldName, payloadData, 100, 1500);
+            writer.AddDocument(d);
+
+            writer.ForceMerge(1);
+            // flush
+            writer.Dispose();
+
+            reader = DirectoryReader.Open(dir);
+            tp = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), fieldName, new BytesRef(singleTerm));
+            tp.NextDoc();
+            tp.NextPosition();
+
+            BytesRef bref = tp.GetPayload();
+            verifyPayloadData = new byte[bref.Length];
+            var portion = new byte[1500];
+            Array.Copy(payloadData, 100, portion, 0, 1500);
+
+            AssertByteArrayEquals(portion, bref.Bytes, bref.Offset, bref.Length);
+            reader.Dispose();
+        }
+
+#pragma warning disable 612, 618
+        internal static readonly Encoding Utf8 = IOUtils.CHARSET_UTF_8;
+#pragma warning restore 612, 618
+
+        private void GenerateRandomData(byte[] data)
+        {
+            // this test needs the random data to be valid unicode
+            string s = TestUtil.RandomFixedByteLengthUnicodeString(Random(), data.Length);
+            var b = s.GetBytes(Utf8);
+            Debug.Assert(b.Length == data.Length);
+            System.Buffer.BlockCopy(b, 0, data, 0, b.Length);
+        }
+
+        private byte[] GenerateRandomData(int n)
+        {
+            var data = new byte[n];
+            GenerateRandomData(data);
+            return data;
+        }
+
+        private Term[] GenerateTerms(string fieldName, int n)
+        {
+            int maxDigits = (int)(Math.Log(n) / Math.Log(10));
+            Term[] terms = new Term[n];
+            StringBuilder sb = new StringBuilder();
+            for (int i = 0; i < n; i++)
+            {
+                sb.Length = 0;
+                sb.Append("t");
+                int zeros = maxDigits - (int)(Math.Log(i) / Math.Log(10));
+                for (int j = 0; j < zeros; j++)
+                {
+                    sb.Append("0");
+                }
+                sb.Append(i);
+                terms[i] = new Term(fieldName, sb.ToString());
+            }
+            return terms;
+        }
+
+        internal virtual void AssertByteArrayEquals(byte[] b1, byte[] b2)
+        {
+            if (b1.Length != b2.Length)
+            {
+                Assert.Fail("Byte arrays have different lengths: " + b1.Length + ", " + b2.Length);
+            }
+
+            for (int i = 0; i < b1.Length; i++)
+            {
+                if (b1[i] != b2[i])
+                {
+                    Assert.Fail("Byte arrays different at index " + i + ": " + b1[i] + ", " + b2[i]);
+                }
+            }
+        }
+
+        internal virtual void AssertByteArrayEquals(byte[] b1, byte[] b2, int b2offset, int b2length)
+        {
+            if (b1.Length != b2length)
+            {
+                Assert.Fail("Byte arrays have different lengths: " + b1.Length + ", " + b2length);
+            }
+
+            for (int i = 0; i < b1.Length; i++)
+            {
+                if (b1[i] != b2[b2offset + i])
+                {
+                    Assert.Fail("Byte arrays different at index " + i + ": " + b1[i] + ", " + b2[b2offset + i]);
+                }
+            }
+        }
+
+        /// <summary>
+        /// this Analyzer uses a WhitespaceTokenizer and PayloadFilter.
+        /// </summary>
+        private class PayloadAnalyzer : Analyzer
+        {
+            internal readonly IDictionary<string, PayloadData> FieldToData = new Dictionary<string, PayloadData>();
+
+            public PayloadAnalyzer()
+                : base(PER_FIELD_REUSE_STRATEGY)
+            {
+            }
+
+            public PayloadAnalyzer(string field, byte[] data, int offset, int length)
+                : base(PER_FIELD_REUSE_STRATEGY)
+            {
+                SetPayloadData(field, data, offset, length);
+            }
+
+            internal virtual void SetPayloadData(string field, byte[] data, int offset, int length)
+            {
+                FieldToData[field] = new PayloadData(data, offset, length);
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                PayloadData payload;
+                FieldToData.TryGetValue(fieldName, out payload);
+                Tokenizer ts = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                TokenStream tokenStream = (payload != null) ? (TokenStream)new PayloadFilter(ts, payload.Data, payload.Offset, payload.Length) : ts;
+                return new TokenStreamComponents(ts, tokenStream);
+            }
+
+            internal class PayloadData
+            {
+                internal byte[] Data;
+                internal int Offset;
+                internal int Length;
+
+                internal PayloadData(byte[] data, int offset, int length)
+                {
+                    this.Data = data;
+                    this.Offset = offset;
+                    this.Length = length;
+                }
+            }
+        }
+
+        /// <summary>
+        /// this Filter adds payloads to the tokens.
+        /// </summary>
+        private class PayloadFilter : TokenFilter
+        {
+            internal byte[] Data;
+            internal int Length;
+            internal int Offset;
+            internal int StartOffset;
+            internal IPayloadAttribute PayloadAtt;
+            internal ICharTermAttribute TermAttribute;
+
+            public PayloadFilter(TokenStream @in, byte[] data, int offset, int length)
+                : base(@in)
+            {
+                this.Data = data;
+                this.Length = length;
+                this.Offset = offset;
+                this.StartOffset = offset;
+                PayloadAtt = AddAttribute<IPayloadAttribute>();
+                TermAttribute = AddAttribute<ICharTermAttribute>();
+            }
+
+            public sealed override bool IncrementToken()
+            {
+                bool hasNext = m_input.IncrementToken();
+                if (!hasNext)
+                {
+                    return false;
+                }
+
+                // Some values of the same field are meant to have payloads and others are not
+                if (Offset + Length <= Data.Length && !TermAttribute.ToString().EndsWith("NO PAYLOAD"))
+                {
+                    BytesRef p = new BytesRef(Data, Offset, Length);
+                    PayloadAtt.Payload = p;
+                    Offset += Length;
+                }
+                else
+                {
+                    PayloadAtt.Payload = null;
+                }
+
+                return true;
+            }
+
+            public override void Reset()
+            {
+                base.Reset();
+                this.Offset = StartOffset;
+            }
+        }
+
+        [Test]
+        public virtual void TestThreadSafety()
+        {
+            const int numThreads = 5;
+            int numDocs = AtLeast(50);
+            ByteArrayPool pool = new ByteArrayPool(numThreads, 5);
+
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            const string field = "test";
+
+            ThreadClass[] ingesters = new ThreadClass[numThreads];
+            for (int i = 0; i < numThreads; i++)
+            {
+                ingesters[i] = new ThreadAnonymousInnerClassHelper(this, numDocs, pool, writer, field);
+                ingesters[i].Start();
+            }
+
+            for (int i = 0; i < numThreads; i++)
+            {
+                ingesters[i].Join();
+            }
+            writer.Dispose();
+            IndexReader reader = DirectoryReader.Open(dir);
+            TermsEnum terms = MultiFields.GetFields(reader).GetTerms(field).GetIterator(null);
+            IBits liveDocs = MultiFields.GetLiveDocs(reader);
+            DocsAndPositionsEnum tp = null;
+            while (terms.Next() != null)
+            {
+                string termText = terms.Term.Utf8ToString();
+                tp = terms.DocsAndPositions(liveDocs, tp);
+                while (tp.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+                {
+                    int freq = tp.Freq;
+                    for (int i = 0; i < freq; i++)
+                    {
+                        tp.NextPosition();
+                        BytesRef payload = tp.GetPayload();
+                        Assert.AreEqual(termText, payload.Utf8ToString());
+                    }
+                }
+            }
+            reader.Dispose();
+            dir.Dispose();
+            Assert.AreEqual(pool.Count, numThreads);
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestPayloads OuterInstance;
+
+            private int NumDocs;
+            private Lucene.Net.Index.TestPayloads.ByteArrayPool Pool;
+            private IndexWriter Writer;
+            private string Field;
+
+            public ThreadAnonymousInnerClassHelper(TestPayloads outerInstance, int numDocs, Lucene.Net.Index.TestPayloads.ByteArrayPool pool, IndexWriter writer, string field)
+            {
+                this.OuterInstance = outerInstance;
+                this.NumDocs = numDocs;
+                this.Pool = pool;
+                this.Writer = writer;
+                this.Field = field;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    for (int j = 0; j < NumDocs; j++)
+                    {
+                        Document d = new Document();
+                        d.Add(new TextField(Field, new PoolingPayloadTokenStream(OuterInstance, Pool)));
+                        Writer.AddDocument(d);
+                    }
+                }
+                catch (Exception e)
+                {
+                    Console.WriteLine(e.ToString());
+                    Console.Write(e.StackTrace);
+                    Assert.Fail(e.ToString());
+                }
+            }
+        }
+
+        private class PoolingPayloadTokenStream : TokenStream
+        {
+            private readonly TestPayloads OuterInstance;
+
+            private byte[] Payload;
+            internal bool First;
+            internal ByteArrayPool Pool;
+            internal string Term;
+
+            internal ICharTermAttribute TermAtt;
+            internal IPayloadAttribute PayloadAtt;
+
+            internal PoolingPayloadTokenStream(TestPayloads outerInstance, ByteArrayPool pool)
+            {
+                this.OuterInstance = outerInstance;
+                this.Pool = pool;
+                Payload = pool.Get();
+                OuterInstance.GenerateRandomData(Payload);
+                Term = Encoding.UTF8.GetString((byte[])(Array)Payload);
+                First = true;
+                PayloadAtt = AddAttribute<IPayloadAttribute>();
+                TermAtt = AddAttribute<ICharTermAttribute>();
+            }
+
+            public sealed override bool IncrementToken()
+            {
+                if (!First)
+                {
+                    return false;
+                }
+                First = false;
+                ClearAttributes();
+                TermAtt.Append(Term);
+                PayloadAtt.Payload = new BytesRef(Payload);
+                return true;
+            }
+
+            public override void Dispose()
+            {
+                Pool.Release(Payload);
+            }
+        }
+
+        private class ByteArrayPool
+        {
+            internal readonly IList<byte[]> Pool;
+
+            internal ByteArrayPool(int capacity, int size)
+            {
+                Pool = new List<byte[]>();
+                for (int i = 0; i < capacity; i++)
+                {
+                    Pool.Add(new byte[size]);
+                }
+            }
+
+            internal virtual byte[] Get()
+            {
+                lock (this) // TODO use BlockingCollection / BCL datastructures instead
+                {
+                    var retArray = Pool[0];
+                    Pool.RemoveAt(0);
+                    return retArray;
+                }
+            }
+
+            internal virtual void Release(byte[] b)
+            {
+                lock (this)
+                {
+                    Pool.Add(b);
+                }
+            }
+
+            internal virtual int Count
+            {
+                get
+                {
+                    lock (this)
+                    {
+                        return Pool.Count;
+                    }
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestAcrossFields()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, true), Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(new TextField("hasMaybepayload", "here we go", Field.Store.YES));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            writer = new RandomIndexWriter(Random(), dir, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, true), Similarity, TimeZone);
+            doc = new Document();
+            doc.Add(new TextField("hasMaybepayload2", "here we go", Field.Store.YES));
+            writer.AddDocument(doc);
+            writer.AddDocument(doc);
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// some docs have a payload attribute, some do not </summary>
+        [Test]
+        public virtual void TestMixupDocs()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, null);
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, iwc);
+            Document doc = new Document();
+            Field field = new TextField("field", "", Field.Store.NO);
+            TokenStream ts = new MockTokenizer(new StringReader("here we go"), MockTokenizer.WHITESPACE, true);
+            Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
+            field.SetTokenStream(ts);
+            doc.Add(field);
+            writer.AddDocument(doc);
+            Token withPayload = new Token("withPayload", 0, 11);
+            withPayload.Payload = new BytesRef("test");
+            ts = new CannedTokenStream(withPayload);
+            Assert.IsTrue(ts.HasAttribute<IPayloadAttribute>());
+            field.SetTokenStream(ts);
+            writer.AddDocument(doc);
+            ts = new MockTokenizer(new StringReader("another"), MockTokenizer.WHITESPACE, true);
+            Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
+            field.SetTokenStream(ts);
+            writer.AddDocument(doc);
+            DirectoryReader reader = writer.Reader;
+            AtomicReader sr = SlowCompositeReaderWrapper.Wrap(reader);
+            DocsAndPositionsEnum de = sr.TermPositionsEnum(new Term("field", "withPayload"));
+            de.NextDoc();
+            de.NextPosition();
+            Assert.AreEqual(new BytesRef("test"), de.GetPayload());
+            writer.Dispose();
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// some field instances have a payload attribute, some do not </summary>
+        [Test]
+        public virtual void TestMixupMultiValued()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            Field field = new TextField("field", "", Field.Store.NO);
+            TokenStream ts = new MockTokenizer(new StringReader("here we go"), MockTokenizer.WHITESPACE, true);
+            Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
+            field.SetTokenStream(ts);
+            doc.Add(field);
+            Field field2 = new TextField("field", "", Field.Store.NO);
+            Token withPayload = new Token("withPayload", 0, 11);
+            withPayload.Payload = new BytesRef("test");
+            ts = new CannedTokenStream(withPayload);
+            Assert.IsTrue(ts.HasAttribute<IPayloadAttribute>());
+            field2.SetTokenStream(ts);
+            doc.Add(field2);
+            Field field3 = new TextField("field", "", Field.Store.NO);
+            ts = new MockTokenizer(new StringReader("nopayload"), MockTokenizer.WHITESPACE, true);
+            Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
+            field3.SetTokenStream(ts);
+            doc.Add(field3);
+            writer.AddDocument(doc);
+            DirectoryReader reader = writer.Reader;
+            SegmentReader sr = GetOnlySegmentReader(reader);
+            DocsAndPositionsEnum de = sr.TermPositionsEnum(new Term("field", "withPayload"));
+            de.NextDoc();
+            de.NextPosition();
+            Assert.AreEqual(new BytesRef("test"), de.GetPayload());
+            writer.Dispose();
+            reader.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file
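
The core access pattern exercised by TestPayloads above, reduced to a sketch: walk a term's positions document by document and copy out each payload. This assumes an open IndexReader 'reader' plus a 'field' and 'term' string to look up; the enum APIs are the same ones the test itself uses.

    DocsAndPositionsEnum tp = MultiFields.GetTermPositionsEnum(
        reader, MultiFields.GetLiveDocs(reader), field, new BytesRef(term));
    while (tp.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
    {
        int freq = tp.Freq;
        for (int i = 0; i < freq; i++)
        {
            tp.NextPosition();                  // must precede GetPayload()
            BytesRef payload = tp.GetPayload(); // null when this position carries no payload
            if (payload != null)
            {
                // consume payload.Bytes[payload.Offset .. payload.Offset + payload.Length)
            }
        }
    }

One caveat in the helper code: GenerateTerms derives its zero-padding width from Math.Log(i), which is negative infinity for i = 0, so the padding loop is skipped there only by way of integer overflow. A port built with overflow checking enabled would want something like i.ToString().PadLeft(maxDigits + 1, '0') instead.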

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs b/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs
new file mode 100644
index 0000000..7e26232
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs
@@ -0,0 +1,165 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using System.Diagnostics;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using System.IO;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using CannedTokenStream = Lucene.Net.Analysis.CannedTokenStream;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using TextField = TextField;
+    using Token = Lucene.Net.Analysis.Token;
+    using TokenStream = Lucene.Net.Analysis.TokenStream;
+
+    [SuppressCodecs("Lucene3x")]
+    [TestFixture]
+    public class TestPayloadsOnVectors : LuceneTestCase
+    {
+        /// <summary>
+        /// some docs have a payload attribute, some do not </summary>
+        [Test]
+        public virtual void TestMixupDocs()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, iwc);
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorPayloads = true;
+            customType.StoreTermVectorOffsets = Random().NextBoolean();
+            Field field = new Field("field", "", customType);
+            TokenStream ts = new MockTokenizer(new StringReader("here we go"), MockTokenizer.WHITESPACE, true);
+            Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
+            field.SetTokenStream(ts);
+            doc.Add(field);
+            writer.AddDocument(doc);
+
+            Token withPayload = new Token("withPayload", 0, 11);
+            withPayload.Payload = new BytesRef("test");
+            ts = new CannedTokenStream(withPayload);
+            Assert.IsTrue(ts.HasAttribute<IPayloadAttribute>());
+            field.SetTokenStream(ts);
+            writer.AddDocument(doc);
+
+            ts = new MockTokenizer(new StringReader("another"), MockTokenizer.WHITESPACE, true);
+            Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
+            field.SetTokenStream(ts);
+            writer.AddDocument(doc);
+
+            DirectoryReader reader = writer.Reader;
+            Terms terms = reader.GetTermVector(1, "field");
+            Debug.Assert(terms != null);
+            TermsEnum termsEnum = terms.GetIterator(null);
+            Assert.IsTrue(termsEnum.SeekExact(new BytesRef("withPayload")));
+            DocsAndPositionsEnum de = termsEnum.DocsAndPositions(null, null);
+            Assert.AreEqual(0, de.NextDoc());
+            Assert.AreEqual(0, de.NextPosition());
+            Assert.AreEqual(new BytesRef("test"), de.GetPayload());
+            writer.Dispose();
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// some field instances have a payload attribute, some do not </summary>
+        [Test]
+        public virtual void TestMixupMultiValued()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorPayloads = true;
+            customType.StoreTermVectorOffsets = Random().NextBoolean();
+            Field field = new Field("field", "", customType);
+            TokenStream ts = new MockTokenizer(new StringReader("here we go"), MockTokenizer.WHITESPACE, true);
+            Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
+            field.SetTokenStream(ts);
+            doc.Add(field);
+            Field field2 = new Field("field", "", customType);
+            Token withPayload = new Token("withPayload", 0, 11);
+            withPayload.Payload = new BytesRef("test");
+            ts = new CannedTokenStream(withPayload);
+            Assert.IsTrue(ts.HasAttribute<IPayloadAttribute>());
+            field2.SetTokenStream(ts);
+            doc.Add(field2);
+            Field field3 = new Field("field", "", customType);
+            ts = new MockTokenizer(new StringReader("nopayload"), MockTokenizer.WHITESPACE, true);
+            Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
+            field3.SetTokenStream(ts);
+            doc.Add(field3);
+            writer.AddDocument(doc);
+            DirectoryReader reader = writer.Reader;
+            Terms terms = reader.GetTermVector(0, "field");
+            Debug.Assert(terms != null);
+            TermsEnum termsEnum = terms.GetIterator(null);
+            Assert.IsTrue(termsEnum.SeekExact(new BytesRef("withPayload")));
+            DocsAndPositionsEnum de = termsEnum.DocsAndPositions(null, null);
+            Assert.AreEqual(0, de.NextDoc());
+            Assert.AreEqual(3, de.NextPosition());
+            Assert.AreEqual(new BytesRef("test"), de.GetPayload());
+            writer.Dispose();
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestPayloadsWithoutPositions()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = false;
+            customType.StoreTermVectorPayloads = true;
+            customType.StoreTermVectorOffsets = Random().NextBoolean();
+            doc.Add(new Field("field", "foo", customType));
+            try
+            {
+                writer.AddDocument(doc);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            writer.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file
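
Distilled from the two Mixup tests above, the recipe for indexing a payload-carrying token into term vectors; a minimal sketch that assumes an existing Document 'doc' and uses only types appearing in this diff.

    // Attach a payload to a single canned token and index it with term vectors.
    Token withPayload = new Token("withPayload", 0, 11);
    withPayload.Payload = new BytesRef("test");
    TokenStream ts = new CannedTokenStream(withPayload);

    FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
    customType.StoreTermVectors = true;
    customType.StoreTermVectorPositions = true; // required: payloads ride on positions
    customType.StoreTermVectorPayloads = true;
    Field field = new Field("field", "", customType);
    field.SetTokenStream(ts); // the canned stream replaces the empty string value
    doc.Add(field);

TestPayloadsWithoutPositions pins down the "required" part: setting StoreTermVectorPayloads without StoreTermVectorPositions makes AddDocument throw an ArgumentException.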

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs b/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs
new file mode 100644
index 0000000..ce7e767
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs
@@ -0,0 +1,318 @@
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestPerSegmentDeletes : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestDeletes1()
+        {
+            //IndexWriter.debug2 = System.out;
+            Directory dir = new MockDirectoryWrapper(new Random(Random().Next()), new RAMDirectory());
+            IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetMergeScheduler(new SerialMergeScheduler());
+            iwc.SetMaxBufferedDocs(5000);
+            iwc.SetRAMBufferSizeMB(100);
+            RangeMergePolicy fsmp = new RangeMergePolicy(this, false);
+            iwc.SetMergePolicy(fsmp);
+            IndexWriter writer = new IndexWriter(dir, iwc);
+            for (int x = 0; x < 5; x++)
+            {
+                writer.AddDocument(DocHelper.CreateDocument(x, "1", 2));
+                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
+            }
+            //System.out.println("commit1");
+            writer.Commit();
+            Assert.AreEqual(1, writer.SegmentCount);
+            for (int x = 5; x < 10; x++)
+            {
+                writer.AddDocument(DocHelper.CreateDocument(x, "2", 2));
+                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
+            }
+            //System.out.println("commit2");
+            writer.Commit();
+            Assert.AreEqual(2, writer.SegmentCount);
+
+            for (int x = 10; x < 15; x++)
+            {
+                writer.AddDocument(DocHelper.CreateDocument(x, "3", 2));
+                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
+            }
+
+            writer.DeleteDocuments(new Term("id", "1"));
+
+            writer.DeleteDocuments(new Term("id", "11"));
+
+            // flushing without applying deletes means
+            // there will still be deletes in the segment infos
+            writer.Flush(false, false);
+            Assert.IsTrue(writer.bufferedUpdatesStream.Any());
+
+            // getting a reader flushes pending deletes,
+            // so there should not be any more
+            IndexReader r1 = writer.Reader;
+            Assert.IsFalse(writer.bufferedUpdatesStream.Any());
+            r1.Dispose();
+
+            // delete id:2 from the first segment
+            // merge segments 0 and 1
+            // which should apply the delete id:2
+            writer.DeleteDocuments(new Term("id", "2"));
+            writer.Flush(false, false);
+            fsmp = (RangeMergePolicy)writer.Config.MergePolicy;
+            fsmp.DoMerge = true;
+            fsmp.Start = 0;
+            fsmp.Length = 2;
+            writer.MaybeMerge();
+
+            Assert.AreEqual(2, writer.SegmentCount);
+
+            // id:2 shouldn't exist anymore because
+            // it's been applied in the merge and now it's gone
+            IndexReader r2 = writer.Reader;
+            int[] id2docs = ToDocsArray(new Term("id", "2"), null, r2);
+            Assert.IsTrue(id2docs == null);
+            r2.Dispose();
+
+            /*
+            /// // added docs are in the ram buffer
+            /// for (int x = 15; x < 20; x++) {
+            ///  writer.AddDocument(TestIndexWriterReader.CreateDocument(x, "4", 2));
+            ///  System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
+            /// }
+            /// Assert.IsTrue(writer.numRamDocs() > 0);
+            /// // delete from the ram buffer
+            /// writer.DeleteDocuments(new Term("id", Integer.toString(13)));
+            ///
+            /// Term id3 = new Term("id", Integer.toString(3));
+            ///
+            /// // delete from the 1st segment
+            /// writer.DeleteDocuments(id3);
+            ///
+            /// Assert.IsTrue(writer.numRamDocs() > 0);
+            ///
+            /// //System.out
+            /// //    .println("segdels1:" + writer.docWriter.deletesToString());
+            ///
+            /// //Assert.IsTrue(writer.docWriter.segmentDeletes.Size() > 0);
+            ///
+            /// // we cause a merge to happen
+            /// fsmp.doMerge = true;
+            /// fsmp.start = 0;
+            /// fsmp.Length = 2;
+            /// System.out.println("maybeMerge "+writer.SegmentInfos);
+            ///
+            /// SegmentInfo info0 = writer.SegmentInfos.Info(0);
+            /// SegmentInfo info1 = writer.SegmentInfos.Info(1);
+            ///
+            /// writer.MaybeMerge();
+            /// System.out.println("maybeMerge after "+writer.SegmentInfos);
+            /// // there should be docs in RAM
+            /// Assert.IsTrue(writer.numRamDocs() > 0);
+            ///
+            /// // assert we've merged the 1 and 2 segments
+            /// // and still have a segment leftover == 2
+            /// Assert.AreEqual(2, writer.SegmentInfos.Size());
+            /// Assert.IsFalse(segThere(info0, writer.SegmentInfos));
+            /// Assert.IsFalse(segThere(info1, writer.SegmentInfos));
+            ///
+            /// //System.out.println("segdels2:" + writer.docWriter.deletesToString());
+            ///
+            /// //Assert.IsTrue(writer.docWriter.segmentDeletes.Size() > 0);
+            ///
+            /// IndexReader r = writer.GetReader();
+            /// IndexReader r1 = r.getSequentialSubReaders()[0];
+            /// printDelDocs(r1.GetLiveDocs());
+            /// int[] docs = toDocsArray(id3, null, r);
+            /// System.out.println("id3 docs:"+Arrays.toString(docs));
+            /// // there shouldn't be any docs for id:3
+            /// Assert.IsTrue(docs == null);
+            /// r.Dispose();
+            ///
+            /// part2(writer, fsmp);
+            ///
+            */
+            // System.out.println("segdels2:"+writer.docWriter.segmentDeletes.toString());
+            //System.out.println("close");
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// static boolean hasPendingDeletes(SegmentInfos infos) {
+        ///  for (SegmentInfo info : infos) {
+        ///    if (info.deletes.Any()) {
+        ///      return true;
+        ///    }
+        ///  }
+        ///  return false;
+        /// }
+        ///
+        /// </summary>
+        internal virtual void Part2(IndexWriter writer, RangeMergePolicy fsmp)
+        {
+            for (int x = 20; x < 25; x++)
+            {
+                writer.AddDocument(DocHelper.CreateDocument(x, "5", 2));
+                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
+            }
+            writer.Flush(false, false);
+            for (int x = 25; x < 30; x++)
+            {
+                writer.AddDocument(DocHelper.CreateDocument(x, "5", 2));
+                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
+            }
+            writer.Flush(false, false);
+
+            //System.out.println("infos3:"+writer.SegmentInfos);
+
+            Term delterm = new Term("id", "8");
+            writer.DeleteDocuments(delterm);
+            //System.out.println("segdels3:" + writer.docWriter.deletesToString());
+
+            fsmp.DoMerge = true;
+            fsmp.Start = 1;
+            fsmp.Length = 2;
+            writer.MaybeMerge();
+
+            // deletes for info1, the newly created segment from the
+            // merge should have no deletes because they were applied in
+            // the merge
+            //SegmentInfo info1 = writer.SegmentInfos.Info(1);
+            //Assert.IsFalse(exists(info1, writer.docWriter.segmentDeletes));
+
+            //System.out.println("infos4:"+writer.SegmentInfos);
+            //System.out.println("segdels4:" + writer.docWriter.deletesToString());
+        }
+
+        internal virtual bool SegThere(SegmentCommitInfo info, SegmentInfos infos)
+        {
+            foreach (SegmentCommitInfo si in infos.Segments)
+            {
+                if (si.Info.Name.Equals(info.Info.Name))
+                {
+                    return true;
+                }
+            }
+            return false;
+        }
+
+        public static void PrintDelDocs(IBits bits)
+        {
+            if (bits == null)
+            {
+                return;
+            }
+            for (int x = 0; x < bits.Length; x++)
+            {
+                Console.WriteLine(x + ":" + bits.Get(x));
+            }
+        }
+
+        public virtual int[] ToDocsArray(Term term, IBits bits, IndexReader reader)
+        {
+            Fields fields = MultiFields.GetFields(reader);
+            Terms cterms = fields.GetTerms(term.Field);
+            TermsEnum ctermsEnum = cterms.GetIterator(null);
+            if (ctermsEnum.SeekExact(new BytesRef(term.Text())))
+            {
+                DocsEnum docsEnum = TestUtil.Docs(Random(), ctermsEnum, bits, null, DocsEnum.FLAG_NONE);
+                return ToArray(docsEnum);
+            }
+            return null;
+        }
+
+        public static int[] ToArray(DocsEnum docsEnum)
+        {
+            IList<int?> docs = new List<int?>();
+            while (docsEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                int docID = docsEnum.DocID;
+                docs.Add(docID);
+            }
+            return ArrayUtil.ToInt32Array(docs);
+        }
+
+        public class RangeMergePolicy : MergePolicy
+        {
+            private readonly TestPerSegmentDeletes OuterInstance;
+
+            internal bool DoMerge = false;
+            internal int Start;
+            internal int Length;
+
+            internal readonly bool UseCompoundFile_Renamed;
+
+            internal RangeMergePolicy(TestPerSegmentDeletes outerInstance, bool useCompoundFile)
+            {
+                this.OuterInstance = outerInstance;
+                this.UseCompoundFile_Renamed = useCompoundFile;
+            }
+
+            public override void Dispose()
+            {
+            }
+
+            public override MergeSpecification FindMerges(MergeTrigger? mergeTrigger, SegmentInfos segmentInfos)
+            {
+                MergeSpecification ms = new MergeSpecification();
+                if (DoMerge)
+                {
+                    OneMerge om = new OneMerge(segmentInfos.AsList().SubList(Start, Start + Length));
+                    ms.Add(om);
+                    DoMerge = false;
+                    return ms;
+                }
+                return null;
+            }
+
+            public override MergeSpecification FindForcedMerges(SegmentInfos segmentInfos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool?> segmentsToMerge)
+            {
+                return null;
+            }
+
+            public override MergeSpecification FindForcedDeletesMerges(SegmentInfos segmentInfos)
+            {
+                return null;
+            }
+
+            public override bool UseCompoundFile(SegmentInfos segments, SegmentCommitInfo newSegment)
+            {
+                return UseCompoundFile_Renamed;
+            }
+        }
+    }
+}
\ No newline at end of file
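
RangeMergePolicy is the lever in this fixture: FindMerges returns null until DoMerge is set, and then yields exactly one OneMerge covering segments [Start, Start + Length). Condensed from TestDeletes1, this is how the test forces buffered deletes to be applied through a targeted merge; with SerialMergeScheduler the merge runs synchronously inside MaybeMerge().

    RangeMergePolicy fsmp = (RangeMergePolicy)writer.Config.MergePolicy;
    fsmp.DoMerge = true;   // arm the policy for exactly one merge
    fsmp.Start = 0;        // first segment to include
    fsmp.Length = 2;       // number of consecutive segments to merge
    writer.MaybeMerge();   // deletes against those segments are applied here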

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestPersistentSnapshotDeletionPolicy.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestPersistentSnapshotDeletionPolicy.cs b/src/Lucene.Net.Tests/Index/TestPersistentSnapshotDeletionPolicy.cs
new file mode 100644
index 0000000..84c7a59
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestPersistentSnapshotDeletionPolicy.cs
@@ -0,0 +1,260 @@
+using System.Diagnostics;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using System;
+    using System.IO;
+    using Util;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements. See the NOTICE file distributed with this
+         * work for additional information regarding copyright ownership. The ASF
+         * licenses this file to You under the Apache License, Version 2.0 (the
+         * "License"); you may not use this file except in compliance with the License.
+         * You may obtain a copy of the License at
+         *
+         * http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+         * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+         * License for the specific language governing permissions and limitations under
+         * the License.
+         */
+
+    using Document = Documents.Document;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+
+    [TestFixture]
+    public class TestPersistentSnapshotDeletionPolicy : TestSnapshotDeletionPolicy
+    {
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            base.TearDown();
+        }
+
+        private SnapshotDeletionPolicy GetDeletionPolicy(Directory dir)
+        {
+            return new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.CREATE);
+        }
+
+        [Test]
+        public virtual void TestExistingSnapshots()
+        {
+            int numSnapshots = 3;
+            MockDirectoryWrapper dir = NewMockDirectory();
+            IndexWriter writer = new IndexWriter(dir, GetConfig(Random(), GetDeletionPolicy(dir)));
+            PersistentSnapshotDeletionPolicy psdp = (PersistentSnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+            Assert.IsNull(psdp.LastSaveFile);
+            PrepareIndexAndSnapshots(psdp, writer, numSnapshots);
+            Assert.IsNotNull(psdp.LastSaveFile);
+            writer.Dispose();
+
+            // Make sure only 1 save file exists:
+            int count = 0;
+            foreach (string file in dir.ListAll())
+            {
+                if (file.StartsWith(PersistentSnapshotDeletionPolicy.SNAPSHOTS_PREFIX))
+                {
+                    count++;
+                }
+            }
+            Assert.AreEqual(1, count);
+
+            // Make sure we fsync:
+            dir.Crash();
+            dir.ClearCrash();
+
+            // Re-initialize and verify snapshots were persisted
+            psdp = new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.APPEND);
+
+            writer = new IndexWriter(dir, GetConfig(Random(), psdp));
+            psdp = (PersistentSnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+
+            Assert.AreEqual(numSnapshots, psdp.GetSnapshots().Count);
+            Assert.AreEqual(numSnapshots, psdp.SnapshotCount);
+            AssertSnapshotExists(dir, psdp, numSnapshots, false);
+
+            writer.AddDocument(new Document());
+            writer.Commit();
+            Snapshots.Add(psdp.Snapshot());
+            Assert.AreEqual(numSnapshots + 1, psdp.GetSnapshots().Count);
+            Assert.AreEqual(numSnapshots + 1, psdp.SnapshotCount);
+            AssertSnapshotExists(dir, psdp, numSnapshots + 1, false);
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNoSnapshotInfos()
+        {
+            Directory dir = NewDirectory();
+            new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.CREATE);
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestMissingSnapshots()
+        {
+            Directory dir = NewDirectory();
+            try
+            {
+                new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.APPEND);
+                Assert.Fail("did not hit expected exception");
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException ise)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestExceptionDuringSave()
+        {
+            MockDirectoryWrapper dir = NewMockDirectory();
+            dir.FailOn(new FailureAnonymousInnerClassHelper(this, dir));
+            IndexWriter writer = new IndexWriter(dir, GetConfig(Random(), new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.CREATE_OR_APPEND)));
+            writer.AddDocument(new Document());
+            writer.Commit();
+
+            PersistentSnapshotDeletionPolicy psdp = (PersistentSnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+            try
+            {
+                psdp.Snapshot();
+            }
+            catch (IOException ioe)
+            {
+                if (ioe.Message.Equals("now fail on purpose"))
+                {
+                    // ok
+                }
+                else
+                {
+                    throw ioe;
+                }
+            }
+            Assert.AreEqual(0, psdp.SnapshotCount);
+            writer.Dispose();
+            Assert.AreEqual(1, DirectoryReader.ListCommits(dir).Count);
+            dir.Dispose();
+        }
+
+        private class FailureAnonymousInnerClassHelper : MockDirectoryWrapper.Failure
+        {
+            private readonly TestPersistentSnapshotDeletionPolicy OuterInstance;
+
+            private MockDirectoryWrapper Dir;
+
+            public FailureAnonymousInnerClassHelper(TestPersistentSnapshotDeletionPolicy outerInstance, MockDirectoryWrapper dir)
+            {
+                this.OuterInstance = outerInstance;
+                this.Dir = dir;
+            }
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
+                /*typeof(PersistentSnapshotDeletionPolicy).Name.Equals(frame.GetType().Name) && */
+                if (StackTraceHelper.DoesStackTraceContainMethod("Persist"))
+                {
+                    throw new IOException("now fail on purpose");
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestSnapshotRelease()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, GetConfig(Random(), GetDeletionPolicy(dir)));
+            PersistentSnapshotDeletionPolicy psdp = (PersistentSnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+            PrepareIndexAndSnapshots(psdp, writer, 1);
+            writer.Dispose();
+
+            psdp.Release(Snapshots[0]);
+
+            psdp = new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.APPEND);
+            Assert.AreEqual(0, psdp.SnapshotCount, "Should have no snapshots!");
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSnapshotReleaseByGeneration()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, GetConfig(Random(), GetDeletionPolicy(dir)));
+            PersistentSnapshotDeletionPolicy psdp = (PersistentSnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+            PrepareIndexAndSnapshots(psdp, writer, 1);
+            writer.Dispose();
+
+            psdp.Release(Snapshots[0].Generation);
+
+            psdp = new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.APPEND);
+            Assert.AreEqual(0, psdp.SnapshotCount, "Should have no snapshots!");
+            dir.Dispose();
+        }
+
+
+        #region TestSnapshotDeletionPolicy
+        // LUCENENET NOTE: Tests in a base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestSnapshotDeletionPolicy_Mem()
+        {
+            base.TestSnapshotDeletionPolicy_Mem();
+        }
+
+        [Test]
+        public override void TestBasicSnapshots()
+        {
+            base.TestBasicSnapshots();
+        }
+
+        [Test]
+        public override void TestMultiThreadedSnapshotting()
+        {
+            base.TestMultiThreadedSnapshotting();
+        }
+
+        [Test]
+        public override void TestRollbackToOldSnapshot()
+        {
+            base.TestRollbackToOldSnapshot();
+        }
+
+        [Test]
+        public override void TestReleaseSnapshot()
+        {
+            base.TestReleaseSnapshot();
+        }
+
+        [Test]
+        public override void TestSnapshotLastCommitTwice()
+        {
+            base.TestSnapshotLastCommitTwice();
+        }
+
+        [Test]
+        public override void TestMissingCommits()
+        {
+            base.TestMissingCommits();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file
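
The snapshot lifecycle these tests revolve around, as a sketch. It assumes an existing Directory 'dir' and IndexWriterConfig 'conf' (SetIndexDeletionPolicy being the standard IndexWriterConfig setter); the policy and writer types are the ones used above.

    // Take a persistent snapshot of the latest commit, then release it.
    var psdp = new PersistentSnapshotDeletionPolicy(
        new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.CREATE);
    conf.SetIndexDeletionPolicy(psdp);
    using (var writer = new IndexWriter(dir, conf))
    {
        writer.AddDocument(new Document());
        writer.Commit();                        // a snapshot can only cover a commit
        IndexCommit snapshot = psdp.Snapshot(); // state is persisted to 'dir' immediately
        // ... copy the files referenced by 'snapshot' to make a backup ...
        psdp.Release(snapshot);                 // the commit may now be deleted
    }

Because the snapshot state lives in the directory itself, a later PersistentSnapshotDeletionPolicy opened with OpenMode.APPEND (as TestExistingSnapshots does) sees the same snapshots again.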

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestPostingsFormat.cs b/src/Lucene.Net.Tests/Index/TestPostingsFormat.cs
new file mode 100644
index 0000000..20c6b07
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestPostingsFormat.cs
@@ -0,0 +1,95 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Codec = Lucene.Net.Codecs.Codec;
+
+    /// <summary>
+    /// Tests the codec configuration defined by LuceneTestCase randomly
+    ///  (typically a mix across different fields).
+    /// </summary>
+    [TestFixture]
+    public class TestPostingsFormat : BasePostingsFormatTestCase
+    {
+        protected override Codec Codec
+        {
+            get
+            {
+                return Codec.Default;
+            }
+        }
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            AssumeTrue("The MockRandom PF randomizes content on the fly, so we can't check it", false);
+        }
+
+
+        #region BasePostingsFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestDocsOnly()
+        {
+            base.TestDocsOnly();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqs()
+        {
+            base.TestDocsAndFreqs();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositions()
+        {
+            base.TestDocsAndFreqsAndPositions();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndPayloads()
+        {
+            base.TestDocsAndFreqsAndPositionsAndPayloads();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndOffsets()
+        {
+            base.TestDocsAndFreqsAndPositionsAndOffsets();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndOffsetsAndPayloads()
+        {
+            base.TestDocsAndFreqsAndPositionsAndOffsetsAndPayloads();
+        }
+
+        [Test]
+        public override void TestRandom()
+        {
+            base.TestRandom();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file
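
The #region block above is the LUCENENET workaround its comment describes: tests declared in an abstract base class are not pulled into the correct context in Visual Studio, so each inherited test is re-declared as a one-line [Test] override that delegates to the base implementation. In shape (the class name here is hypothetical):

    [TestFixture]
    public class MyCodecTests : BasePostingsFormatTestCase
    {
        protected override Codec Codec
        {
            get { return Codec.Default; } // run the suite against the default codec
        }

        [Test]
        public override void TestRandom()
        {
            base.TestRandom(); // the base class holds the real test logic
        }
    }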


[58/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Renamed Codecs\lucene40\ to Codecs\Lucene40\

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40DocValuesWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40DocValuesWriter.cs b/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40DocValuesWriter.cs
deleted file mode 100644
index 42856fc..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40DocValuesWriter.cs
+++ /dev/null
@@ -1,624 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.Linq;
-
-namespace Lucene.Net.Codecs.Lucene40
-{
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using CompoundFileDirectory = Lucene.Net.Store.CompoundFileDirectory;
-    using Directory = Lucene.Net.Store.Directory;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using LegacyDocValuesType = Lucene.Net.Codecs.Lucene40.Lucene40FieldInfosReader.LegacyDocValuesType;
-    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
-    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
-
-#pragma warning disable 612, 618
-    internal class Lucene40DocValuesWriter : DocValuesConsumer
-    {
-        private readonly Directory Dir;
-        private readonly SegmentWriteState State;
-        private readonly string LegacyKey;
-        private const string SegmentSuffix = "dv";
-
-        // note: intentionally ignores seg suffix
-        internal Lucene40DocValuesWriter(SegmentWriteState state, string filename, string legacyKey)
-        {
-            this.State = state;
-            this.LegacyKey = legacyKey;
-            this.Dir = new CompoundFileDirectory(state.Directory, filename, state.Context, true);
-        }
-
-        public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
-        {
-            // examine the values to determine best type to use
-            long minValue = long.MaxValue;
-            long maxValue = long.MinValue;
-            foreach (long? n in values)
-            {
-                long v = n == null ? 0 : (long)n;
-                minValue = Math.Min(minValue, v);
-                maxValue = Math.Max(maxValue, v);
-            }
-
-            string fileName = IndexFileNames.SegmentFileName(State.SegmentInfo.Name + "_" + Convert.ToString(field.Number), SegmentSuffix, "dat");
-            IndexOutput data = Dir.CreateOutput(fileName, State.Context);
-            bool success = false;
-            try
-            {
-                if (minValue >= sbyte.MinValue && maxValue <= sbyte.MaxValue && PackedInt32s.BitsRequired(maxValue - minValue) > 4)
-                {
-                    // fits in a byte[], would be more than 4bpv, just write byte[]
-                    AddBytesField(field, data, values);
-                }
-                else if (minValue >= short.MinValue && maxValue <= short.MaxValue && PackedInt32s.BitsRequired(maxValue - minValue) > 8)
-                {
-                    // fits in a short[], would be more than 8bpv, just write short[]
-                    AddShortsField(field, data, values);
-                }
-                else if (minValue >= int.MinValue && maxValue <= int.MaxValue && PackedInt32s.BitsRequired(maxValue - minValue) > 16)
-                {
-                    // fits in a int[], would be more than 16bpv, just write int[]
-                    AddIntsField(field, data, values);
-                }
-                else
-                {
-                    AddVarIntsField(field, data, values, minValue, maxValue);
-                }
-                success = true;
-            }
-            finally
-            {
-                if (success)
-                {
-                    IOUtils.Close(data);
-                }
-                else
-                {
-                    IOUtils.CloseWhileHandlingException(data);
-                }
-            }
-        }
-
-        private void AddBytesField(FieldInfo field, IndexOutput output, IEnumerable<long?> values)
-        {
-            field.PutAttribute(LegacyKey, LegacyDocValuesType.FIXED_INTS_8.Name);
-            CodecUtil.WriteHeader(output, Lucene40DocValuesFormat.INTS_CODEC_NAME, Lucene40DocValuesFormat.INTS_VERSION_CURRENT);
-            output.WriteInt32(1); // size
-            foreach (long? n in values)
-            {
-                output.WriteByte(n == null ? (byte)0 : (byte)n);
-            }
-        }
-
-        private void AddShortsField(FieldInfo field, IndexOutput output, IEnumerable<long?> values)
-        {
-            field.PutAttribute(LegacyKey, LegacyDocValuesType.FIXED_INTS_16.Name);
-            CodecUtil.WriteHeader(output, Lucene40DocValuesFormat.INTS_CODEC_NAME, Lucene40DocValuesFormat.INTS_VERSION_CURRENT);
-            output.WriteInt32(2); // size
-            foreach (long? n in values)
-            {
-                output.WriteInt16(n == null ? (short)0 : (short)n);
-            }
-        }
-
-        private void AddIntsField(FieldInfo field, IndexOutput output, IEnumerable<long?> values)
-        {
-            field.PutAttribute(LegacyKey, LegacyDocValuesType.FIXED_INTS_32.Name);
-            CodecUtil.WriteHeader(output, Lucene40DocValuesFormat.INTS_CODEC_NAME, Lucene40DocValuesFormat.INTS_VERSION_CURRENT);
-            output.WriteInt32(4); // size
-            foreach (long? n in values)
-            {
-                output.WriteInt32(n == null ? 0 : (int)n);
-            }
-        }
-
-        private void AddVarIntsField(FieldInfo field, IndexOutput output, IEnumerable<long?> values, long minValue, long maxValue)
-        {
-            field.PutAttribute(LegacyKey, LegacyDocValuesType.VAR_INTS.Name);
-
-            CodecUtil.WriteHeader(output, Lucene40DocValuesFormat.VAR_INTS_CODEC_NAME, Lucene40DocValuesFormat.VAR_INTS_VERSION_CURRENT);
-
-            long delta = maxValue - minValue;
-
-            if (delta < 0)
-            {
-                // writes longs
-                output.WriteByte((byte)Lucene40DocValuesFormat.VAR_INTS_FIXED_64);
-                foreach (long? n in values)
-                {
-                    output.WriteInt64(n == null ? 0 : n.Value);
-                }
-            }
-            else
-            {
-                // writes packed ints
-                output.WriteByte((byte)Lucene40DocValuesFormat.VAR_INTS_PACKED);
-                output.WriteInt64(minValue);
-                output.WriteInt64(0 - minValue); // default value (representation of 0)
-                PackedInt32s.Writer writer = PackedInt32s.GetWriter(output, State.SegmentInfo.DocCount, PackedInt32s.BitsRequired(delta), PackedInt32s.DEFAULT);
-                foreach (long? n in values)
-                {
-                    long v = n == null ? 0 : (long)n;
-                    writer.Add(v - minValue);
-                }
-                writer.Finish();
-            }
-        }
-
-        public override void AddBinaryField(FieldInfo field, IEnumerable<BytesRef> values)
-        {
-            // examine the values to determine best type to use
-            HashSet<BytesRef> uniqueValues = new HashSet<BytesRef>();
-            int minLength = int.MaxValue;
-            int maxLength = int.MinValue;
-
-            var vals = values.ToArray();
-
-            for (int i = 0; i < vals.Length; i++)
-            {
-                var b = vals[i];
-
-                if (b == null)
-                {
-                    b = vals[i] = new BytesRef(); // 4.0 doesn't distinguish missing from empty values
-                }
-                if (b.Length > Lucene40DocValuesFormat.MAX_BINARY_FIELD_LENGTH)
-                {
-                    throw new System.ArgumentException("DocValuesField \"" + field.Name + "\" is too large, must be <= " + Lucene40DocValuesFormat.MAX_BINARY_FIELD_LENGTH);
-                }
-                minLength = Math.Min(minLength, b.Length);
-                maxLength = Math.Max(maxLength, b.Length);
-                if (uniqueValues != null)
-                {
-                    if (uniqueValues.Add(BytesRef.DeepCopyOf(b)))
-                    {
-                        if (uniqueValues.Count > 256)
-                        {
-                            uniqueValues = null;
-                        }
-                    }
-                }
-            }
-
-            int maxDoc = State.SegmentInfo.DocCount;
-            bool @fixed = minLength == maxLength;
-            bool dedup = uniqueValues != null && uniqueValues.Count * 2 < maxDoc;
-
-            if (dedup)
-            {
-                // we will deduplicate and deref values
-                bool success = false;
-                IndexOutput data = null;
-                IndexOutput index = null;
-                string dataName = IndexFileNames.SegmentFileName(State.SegmentInfo.Name + "_" + Convert.ToString(field.Number), SegmentSuffix, "dat");
-                string indexName = IndexFileNames.SegmentFileName(State.SegmentInfo.Name + "_" + Convert.ToString(field.Number), SegmentSuffix, "idx");
-                try
-                {
-                    data = Dir.CreateOutput(dataName, State.Context);
-                    index = Dir.CreateOutput(indexName, State.Context);
-                    if (@fixed)
-                    {
-                        AddFixedDerefBytesField(field, data, index, values, minLength);
-                    }
-                    else
-                    {
-                        AddVarDerefBytesField(field, data, index, values);
-                    }
-                    success = true;
-                }
-                finally
-                {
-                    if (success)
-                    {
-                        IOUtils.Close(data, index);
-                    }
-                    else
-                    {
-                        IOUtils.CloseWhileHandlingException(data, index);
-                    }
-                }
-            }
-            else
-            {
-                // we don't deduplicate; just write the values straight
-                if (@fixed)
-                {
-                    // fixed byte[]
-                    string fileName = IndexFileNames.SegmentFileName(State.SegmentInfo.Name + "_" + Convert.ToString(field.Number), SegmentSuffix, "dat");
-                    IndexOutput data = Dir.CreateOutput(fileName, State.Context);
-                    bool success = false;
-                    try
-                    {
-                        AddFixedStraightBytesField(field, data, values, minLength);
-                        success = true;
-                    }
-                    finally
-                    {
-                        if (success)
-                        {
-                            IOUtils.Close(data);
-                        }
-                        else
-                        {
-                            IOUtils.CloseWhileHandlingException(data);
-                        }
-                    }
-                }
-                else
-                {
-                    // variable byte[]
-                    bool success = false;
-                    IndexOutput data = null;
-                    IndexOutput index = null;
-                    string dataName = IndexFileNames.SegmentFileName(State.SegmentInfo.Name + "_" + Convert.ToString(field.Number), SegmentSuffix, "dat");
-                    string indexName = IndexFileNames.SegmentFileName(State.SegmentInfo.Name + "_" + Convert.ToString(field.Number), SegmentSuffix, "idx");
-                    try
-                    {
-                        data = Dir.CreateOutput(dataName, State.Context);
-                        index = Dir.CreateOutput(indexName, State.Context);
-                        AddVarStraightBytesField(field, data, index, values);
-                        success = true;
-                    }
-                    finally
-                    {
-                        if (success)
-                        {
-                            IOUtils.Close(data, index);
-                        }
-                        else
-                        {
-                            IOUtils.CloseWhileHandlingException(data, index);
-                        }
-                    }
-                }
-            }
-        }
-
-        private void AddFixedStraightBytesField(FieldInfo field, IndexOutput output, IEnumerable<BytesRef> values, int length)
-        {
-            field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_FIXED_STRAIGHT.Name);
-
-            CodecUtil.WriteHeader(output, Lucene40DocValuesFormat.BYTES_FIXED_STRAIGHT_CODEC_NAME, Lucene40DocValuesFormat.BYTES_FIXED_STRAIGHT_VERSION_CURRENT);
-
-            output.WriteInt32(length);
-            foreach (BytesRef v in values)
-            {
-                if (v != null)
-                {
-                    output.WriteBytes(v.Bytes, v.Offset, v.Length);
-                }
-            }
-        }
-
-        // NOTE: 4.0 file format docs are crazy/wrong here...
-        private void AddVarStraightBytesField(FieldInfo field, IndexOutput data, IndexOutput index, IEnumerable<BytesRef> values)
-        {
-            field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_VAR_STRAIGHT.Name);
-
-            CodecUtil.WriteHeader(data, Lucene40DocValuesFormat.BYTES_VAR_STRAIGHT_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_VAR_STRAIGHT_VERSION_CURRENT);
-
-            CodecUtil.WriteHeader(index, Lucene40DocValuesFormat.BYTES_VAR_STRAIGHT_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_VAR_STRAIGHT_VERSION_CURRENT);
-
-            /* values */
-
-            long startPos = data.FilePointer;
-
-            foreach (BytesRef v in values)
-            {
-                if (v != null)
-                {
-                    data.WriteBytes(v.Bytes, v.Offset, v.Length);
-                }
-            }
-
-            /* addresses */
-
-            long maxAddress = data.FilePointer - startPos;
-            index.WriteVInt64(maxAddress);
-
-            int maxDoc = State.SegmentInfo.DocCount;
-            Debug.Assert(maxDoc != int.MaxValue); // unsupported by the 4.0 impl
-
-            PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc + 1, PackedInt32s.BitsRequired(maxAddress), PackedInt32s.DEFAULT);
-            long currentPosition = 0;
-            foreach (BytesRef v in values)
-            {
-                w.Add(currentPosition);
-                if (v != null)
-                {
-                    currentPosition += v.Length;
-                }
-            }
-            // write sentinel
-            Debug.Assert(currentPosition == maxAddress);
-            w.Add(currentPosition);
-            w.Finish();
-        }
-
-        private void AddFixedDerefBytesField(FieldInfo field, IndexOutput data, IndexOutput index, IEnumerable<BytesRef> values, int length)
-        {
-            field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_FIXED_DEREF.Name);
-
-            CodecUtil.WriteHeader(data, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_VERSION_CURRENT);
-
-            CodecUtil.WriteHeader(index, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_FIXED_DEREF_VERSION_CURRENT);
-
-            // deduplicate
-            SortedSet<BytesRef> dictionary = new SortedSet<BytesRef>();
-            foreach (BytesRef v in values)
-            {
-                dictionary.Add(v == null ? new BytesRef() : BytesRef.DeepCopyOf(v));
-            }
-
-            /* values */
-            data.WriteInt32(length);
-            foreach (BytesRef v in dictionary)
-            {
-                data.WriteBytes(v.Bytes, v.Offset, v.Length);
-            }
-
-            /* ordinals */
-            int valueCount = dictionary.Count;
-            Debug.Assert(valueCount > 0);
-            index.WriteInt32(valueCount);
-            int maxDoc = State.SegmentInfo.DocCount;
-            PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(valueCount - 1), PackedInt32s.DEFAULT);
-
-            BytesRef brefDummy;
-            foreach (BytesRef v in values)
-            {
-                brefDummy = v;
-
-                if (v == null)
-                {
-                    brefDummy = new BytesRef();
-                }
-                //int ord = dictionary.HeadSet(brefDummy).Size();
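-                // LUCENENET: the LINQ Count below replaces Java's SortedSet.headSet(brefDummy).size()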
-                int ord = dictionary.Count(@ref => @ref.CompareTo(brefDummy) < 0);
-                w.Add(ord);
-            }
-            w.Finish();
-        }
-
-        private void AddVarDerefBytesField(FieldInfo field, IndexOutput data, IndexOutput index, IEnumerable<BytesRef> values)
-        {
-            field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_VAR_DEREF.Name);
-
-            CodecUtil.WriteHeader(data, Lucene40DocValuesFormat.BYTES_VAR_DEREF_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_VAR_DEREF_VERSION_CURRENT);
-
-            CodecUtil.WriteHeader(index, Lucene40DocValuesFormat.BYTES_VAR_DEREF_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_VAR_DEREF_VERSION_CURRENT);
-
-            // deduplicate
-            SortedSet<BytesRef> dictionary = new SortedSet<BytesRef>();
-            foreach (BytesRef v in values)
-            {
-                dictionary.Add(v == null ? new BytesRef() : BytesRef.DeepCopyOf(v));
-            }
-
-            /* values */
-            long startPosition = data.FilePointer;
-            long currentAddress = 0;
-            Dictionary<BytesRef, long> valueToAddress = new Dictionary<BytesRef, long>();
-            foreach (BytesRef v in dictionary)
-            {
-                currentAddress = data.FilePointer - startPosition;
-                valueToAddress[v] = currentAddress;
-                WriteVShort(data, v.Length);
-                data.WriteBytes(v.Bytes, v.Offset, v.Length);
-            }
-
-            /* ordinals */
-            long totalBytes = data.FilePointer - startPosition;
-            index.WriteInt64(totalBytes);
-            int maxDoc = State.SegmentInfo.DocCount;
-            PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(currentAddress), PackedInt32s.DEFAULT);
-
-            foreach (BytesRef v in values)
-            {
-                w.Add(valueToAddress[v == null ? new BytesRef() : v]);
-            }
-            w.Finish();
-        }
-
-        // the little vint encoding used for var-deref
-        private static void WriteVShort(IndexOutput o, int i)
-        {
-            Debug.Assert(i >= 0 && i <= short.MaxValue);
-            if (i < 128)
-            {
-                o.WriteByte((byte)(sbyte)i);
-            }
-            else
-            {
-                o.WriteByte((byte)unchecked((sbyte)(0x80 | (i >> 8))));
-                o.WriteByte((byte)unchecked((sbyte)(i & 0xff)));
-            }
-        }
-
-        public override void AddSortedField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd)
-        {
-            // examine the values to determine best type to use
-            int minLength = int.MaxValue;
-            int maxLength = int.MinValue;
-            foreach (BytesRef b in values)
-            {
-                minLength = Math.Min(minLength, b.Length);
-                maxLength = Math.Max(maxLength, b.Length);
-            }
-
-            // but don't use fixed if there are missing values (we are simulating how lucene40 wrote dv...)
-            bool anyMissing = false;
-            foreach (long n in docToOrd)
-            {
-                if ((long)n == -1)
-                {
-                    anyMissing = true;
-                    break;
-                }
-            }
-
-            bool success = false;
-            IndexOutput data = null;
-            IndexOutput index = null;
-            string dataName = IndexFileNames.SegmentFileName(State.SegmentInfo.Name + "_" + Convert.ToString(field.Number), SegmentSuffix, "dat");
-            string indexName = IndexFileNames.SegmentFileName(State.SegmentInfo.Name + "_" + Convert.ToString(field.Number), SegmentSuffix, "idx");
-
-            try
-            {
-                data = Dir.CreateOutput(dataName, State.Context);
-                index = Dir.CreateOutput(indexName, State.Context);
-                if (minLength == maxLength && !anyMissing)
-                {
-                    // fixed byte[]
-                    AddFixedSortedBytesField(field, data, index, values, docToOrd, minLength);
-                }
-                else
-                {
-                    // var byte[]
-                    // three cases for simulating the old writer:
-                    // 1. no missing
-                    // 2. missing (and empty string in use): remap ord=-1 -> ord=0
-                    // 3. missing (and empty string not in use): remap all ords +1, insert empty string into values
-                    if (!anyMissing)
-                    {
-                        AddVarSortedBytesField(field, data, index, values, docToOrd);
-                    }
-                    else if (minLength == 0)
-                    {
-                        AddVarSortedBytesField(field, data, index, values, MissingOrdRemapper.MapMissingToOrd0(docToOrd));
-                    }
-                    else
-                    {
-                        AddVarSortedBytesField(field, data, index, MissingOrdRemapper.InsertEmptyValue(values), MissingOrdRemapper.MapAllOrds(docToOrd));
-                    }
-                }
-                success = true;
-            }
-            finally
-            {
-                if (success)
-                {
-                    IOUtils.Close(data, index);
-                }
-                else
-                {
-                    IOUtils.CloseWhileHandlingException(data, index);
-                }
-            }
-        }
-
-        private void AddFixedSortedBytesField(FieldInfo field, IndexOutput data, IndexOutput index, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd, int length)
-        {
-            field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_FIXED_SORTED.Name);
-
-            CodecUtil.WriteHeader(data, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_VERSION_CURRENT);
-
-            CodecUtil.WriteHeader(index, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_FIXED_SORTED_VERSION_CURRENT);
-
-            /* values */
-
-            data.WriteInt32(length);
-            int valueCount = 0;
-            foreach (BytesRef v in values)
-            {
-                data.WriteBytes(v.Bytes, v.Offset, v.Length);
-                valueCount++;
-            }
-
-            /* ordinals */
-
-            index.WriteInt32(valueCount);
-            int maxDoc = State.SegmentInfo.DocCount;
-            Debug.Assert(valueCount > 0);
-            PackedInt32s.Writer w = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(valueCount - 1), PackedInt32s.DEFAULT);
-            foreach (long n in docToOrd)
-            {
-                w.Add((long)n);
-            }
-            w.Finish();
-        }
-
-        private void AddVarSortedBytesField(FieldInfo field, IndexOutput data, IndexOutput index, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd)
-        {
-            field.PutAttribute(LegacyKey, LegacyDocValuesType.BYTES_VAR_SORTED.Name);
-
-            CodecUtil.WriteHeader(data, Lucene40DocValuesFormat.BYTES_VAR_SORTED_CODEC_NAME_DAT, Lucene40DocValuesFormat.BYTES_VAR_SORTED_VERSION_CURRENT);
-
-            CodecUtil.WriteHeader(index, Lucene40DocValuesFormat.BYTES_VAR_SORTED_CODEC_NAME_IDX, Lucene40DocValuesFormat.BYTES_VAR_SORTED_VERSION_CURRENT);
-
-            /* values */
-
-            long startPos = data.FilePointer;
-
-            int valueCount = 0;
-            foreach (BytesRef v in values)
-            {
-                data.WriteBytes(v.Bytes, v.Offset, v.Length);
-                valueCount++;
-            }
-
-            /* addresses */
-
-            long maxAddress = data.FilePointer - startPos;
-            index.WriteInt64(maxAddress);
-
-            Debug.Assert(valueCount != int.MaxValue); // unsupported by the 4.0 impl
-
-            PackedInt32s.Writer w = PackedInt32s.GetWriter(index, valueCount + 1, PackedInt32s.BitsRequired(maxAddress), PackedInt32s.DEFAULT);
-            long currentPosition = 0;
-            foreach (BytesRef v in values)
-            {
-                w.Add(currentPosition);
-                currentPosition += v.Length;
-            }
-            // write sentinel
-            Debug.Assert(currentPosition == maxAddress);
-            w.Add(currentPosition);
-            w.Finish();
-
-            /* ordinals */
-
-            int maxDoc = State.SegmentInfo.DocCount;
-            Debug.Assert(valueCount > 0);
-            PackedInt32s.Writer ords = PackedInt32s.GetWriter(index, maxDoc, PackedInt32s.BitsRequired(valueCount - 1), PackedInt32s.DEFAULT);
-            foreach (long n in docToOrd)
-            {
-                ords.Add((long)n);
-            }
-            ords.Finish();
-        }
-
-        public override void AddSortedSetField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
-        {
-            throw new System.NotSupportedException("Lucene 4.0 does not support SortedSet docvalues");
-        }
-
-        protected override void Dispose(bool disposing)
-        {
-            Dir.Dispose();
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file

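The WriteVShort helper in the deleted doc-values writer above stores values below 128 in a single byte and flags two-byte values with the high bit of the first byte. A minimal decoder sketch for that layout (illustrative only; ReadVShort and its byte-source delegate are hypothetical, not Lucene.Net API):

    using System;

    internal static class VShortSketch
    {
        // Reads one value written by WriteVShort: a single byte when the value
        // fits in 7 bits, otherwise a flagged high byte followed by a low byte.
        public static int ReadVShort(Func<byte> readByte)
        {
            byte b = readByte();
            if ((b & 0x80) == 0)
            {
                return b; // 0..127 stored directly
            }
            return ((b & 0x7F) << 8) | readByte(); // 128..32767 in two bytes
        }
    }

Round-tripping any value in 0..short.MaxValue through WriteVShort and this reader recovers the original, which is why the writer asserts i <= short.MaxValue before encoding.
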
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40FieldInfosWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40FieldInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40FieldInfosWriter.cs
deleted file mode 100644
index 688e365..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40FieldInfosWriter.cs
+++ /dev/null
@@ -1,134 +0,0 @@
-using System;
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Lucene40
-{
-    using Directory = Lucene.Net.Store.Directory;
-    using DocValuesType = Lucene.Net.Index.DocValuesType;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using IndexOptions = Lucene.Net.Index.IndexOptions;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using LegacyDocValuesType = Lucene.Net.Codecs.Lucene40.Lucene40FieldInfosReader.LegacyDocValuesType;
-
-    /// <summary>
-    /// Lucene 4.0 FieldInfos writer.
-    /// </summary>
-    /// <seealso cref="Lucene40FieldInfosFormat"/>
-    /// @lucene.experimental
-    [Obsolete]
-    public class Lucene40FieldInfosWriter : FieldInfosWriter
-    {
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene40FieldInfosWriter()
-        {
-        }
-
-        public override void Write(Directory directory, string segmentName, string segmentSuffix, FieldInfos infos, IOContext context)
-        {
-            string fileName = IndexFileNames.SegmentFileName(segmentName, "", Lucene40FieldInfosFormat.FIELD_INFOS_EXTENSION);
-            IndexOutput output = directory.CreateOutput(fileName, context);
-            bool success = false;
-            try
-            {
-                CodecUtil.WriteHeader(output, Lucene40FieldInfosFormat.CODEC_NAME, Lucene40FieldInfosFormat.FORMAT_CURRENT);
-                output.WriteVInt32(infos.Count);
-                foreach (FieldInfo fi in infos)
-                {
-                    IndexOptions? indexOptions = fi.IndexOptions;
-                    sbyte bits = 0x0;
-                    if (fi.HasVectors)
-                    {
-                        bits |= Lucene40FieldInfosFormat.STORE_TERMVECTOR;
-                    }
-                    if (fi.OmitsNorms)
-                    {
-                        bits |= Lucene40FieldInfosFormat.OMIT_NORMS;
-                    }
-                    if (fi.HasPayloads)
-                    {
-                        bits |= Lucene40FieldInfosFormat.STORE_PAYLOADS;
-                    }
-                    if (fi.IsIndexed)
-                    {
-                        bits |= Lucene40FieldInfosFormat.IS_INDEXED;
-                        Debug.Assert(indexOptions >= IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !fi.HasPayloads);
-                        if (indexOptions == IndexOptions.DOCS_ONLY)
-                        {
-                            bits |= Lucene40FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS;
-                        }
-                        else if (indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
-                        {
-                            bits |= Lucene40FieldInfosFormat.STORE_OFFSETS_IN_POSTINGS;
-                        }
-                        else if (indexOptions == IndexOptions.DOCS_AND_FREQS)
-                        {
-                            bits |= Lucene40FieldInfosFormat.OMIT_POSITIONS;
-                        }
-                    }
-                    output.WriteString(fi.Name);
-                    output.WriteVInt32(fi.Number);
-                    output.WriteByte((byte)bits);
-
-                    // pack the DV types in one byte
-                    sbyte dv = DocValuesByte(fi.DocValuesType, fi.GetAttribute(Lucene40FieldInfosReader.LEGACY_DV_TYPE_KEY));
-                    sbyte nrm = DocValuesByte(fi.NormType, fi.GetAttribute(Lucene40FieldInfosReader.LEGACY_NORM_TYPE_KEY));
-                    Debug.Assert((dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
-                    var val = unchecked((sbyte)(0xff & ((nrm << 4) | dv)));
-                    output.WriteByte((byte)val);
-                    output.WriteStringStringMap(fi.Attributes);
-                }
-                success = true;
-            }
-            finally
-            {
-                if (success)
-                {
-                    output.Dispose();
-                }
-                else
-                {
-                    IOUtils.CloseWhileHandlingException(output);
-                }
-            }
-        }
-
-        /// <summary>
-        /// 4.0-style docvalues byte </summary>
-        public virtual sbyte DocValuesByte(DocValuesType? type, string legacyTypeAtt)
-        {
-            if (type == null)
-            {
-                Debug.Assert(legacyTypeAtt == null);
-                return 0;
-            }
-            else
-            {
-                Debug.Assert(legacyTypeAtt != null);
-                return (sbyte)LegacyDocValuesType.ordinalLookup[legacyTypeAtt];
-            }
-        }
-    }
-}
\ No newline at end of file

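The Write method above packs the doc-values and norms type codes into a single byte, norms in the high nibble, which is what the Debug.Assert on dv and nrm guards. A standalone sketch of that nibble packing (names hypothetical; the actual byte layout is defined by Lucene40FieldInfosFormat):

    using System;

    internal static class NibblePackingSketch
    {
        public static byte Pack(sbyte dv, sbyte nrm)
        {
            // mirrors the assert in Write: both codes must fit in 4 bits
            if ((dv & ~0xF) != 0 || (nrm & ~0xF) != 0)
                throw new ArgumentOutOfRangeException();
            return unchecked((byte)((nrm << 4) | dv));
        }

        public static (sbyte Dv, sbyte Nrm) Unpack(byte packed)
        {
            return ((sbyte)(packed & 0xF), (sbyte)((packed >> 4) & 0xF));
        }
    }
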
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40PostingsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40PostingsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40PostingsWriter.cs
deleted file mode 100644
index 11e2dc0..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40PostingsWriter.cs
+++ /dev/null
@@ -1,381 +0,0 @@
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Lucene40
-{
-    using BytesRef = Lucene.Net.Util.BytesRef;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    /// <summary>
-    /// Consumes doc &amp; freq, writing them using the current
-    ///  index file format.
-    /// </summary>
-
-    using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
-    using DataOutput = Lucene.Net.Store.DataOutput;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexOptions = Lucene.Net.Index.IndexOptions;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
-
-    /// <summary>
-    /// Concrete class that writes the 4.0 frq/prx postings format.
-    /// </summary>
-    /// <seealso cref="Lucene40PostingsFormat"/>
-    /// @lucene.experimental
-#pragma warning disable 612, 618
-    public sealed class Lucene40PostingsWriter : PostingsWriterBase
-    {
-        internal readonly IndexOutput FreqOut;
-        internal readonly IndexOutput ProxOut;
-        internal readonly Lucene40SkipListWriter SkipListWriter;
-
-        /// <summary>
-        /// Expert: The fraction of TermDocs entries stored in skip tables,
-        /// used to accelerate <seealso cref="DocsEnum.Advance(int)"/>.  Larger values result in
-        /// smaller indexes, greater acceleration, but fewer accelerable cases, while
-        /// smaller values result in bigger indexes, less acceleration and more
-        /// accelerable cases. More detailed experiments would be useful here.
-        /// </summary>
-        internal const int DEFAULT_SKIP_INTERVAL = 16;
-
-        internal readonly int SkipInterval;
-
-        /// <summary>
-        /// Expert: minimum docFreq to write any skip data at all
-        /// </summary>
-        internal readonly int SkipMinimum;
-
-        /// <summary>
-        /// Expert: The maximum number of skip levels. Smaller values result in
-        /// slightly smaller indexes, but slower skipping in big posting lists.
-        /// </summary>
-        internal readonly int MaxSkipLevels = 10;
-
-        internal readonly int TotalNumDocs;
-
-        internal IndexOptions? IndexOptions;
-        internal bool StorePayloads;
-        internal bool StoreOffsets;
-
-        // Starts a new term
-        internal long FreqStart;
-
-        internal long ProxStart;
-        internal FieldInfo FieldInfo;
-        internal int LastPayloadLength;
-        internal int LastOffsetLength;
-        internal int LastPosition;
-        internal int LastOffset;
-
-        internal static readonly StandardTermState EmptyState = new StandardTermState();
-        internal StandardTermState LastState;
-
-        // private String segment;
-
-        /// <summary>
-        /// Creates a <seealso cref="Lucene40PostingsWriter"/> with the default
-        ///  <seealso cref="DEFAULT_SKIP_INTERVAL"/>.
-        /// </summary>
-        public Lucene40PostingsWriter(SegmentWriteState state)
-            : this(state, DEFAULT_SKIP_INTERVAL)
-        {
-        }
-
-        /// <summary>
-        /// Creates a <seealso cref="Lucene40PostingsWriter"/> with the
-        ///  specified <c>skipInterval</c>.
-        /// </summary>
-        public Lucene40PostingsWriter(SegmentWriteState state, int skipInterval)
-            : base()
-        {
-            this.SkipInterval = skipInterval;
-            this.SkipMinimum = skipInterval; // set to the same for now
-            // this.segment = state.segmentName;
-            string fileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, Lucene40PostingsFormat.FREQ_EXTENSION);
-            FreqOut = state.Directory.CreateOutput(fileName, state.Context);
-            bool success = false;
-            IndexOutput proxOut = null;
-            try
-            {
-                CodecUtil.WriteHeader(FreqOut, Lucene40PostingsReader.FRQ_CODEC, Lucene40PostingsReader.VERSION_CURRENT);
-                // TODO: this is a best effort: if one of these fields has no postings
-                // then we make an empty prx file, same as if we are wrapped in a
-                // per-field postings format. Maybe we shouldn't bother with this
-                // optimization and should just always create an empty prx file?
-                if (state.FieldInfos.HasProx)
-                {
-                    // At least one field does not omit TF, so create the
-                    // prox file
-                    fileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, Lucene40PostingsFormat.PROX_EXTENSION);
-                    proxOut = state.Directory.CreateOutput(fileName, state.Context);
-                    CodecUtil.WriteHeader(proxOut, Lucene40PostingsReader.PRX_CODEC, Lucene40PostingsReader.VERSION_CURRENT);
-                }
-                else
-                {
-                    // Every field omits TF so we will write no prox file
-                    proxOut = null;
-                }
-                this.ProxOut = proxOut;
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    IOUtils.CloseWhileHandlingException(FreqOut, proxOut);
-                }
-            }
-
-            TotalNumDocs = state.SegmentInfo.DocCount;
-
-            SkipListWriter = new Lucene40SkipListWriter(skipInterval, MaxSkipLevels, TotalNumDocs, FreqOut, proxOut);
-        }
-
-        public override void Init(IndexOutput termsOut)
-        {
-            CodecUtil.WriteHeader(termsOut, Lucene40PostingsReader.TERMS_CODEC, Lucene40PostingsReader.VERSION_CURRENT);
-            termsOut.WriteInt32(SkipInterval); // write skipInterval
-            termsOut.WriteInt32(MaxSkipLevels); // write maxSkipLevels
-            termsOut.WriteInt32(SkipMinimum); // write skipMinimum
-        }
-
-        public override BlockTermState NewTermState()
-        {
-            return new StandardTermState();
-        }
-
-        public override void StartTerm()
-        {
-            FreqStart = FreqOut.FilePointer;
-            //if (DEBUG) System.out.println("SPW: startTerm freqOut.fp=" + freqStart);
-            if (ProxOut != null)
-            {
-                ProxStart = ProxOut.FilePointer;
-            }
-            // force first payload to write its length
-            LastPayloadLength = -1;
-            // force first offset to write its length
-            LastOffsetLength = -1;
-            SkipListWriter.ResetSkip();
-        }
-
-        // Currently, this instance is re-used across fields, so
-        // our parent calls setField whenever the field changes
-        public override int SetField(FieldInfo fieldInfo)
-        {
-            //System.out.println("SPW: setField");
-            /*
-            if (BlockTreeTermsWriter.DEBUG && fieldInfo.Name.equals("id")) {
-              DEBUG = true;
-            } else {
-              DEBUG = false;
-            }
-            */
-            this.FieldInfo = fieldInfo;
-            IndexOptions = fieldInfo.IndexOptions;
-
-            StoreOffsets = IndexOptions >= Index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
-            StorePayloads = fieldInfo.HasPayloads;
-            LastState = EmptyState;
-            //System.out.println("  set init blockFreqStart=" + freqStart);
-            //System.out.println("  set init blockProxStart=" + proxStart);
-            return 0;
-        }
-
-        internal int LastDocID;
-        internal int Df;
-
-        public override void StartDoc(int docID, int termDocFreq)
-        {
-            // if (DEBUG) System.out.println("SPW:   startDoc seg=" + segment + " docID=" + docID + " tf=" + termDocFreq + " freqOut.fp=" + freqOut.getFilePointer());
-
-            int delta = docID - LastDocID;
-
-            if (docID < 0 || (Df > 0 && delta <= 0))
-            {
-                throw new CorruptIndexException("docs out of order (" + docID + " <= " + LastDocID + ") (freqOut: " + FreqOut + ")");
-            }
-
-            if ((++Df % SkipInterval) == 0)
-            {
-                SkipListWriter.SetSkipData(LastDocID, StorePayloads, LastPayloadLength, StoreOffsets, LastOffsetLength);
-                SkipListWriter.BufferSkip(Df);
-            }
-
-            Debug.Assert(docID < TotalNumDocs, "docID=" + docID + " totalNumDocs=" + TotalNumDocs);
-
-            LastDocID = docID;
-            if (IndexOptions == Index.IndexOptions.DOCS_ONLY)
-            {
-                FreqOut.WriteVInt32(delta);
-            }
-            else if (1 == termDocFreq)
-            {
-                FreqOut.WriteVInt32((delta << 1) | 1);
-            }
-            else
-            {
-                FreqOut.WriteVInt32(delta << 1);
-                FreqOut.WriteVInt32(termDocFreq);
-            }
-
-            LastPosition = 0;
-            LastOffset = 0;
-        }
-
-        /// <summary>
-        /// Add a new position & payload </summary>
-        public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset)
-        {
-            //if (DEBUG) System.out.println("SPW:     addPos pos=" + position + " payload=" + (payload == null ? "null" : (payload.Length + " bytes")) + " proxFP=" + proxOut.getFilePointer());
-            Debug.Assert(IndexOptions >= Index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, "invalid indexOptions: " + IndexOptions);
-            Debug.Assert(ProxOut != null);
-
-            int delta = position - LastPosition;
-
-            Debug.Assert(delta >= 0, "position=" + position + " lastPosition=" + LastPosition); // not quite right (if pos=0 is repeated twice we don't catch it)
-
-            LastPosition = position;
-
-            int payloadLength = 0;
-
-            if (StorePayloads)
-            {
-                payloadLength = payload == null ? 0 : payload.Length;
-
-                if (payloadLength != LastPayloadLength)
-                {
-                    LastPayloadLength = payloadLength;
-                    ProxOut.WriteVInt32((delta << 1) | 1);
-                    ProxOut.WriteVInt32(payloadLength);
-                }
-                else
-                {
-                    ProxOut.WriteVInt32(delta << 1);
-                }
-            }
-            else
-            {
-                ProxOut.WriteVInt32(delta);
-            }
-
-            if (StoreOffsets)
-            {
-                // don't use startOffset - lastEndOffset, because this creates lots of negative vints for synonyms,
-                // and the numbers aren't that much smaller anyway.
-                int offsetDelta = startOffset - LastOffset;
-                int offsetLength = endOffset - startOffset;
-                Debug.Assert(offsetDelta >= 0 && offsetLength >= 0, "startOffset=" + startOffset + ",lastOffset=" + LastOffset + ",endOffset=" + endOffset);
-                if (offsetLength != LastOffsetLength)
-                {
-                    ProxOut.WriteVInt32(offsetDelta << 1 | 1);
-                    ProxOut.WriteVInt32(offsetLength);
-                }
-                else
-                {
-                    ProxOut.WriteVInt32(offsetDelta << 1);
-                }
-                LastOffset = startOffset;
-                LastOffsetLength = offsetLength;
-            }
-
-            if (payloadLength > 0)
-            {
-                ProxOut.WriteBytes(payload.Bytes, payload.Offset, payloadLength);
-            }
-        }
-
-        public override void FinishDoc()
-        {
-        }
-
-        internal class StandardTermState : BlockTermState
-        {
-            public long FreqStart;
-            public long ProxStart;
-            public long SkipOffset;
-        }
-
-        /// <summary>
-        /// Called when we are done adding docs to this term </summary>
-        public override void FinishTerm(BlockTermState _state)
-        {
-            StandardTermState state = (StandardTermState)_state;
-            // if (DEBUG) System.out.println("SPW: finishTerm seg=" + segment + " freqStart=" + freqStart);
-            Debug.Assert(state.DocFreq > 0);
-
-            // TODO: wasteful we are counting this (counting # docs
-            // for this term) in two places?
-            Debug.Assert(state.DocFreq == Df);
-            state.FreqStart = FreqStart;
-            state.ProxStart = ProxStart;
-            if (Df >= SkipMinimum)
-            {
-                state.SkipOffset = SkipListWriter.WriteSkip(FreqOut) - FreqStart;
-            }
-            else
-            {
-                state.SkipOffset = -1;
-            }
-            LastDocID = 0;
-            Df = 0;
-        }
-
-        public override void EncodeTerm(long[] empty, DataOutput @out, FieldInfo fieldInfo, BlockTermState _state, bool absolute)
-        {
-            StandardTermState state = (StandardTermState)_state;
-            if (absolute)
-            {
-                LastState = EmptyState;
-            }
-            @out.WriteVInt64(state.FreqStart - LastState.FreqStart);
-            if (state.SkipOffset != -1)
-            {
-                Debug.Assert(state.SkipOffset > 0);
-                @out.WriteVInt64(state.SkipOffset);
-            }
-            if (IndexOptions >= Index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)
-            {
-                @out.WriteVInt64(state.ProxStart - LastState.ProxStart);
-            }
-            LastState = state;
-        }
-
-        protected override void Dispose(bool disposing)
-        {
-            if (disposing)
-            {
-                try
-                {
-                    FreqOut.Dispose();
-                }
-                finally
-                {
-                    if (ProxOut != null)
-                    {
-                        ProxOut.Dispose();
-                    }
-                }
-            }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file

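StartDoc above interleaves doc deltas and term frequencies in the frq stream: for fields with frequencies, the low bit of the shifted delta flags the common termDocFreq == 1 case so the frequency can be omitted (DOCS_ONLY fields just write the raw delta). A self-contained sketch of that encode/decode pair, with an in-memory list standing in for the VInt-encoded IndexOutput (names illustrative):

    using System.Collections.Generic;

    internal static class FreqStreamSketch
    {
        public static void WriteDoc(IList<int> stream, int docDelta, int tf)
        {
            if (tf == 1)
            {
                stream.Add((docDelta << 1) | 1); // low bit set: freq of 1 is implied
            }
            else
            {
                stream.Add(docDelta << 1);       // low bit clear: freq follows
                stream.Add(tf);
            }
        }

        public static (int DocDelta, int Tf) ReadDoc(IEnumerator<int> stream)
        {
            stream.MoveNext();
            int code = stream.Current;
            if ((code & 1) != 0)
            {
                return (code >> 1, 1);
            }
            stream.MoveNext();
            return (code >> 1, stream.Current);
        }
    }
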
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40RWCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40RWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40RWCodec.cs
deleted file mode 100644
index 79fbb42..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40RWCodec.cs
+++ /dev/null
@@ -1,100 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene40
-{
-    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Read-write version of Lucene40Codec for testing </summary>
-#pragma warning disable 612, 618
-    public sealed class Lucene40RWCodec : Lucene40Codec
-    {
-        private readonly FieldInfosFormat fieldInfos;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public Lucene40RWCodec()
-            : this(true)
-        { }
-
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-        /// </param>
-        public Lucene40RWCodec(bool oldFormatImpersonationIsActive) : base()
-        {
-            fieldInfos = new Lucene40FieldInfosFormatAnonymousInnerClassHelper(oldFormatImpersonationIsActive);
-            DocValues = new Lucene40RWDocValuesFormat(oldFormatImpersonationIsActive);
-            Norms = new Lucene40RWNormsFormat(oldFormatImpersonationIsActive);
-        }
-
-        private class Lucene40FieldInfosFormatAnonymousInnerClassHelper : Lucene40FieldInfosFormat
-        {
-            private readonly bool _oldFormatImpersonationIsActive;
-
-            /// <param name="oldFormatImpersonationIsActive">
-            /// LUCENENET specific
-            /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-            /// </param>
-            public Lucene40FieldInfosFormatAnonymousInnerClassHelper(bool oldFormatImpersonationIsActive) : base()
-            {
-                _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-            }
-
-            public override FieldInfosWriter FieldInfosWriter
-            {
-                get
-                {
-                    if (!_oldFormatImpersonationIsActive)
-                    {
-                        return base.FieldInfosWriter;
-                    }
-                    else
-                    {
-                        return new Lucene40FieldInfosWriter();
-                    }
-                }
-            }
-        }
-
-        private readonly DocValuesFormat DocValues;
-        private readonly NormsFormat Norms;
-
-        public override FieldInfosFormat FieldInfosFormat
-        {
-            get { return fieldInfos; }
-        }
-
-        public override DocValuesFormat DocValuesFormat
-        {
-            get { return DocValues; }
-        }
-
-        public override NormsFormat NormsFormat
-        {
-            get { return Norms; }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file

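The RW test formats in this commit (this codec and the doc-values, norms, and postings formats that follow) all use the same LUCENENET-specific constructor pattern: a parameterless constructor, required because SPIClassIterator only instantiates classes with empty constructors, chained to a configurable one that replaces the then-static OLD_FORMAT_IMPERSONATION_IS_ACTIVE flag. The shape of that pattern in isolation (class names hypothetical):

    public abstract class MyFormat { } // stand-in for the real base format

    public class MyRWFormat : MyFormat
    {
        private readonly bool _oldFormatImpersonationIsActive;

        // SPIClassIterator needs the empty constructor; default impersonation to on
        public MyRWFormat()
            : this(true)
        { }

        public MyRWFormat(bool oldFormatImpersonationIsActive)
        {
            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
        }
    }
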
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40RWDocValuesFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40RWDocValuesFormat.cs b/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40RWDocValuesFormat.cs
deleted file mode 100644
index 2281475..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40RWDocValuesFormat.cs
+++ /dev/null
@@ -1,66 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene40
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
-
-    /// <summary>
-    /// Read-write version of <seealso cref="Lucene40DocValuesFormat"/> for testing </summary>
-#pragma warning disable 612, 618
-    public class Lucene40RWDocValuesFormat : Lucene40DocValuesFormat
-    {
-        private readonly bool _oldFormatImpersonationIsActive;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public Lucene40RWDocValuesFormat()
-            : this(true)
-        { }
-
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-        /// </param>
-        public Lucene40RWDocValuesFormat(bool oldFormatImpersonationIsActive) : base()
-        {
-            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-        }
-
-        public override DocValuesConsumer FieldsConsumer(SegmentWriteState state)
-        {
-            if (!_oldFormatImpersonationIsActive)
-            {
-                return base.FieldsConsumer(state);
-            }
-            else
-            {
-                string filename = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, "dv", IndexFileNames.COMPOUND_FILE_EXTENSION);
-                return new Lucene40DocValuesWriter(state, filename, Lucene40FieldInfosReader.LEGACY_DV_TYPE_KEY);
-            }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40RWNormsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40RWNormsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40RWNormsFormat.cs
deleted file mode 100644
index 0830c86..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40RWNormsFormat.cs
+++ /dev/null
@@ -1,66 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene40
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
-
-    /// <summary>
-    /// Read-write version of <seealso cref="Lucene40NormsFormat"/> for testing </summary>
-#pragma warning disable 612, 618
-    public class Lucene40RWNormsFormat : Lucene40NormsFormat
-    {
-        private readonly bool _oldFormatImpersonationIsActive;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public Lucene40RWNormsFormat()
-            : this(true)
-        { }
-
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-        /// </param>
-        public Lucene40RWNormsFormat(bool oldFormatImpersonationIsActive) : base()
-        {
-            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-        }
-
-        public override DocValuesConsumer NormsConsumer(SegmentWriteState state)
-        {
-            if (!_oldFormatImpersonationIsActive)
-            {
-                return base.NormsConsumer(state);
-            }
-            else
-            {
-                string filename = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, "nrm", IndexFileNames.COMPOUND_FILE_EXTENSION);
-                return new Lucene40DocValuesWriter(state, filename, Lucene40FieldInfosReader.LEGACY_NORM_TYPE_KEY);
-            }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file

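Both RW formats above name their compound file via IndexFileNames.SegmentFileName(segmentName, suffix, extension), with the "dv" and "nrm" suffixes respectively. A sketch approximating that naming helper (the real implementation lives in Lucene.Net.Index.IndexFileNames):

    using System.Text;

    internal static class SegmentFileNameSketch
    {
        // e.g. SegmentFileName("_0", "dv", "cfs") -> "_0_dv.cfs"
        public static string SegmentFileName(string segmentName, string segmentSuffix, string ext)
        {
            if (ext.Length == 0 && segmentSuffix.Length == 0)
            {
                return segmentName;
            }
            var sb = new StringBuilder(segmentName);
            if (segmentSuffix.Length > 0)
            {
                sb.Append('_').Append(segmentSuffix);
            }
            if (ext.Length > 0)
            {
                sb.Append('.').Append(ext);
            }
            return sb.ToString();
        }
    }
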
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40RWPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40RWPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40RWPostingsFormat.cs
deleted file mode 100644
index 7a2c9cf..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40RWPostingsFormat.cs
+++ /dev/null
@@ -1,84 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene40
-{
-    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
-
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Read-write version of <seealso cref="Lucene40PostingsFormat"/> for testing.
-    /// </summary>
-#pragma warning disable 612, 618
-    public class Lucene40RWPostingsFormat : Lucene40PostingsFormat
-    {
-        private readonly bool _oldFormatImpersonationIsActive;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public Lucene40RWPostingsFormat()
-            : this(true)
-        { }
-
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/> 
-        /// </param>
-        public Lucene40RWPostingsFormat(bool oldFormatImpersonationIsActive) : base()
-        {
-            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-        }
-
-        public override FieldsConsumer FieldsConsumer(SegmentWriteState state)
-        {
-            if (!_oldFormatImpersonationIsActive)
-            {
-                return base.FieldsConsumer(state);
-            }
-            else
-            {
-                PostingsWriterBase docs = new Lucene40PostingsWriter(state);
-
-                // TODO: should we make the terms index more easily
-                // pluggable?  Ie so that this codec would record which
-                // index impl was used, and switch on loading?
-                // Or... you must make a new Codec for this?
-                bool success = false;
-                try
-                {
-                    FieldsConsumer ret = new BlockTreeTermsWriter(state, docs, m_minBlockSize, m_maxBlockSize);
-                    success = true;
-                    return ret;
-                }
-                finally
-                {
-                    if (!success)
-                    {
-                        docs.Dispose();
-                    }
-                }
-            }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file
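
The FieldsConsumer override above uses the standard Lucene "success flag" idiom: the PostingsWriterBase is opened first, and if constructing the BlockTreeTermsWriter throws, the finally block disposes it so the underlying files are not leaked. A stripped-down sketch of the same idiom follows; SafeCompose and its openInner/wrap delegates are hypothetical stand-ins for Lucene40PostingsWriter and BlockTreeTermsWriter, not real Lucene.Net APIs.

    using System;

    static class SafeCompose
    {
        // Open an inner resource, then build a wrapper that takes ownership
        // of it. If the wrapper constructor throws, dispose the inner
        // resource on the failure path; on success the wrapper owns it.
        public static IDisposable Open(
            Func<IDisposable> openInner,
            Func<IDisposable, IDisposable> wrap)
        {
            IDisposable inner = openInner();
            bool success = false;
            try
            {
                IDisposable ret = wrap(inner); // may throw
                success = true;
                return ret;
            }
            finally
            {
                if (!success)
                {
                    inner.Dispose(); // mirrors docs.Dispose() above
                }
            }
        }
    }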

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40SkipListWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40SkipListWriter.cs b/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40SkipListWriter.cs
deleted file mode 100644
index aa8e52e..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene40/Lucene40SkipListWriter.cs
+++ /dev/null
@@ -1,168 +0,0 @@
-using System;
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Lucene40
-{
-    using Lucene.Net.Support;
-
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-
-    /// <summary>
-    /// Implements the skip list writer for the 4.0 posting list format
-    /// that stores positions and payloads.
-    /// </summary>
-    /// <seealso cref="Lucene40PostingsFormat"/>
-    /// @deprecated Only for reading old 4.0 segments
-    [Obsolete("Only for reading old 4.0 segments")]
-    public class Lucene40SkipListWriter : MultiLevelSkipListWriter
-    {
-        private int[] LastSkipDoc;
-        private int[] LastSkipPayloadLength;
-        private int[] LastSkipOffsetLength;
-        private long[] LastSkipFreqPointer;
-        private long[] LastSkipProxPointer;
-
-        private IndexOutput FreqOutput;
-        private IndexOutput ProxOutput;
-
-        private int CurDoc;
-        private bool CurStorePayloads;
-        private bool CurStoreOffsets;
-        private int CurPayloadLength;
-        private int CurOffsetLength;
-        private long CurFreqPointer;
-        private long CurProxPointer;
-
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene40SkipListWriter(int skipInterval, int numberOfSkipLevels, int docCount, IndexOutput freqOutput, IndexOutput proxOutput)
-            : base(skipInterval, numberOfSkipLevels, docCount)
-        {
-            this.FreqOutput = freqOutput;
-            this.ProxOutput = proxOutput;
-
-            LastSkipDoc = new int[numberOfSkipLevels];
-            LastSkipPayloadLength = new int[numberOfSkipLevels];
-            LastSkipOffsetLength = new int[numberOfSkipLevels];
-            LastSkipFreqPointer = new long[numberOfSkipLevels];
-            LastSkipProxPointer = new long[numberOfSkipLevels];
-        }
-
-        /// <summary>
-        /// Sets the values for the current skip data.
-        /// </summary>
-        public virtual void SetSkipData(int doc, bool storePayloads, int payloadLength, bool storeOffsets, int offsetLength)
-        {
-            Debug.Assert(storePayloads || payloadLength == -1);
-            Debug.Assert(storeOffsets || offsetLength == -1);
-            this.CurDoc = doc;
-            this.CurStorePayloads = storePayloads;
-            this.CurPayloadLength = payloadLength;
-            this.CurStoreOffsets = storeOffsets;
-            this.CurOffsetLength = offsetLength;
-            this.CurFreqPointer = FreqOutput.FilePointer;
-            if (ProxOutput != null)
-            {
-                this.CurProxPointer = ProxOutput.FilePointer;
-            }
-        }
-
-        public override void ResetSkip()
-        {
-            base.ResetSkip();
-            Arrays.Fill(LastSkipDoc, 0);
-            Arrays.Fill(LastSkipPayloadLength, -1); // we don't have to write the first length in the skip list
-            Arrays.Fill(LastSkipOffsetLength, -1); // we don't have to write the first length in the skip list
-            Arrays.Fill(LastSkipFreqPointer, FreqOutput.FilePointer);
-            if (ProxOutput != null)
-            {
-                Arrays.Fill(LastSkipProxPointer, ProxOutput.FilePointer);
-            }
-        }
-
-        protected override void WriteSkipData(int level, IndexOutput skipBuffer)
-        {
-            // To efficiently store payloads/offsets in the posting lists we do not store the length of
-            // every payload/offset. Instead we omit the length if the previous lengths were the same.
-            //
-            // However, in order to support skipping, the length at every skip point must be known.
-            // So we use the same length encoding that we use for the posting lists for the skip data as well:
-            // Case 1: current field does not store payloads/offsets
-            //           SkipDatum                 --> DocSkip, FreqSkip, ProxSkip
-            //           DocSkip,FreqSkip,ProxSkip --> VInt
-            //           DocSkip records the document number before every SkipInterval-th document in TermFreqs.
-            //           Document numbers are represented as differences from the previous value in the sequence.
-            // Case 2: current field stores payloads/offsets
-            //           SkipDatum                 --> DocSkip, PayloadLength?,OffsetLength?,FreqSkip,ProxSkip
-            //           DocSkip,FreqSkip,ProxSkip --> VInt
-            //           PayloadLength,OffsetLength--> VInt
-            //         In this case DocSkip/2 is the difference between
-            //         the current and the previous value. If DocSkip
-            //         is odd, then a PayloadLength encoded as a VInt follows;
-            //         if DocSkip is even, the current payload/offset
-            //         lengths are assumed to equal the lengths at the
-            //         previous skip point.
-            int delta = CurDoc - LastSkipDoc[level];
-
-            if (CurStorePayloads || CurStoreOffsets)
-            {
-                Debug.Assert(CurStorePayloads || CurPayloadLength == LastSkipPayloadLength[level]);
-                Debug.Assert(CurStoreOffsets || CurOffsetLength == LastSkipOffsetLength[level]);
-
-                if (CurPayloadLength == LastSkipPayloadLength[level] && CurOffsetLength == LastSkipOffsetLength[level])
-                {
-                    // the current payload/offset lengths equal the lengths at the previous skip point,
-                    // so we don't store the lengths again
-                    skipBuffer.WriteVInt32(delta << 1);
-                }
-                else
-                {
-                    // the payload and/or offset length is different from the previous one. We shift the DocSkip,
-                    // set the lowest bit and store the current payload and/or offset lengths as VInts.
-                    skipBuffer.WriteVInt32(delta << 1 | 1);
-
-                    if (CurStorePayloads)
-                    {
-                        skipBuffer.WriteVInt32(CurPayloadLength);
-                        LastSkipPayloadLength[level] = CurPayloadLength;
-                    }
-                    if (CurStoreOffsets)
-                    {
-                        skipBuffer.WriteVInt32(CurOffsetLength);
-                        LastSkipOffsetLength[level] = CurOffsetLength;
-                    }
-                }
-            }
-            else
-            {
-                // current field does not store payloads or offsets
-                skipBuffer.WriteVInt32(delta);
-            }
-
-            skipBuffer.WriteVInt32((int)(CurFreqPointer - LastSkipFreqPointer[level]));
-            skipBuffer.WriteVInt32((int)(CurProxPointer - LastSkipProxPointer[level]));
-
-            LastSkipDoc[level] = CurDoc;
-
-            LastSkipFreqPointer[level] = CurFreqPointer;
-            LastSkipProxPointer[level] = CurProxPointer;
-        }
-    }
-}
\ No newline at end of file
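
The encoding described in WriteSkipData above is easy to misread, so here is a small self-contained sketch of just the Case 2 bit trick: the doc delta is shifted left by one, the low bit flags whether fresh payload/offset lengths follow, and an even value means "reuse the lengths from the previous skip point". Plain int lists stand in for Lucene's VInt-encoded IndexOutput purely for illustration, and SkipDatumDemo is a hypothetical name, not part of the codec.

    using System.Collections.Generic;

    static class SkipDatumDemo
    {
        // Encode one skip datum for a field that stores payloads.
        public static void Encode(List<int> buffer, int delta,
                                  int payloadLength, int lastPayloadLength)
        {
            if (payloadLength == lastPayloadLength)
            {
                buffer.Add(delta << 1);      // even DocSkip: length unchanged
            }
            else
            {
                buffer.Add(delta << 1 | 1);  // odd DocSkip: new length follows
                buffer.Add(payloadLength);
            }
        }

        // Decode the same datum; lastPayloadLength is the length carried
        // over from the previous skip point.
        public static (int Delta, int PayloadLength) Decode(
            IEnumerator<int> input, int lastPayloadLength)
        {
            input.MoveNext();
            int docSkip = input.Current;
            int delta = docSkip >> 1;          // DocSkip/2 is the doc delta
            if ((docSkip & 1) != 0)
            {
                input.MoveNext();              // odd: read the new length
                return (delta, input.Current);
            }
            return (delta, lastPayloadLength); // even: reuse previous length
        }
    }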

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c0e9469c/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
index 4839e58..f23c9e2 100644
--- a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
+++ b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
@@ -204,28 +204,28 @@
     <Compile Include="Codecs\Lucene3x\TermInfosWriter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene40\Lucene40DocValuesWriter.cs">
+    <Compile Include="Codecs\Lucene40\Lucene40DocValuesWriter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene40\Lucene40FieldInfosWriter.cs">
+    <Compile Include="Codecs\Lucene40\Lucene40FieldInfosWriter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene40\Lucene40PostingsWriter.cs">
+    <Compile Include="Codecs\Lucene40\Lucene40PostingsWriter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene40\Lucene40RWCodec.cs">
+    <Compile Include="Codecs\Lucene40\Lucene40RWCodec.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene40\Lucene40RWDocValuesFormat.cs">
+    <Compile Include="Codecs\Lucene40\Lucene40RWDocValuesFormat.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene40\Lucene40RWNormsFormat.cs">
+    <Compile Include="Codecs\Lucene40\Lucene40RWNormsFormat.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene40\Lucene40RWPostingsFormat.cs">
+    <Compile Include="Codecs\Lucene40\Lucene40RWPostingsFormat.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene40\Lucene40SkipListWriter.cs">
+    <Compile Include="Codecs\Lucene40\Lucene40SkipListWriter.cs">
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Codecs\Lucene41Ords\Lucene41WithOrds.cs" />


[69/72] [abbrv] lucenenet git commit: Added out.dot to .gitignore (these files are created during testing)

Posted by ni...@apache.org.
Added out.dot to .gitignore (these files are created during testing)


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/3437f3b7
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/3437f3b7
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/3437f3b7

Branch: refs/heads/api-work
Commit: 3437f3b7fccd4d7fe358b5ebadb26e1d10bd9340
Parents: ab626ce
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 26 18:32:57 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:18:02 2017 +0700

----------------------------------------------------------------------
 .gitignore | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/3437f3b7/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index 4ee6108..e75aaec 100644
--- a/.gitignore
+++ b/.gitignore
@@ -38,6 +38,7 @@ packages/
 TestResults/
 test-files/analysis/data/
 [Nn]u[Gg]et[Pp]ackages/
+out.dot
 
 # NuGet v3's project.json files produces more ignoreable files
 *.nuget.props


[14/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Lucene.Net.Tests.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Lucene.Net.Tests.csproj b/src/Lucene.Net.Tests/Lucene.Net.Tests.csproj
index 8509774..b8f5fc2 100644
--- a/src/Lucene.Net.Tests/Lucene.Net.Tests.csproj
+++ b/src/Lucene.Net.Tests/Lucene.Net.Tests.csproj
@@ -52,47 +52,47 @@
     <Reference Include="System.Xml.Linq" />
   </ItemGroup>
   <ItemGroup>
-    <None Include="core\App.config" />
-    <EmbeddedResource Include="core\Index\bogus24.upgraded.to.36.zip" />
-    <EmbeddedResource Include="core\Index\index.30.cfs.zip" />
-    <EmbeddedResource Include="core\Index\index.30.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\index.31.cfs.zip" />
-    <EmbeddedResource Include="core\Index\index.31.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\index.31.optimized.cfs.zip" />
-    <EmbeddedResource Include="core\Index\index.31.optimized.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\index.32.cfs.zip" />
-    <EmbeddedResource Include="core\Index\index.32.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\index.34.cfs.zip" />
-    <EmbeddedResource Include="core\Index\index.34.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\index.36.surrogates.zip" />
-    <EmbeddedResource Include="core\Index\index.40.cfs.zip" />
-    <EmbeddedResource Include="core\Index\index.40.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\index.40.optimized.cfs.zip" />
-    <EmbeddedResource Include="core\Index\index.40.optimized.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\index.41.cfs.zip" />
-    <EmbeddedResource Include="core\Index\index.41.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\index.42.cfs.zip" />
-    <EmbeddedResource Include="core\Index\index.42.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\index.45.cfs.zip" />
-    <EmbeddedResource Include="core\Index\index.45.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\index.461.cfs.zip" />
-    <EmbeddedResource Include="core\Index\index.461.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\moreterms.40.zip" />
-    <EmbeddedResource Include="core\Index\unsupported.19.cfs.zip" />
-    <EmbeddedResource Include="core\Index\unsupported.19.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\unsupported.20.cfs.zip" />
-    <EmbeddedResource Include="core\Index\unsupported.20.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\unsupported.21.cfs.zip" />
-    <EmbeddedResource Include="core\Index\unsupported.21.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\unsupported.22.cfs.zip" />
-    <EmbeddedResource Include="core\Index\unsupported.22.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\unsupported.23.cfs.zip" />
-    <EmbeddedResource Include="core\Index\unsupported.23.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\unsupported.24.cfs.zip" />
-    <EmbeddedResource Include="core\Index\unsupported.24.nocfs.zip" />
-    <EmbeddedResource Include="core\Index\unsupported.29.cfs.zip" />
-    <EmbeddedResource Include="core\Index\unsupported.29.nocfs.zip" />
-    <None Include="core\Lucene.Net.snk" />
+    <None Include="App.config" />
+    <EmbeddedResource Include="Index\bogus24.upgraded.to.36.zip" />
+    <EmbeddedResource Include="Index\index.30.cfs.zip" />
+    <EmbeddedResource Include="Index\index.30.nocfs.zip" />
+    <EmbeddedResource Include="Index\index.31.cfs.zip" />
+    <EmbeddedResource Include="Index\index.31.nocfs.zip" />
+    <EmbeddedResource Include="Index\index.31.optimized.cfs.zip" />
+    <EmbeddedResource Include="Index\index.31.optimized.nocfs.zip" />
+    <EmbeddedResource Include="Index\index.32.cfs.zip" />
+    <EmbeddedResource Include="Index\index.32.nocfs.zip" />
+    <EmbeddedResource Include="Index\index.34.cfs.zip" />
+    <EmbeddedResource Include="Index\index.34.nocfs.zip" />
+    <EmbeddedResource Include="Index\index.36.surrogates.zip" />
+    <EmbeddedResource Include="Index\index.40.cfs.zip" />
+    <EmbeddedResource Include="Index\index.40.nocfs.zip" />
+    <EmbeddedResource Include="Index\index.40.optimized.cfs.zip" />
+    <EmbeddedResource Include="Index\index.40.optimized.nocfs.zip" />
+    <EmbeddedResource Include="Index\index.41.cfs.zip" />
+    <EmbeddedResource Include="Index\index.41.nocfs.zip" />
+    <EmbeddedResource Include="Index\index.42.cfs.zip" />
+    <EmbeddedResource Include="Index\index.42.nocfs.zip" />
+    <EmbeddedResource Include="Index\index.45.cfs.zip" />
+    <EmbeddedResource Include="Index\index.45.nocfs.zip" />
+    <EmbeddedResource Include="Index\index.461.cfs.zip" />
+    <EmbeddedResource Include="Index\index.461.nocfs.zip" />
+    <EmbeddedResource Include="Index\moreterms.40.zip" />
+    <EmbeddedResource Include="Index\unsupported.19.cfs.zip" />
+    <EmbeddedResource Include="Index\unsupported.19.nocfs.zip" />
+    <EmbeddedResource Include="Index\unsupported.20.cfs.zip" />
+    <EmbeddedResource Include="Index\unsupported.20.nocfs.zip" />
+    <EmbeddedResource Include="Index\unsupported.21.cfs.zip" />
+    <EmbeddedResource Include="Index\unsupported.21.nocfs.zip" />
+    <EmbeddedResource Include="Index\unsupported.22.cfs.zip" />
+    <EmbeddedResource Include="Index\unsupported.22.nocfs.zip" />
+    <EmbeddedResource Include="Index\unsupported.23.cfs.zip" />
+    <EmbeddedResource Include="Index\unsupported.23.nocfs.zip" />
+    <EmbeddedResource Include="Index\unsupported.24.cfs.zip" />
+    <EmbeddedResource Include="Index\unsupported.24.nocfs.zip" />
+    <EmbeddedResource Include="Index\unsupported.29.cfs.zip" />
+    <EmbeddedResource Include="Index\unsupported.29.nocfs.zip" />
+    <None Include="Lucene.Net.snk" />
     <None Include="Lucene.Net.Tests.project.json" />
   </ItemGroup>
   <ItemGroup>
@@ -118,584 +118,584 @@
     </ProjectReference>
   </ItemGroup>
   <ItemGroup>
-    <Compile Include="core\Analysis\TestCachingTokenFilter.cs">
+    <Compile Include="Analysis\TestCachingTokenFilter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Analysis\TestCharFilter.cs">
+    <Compile Include="Analysis\TestCharFilter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Analysis\TestGraphTokenizers.cs">
-      <SubType>Code</SubType>
-    </Compile>
-    <Compile Include="core\Analysis\TestLookaheadTokenFilter.cs">
-      <SubType>Code</SubType>
-    </Compile>
-    <Compile Include="core\Analysis\TestMockAnalyzer.cs">
-      <SubType>Code</SubType>
-    </Compile>
-    <Compile Include="core\Analysis\TestMockCharFilter.cs">
-      <SubType>Code</SubType>
-    </Compile>
-    <Compile Include="core\Analysis\TestNumericTokenStream.cs">
-      <SubType>Code</SubType>
-    </Compile>
-    <Compile Include="core\Analysis\TestPosition.cs">
-      <SubType>Code</SubType>
-    </Compile>
-    <Compile Include="core\Analysis\TestReusableStringReader.cs">
-      <SubType>Code</SubType>
-    </Compile>
-    <Compile Include="core\Analysis\TestToken.cs">
-      <SubType>Code</SubType>
-    </Compile>
-    <Compile Include="core\Analysis\Tokenattributes\TestCharTermAttributeImpl.cs">
-      <SubType>Code</SubType>
-    </Compile>
-    <Compile Include="core\Analysis\Tokenattributes\TestSimpleAttributeImpl.cs">
-      <SubType>Code</SubType>
-    </Compile>
-    <Compile Include="core\Analysis\TrivialLookaheadFilter.cs">
-      <SubType>Code</SubType>
-    </Compile>
-    <Compile Include="core\AssemblyInfo.cs" />
-    <Compile Include="core\Codecs\Compressing\AbstractTestCompressionMode.cs" />
-    <Compile Include="core\Codecs\Compressing\AbstractTestLZ4CompressionMode.cs" />
-    <Compile Include="core\Codecs\Compressing\TestCompressingStoredFieldsFormat.cs" />
-    <Compile Include="core\Codecs\Compressing\TestCompressingTermVectorsFormat.cs" />
-    <Compile Include="core\Codecs\Compressing\TestFastCompressionMode.cs" />
-    <Compile Include="core\Codecs\Compressing\TestFastDecompressionMode.cs" />
-    <Compile Include="core\Codecs\Compressing\TestHighCompressionMode.cs" />
-    <Compile Include="core\Codecs\Lucene3x\TestImpersonation.cs" />
-    <Compile Include="core\Codecs\Lucene3x\TestLucene3xPostingsFormat.cs" />
-    <Compile Include="core\Codecs\Lucene3x\TestLucene3xStoredFieldsFormat.cs" />
-    <Compile Include="core\Codecs\Lucene3x\TestLucene3xTermVectorsFormat.cs" />
-    <Compile Include="core\Codecs\Lucene3x\TestSurrogates.cs" />
-    <Compile Include="core\Codecs\Lucene3x\TestTermInfosReaderIndex.cs" />
-    <Compile Include="core\Codecs\Lucene40\TestBitVector.cs" />
-    <Compile Include="core\Codecs\Lucene40\TestLucene40DocValuesFormat.cs" />
-    <Compile Include="core\Codecs\Lucene40\TestLucene40PostingsFormat.cs" />
-    <Compile Include="core\Codecs\Lucene40\TestLucene40PostingsReader.cs" />
-    <Compile Include="core\Codecs\Lucene40\TestLucene40StoredFieldsFormat.cs" />
-    <Compile Include="core\Codecs\Lucene40\TestLucene40TermVectorsFormat.cs" />
-    <Compile Include="core\Codecs\Lucene40\TestReuseDocsEnum.cs" />
-    <Compile Include="core\Codecs\Lucene41\TestBlockPostingsFormat.cs" />
-    <Compile Include="core\Codecs\Lucene41\TestBlockPostingsFormat2.cs" />
-    <Compile Include="core\Codecs\Lucene41\TestBlockPostingsFormat3.cs" />
-    <Compile Include="core\Codecs\Lucene41\TestForUtil.cs" />
-    <Compile Include="core\Codecs\Lucene41\TestLucene41StoredFieldsFormat.cs" />
-    <Compile Include="core\Codecs\Lucene42\TestLucene42DocValuesFormat.cs" />
-    <Compile Include="core\Codecs\Lucene45\TestLucene45DocValuesFormat.cs" />
-    <Compile Include="core\Codecs\PerField\TestPerFieldDocValuesFormat.cs" />
-    <Compile Include="core\Codecs\PerField\TestPerFieldPostingsFormat.cs" />
-    <Compile Include="core\Codecs\PerField\TestPerFieldPostingsFormat2.cs" />
-    <Compile Include="core\Document\TestBinaryDocument.cs" />
-    <Compile Include="core\Document\TestDateTools.cs" />
-    <Compile Include="core\Document\TestDocument.cs" />
-    <Compile Include="core\Document\TestField.cs" />
-    <Compile Include="core\Index\BinaryTokenStream.cs" />
-    <Compile Include="core\Index\SynchronizedList.cs" />
-    <Compile Include="core\Index\Test2BBinaryDocValues.cs" />
-    <Compile Include="core\Index\Test2BDocs.cs" />
-    <Compile Include="core\Index\Test2BNumericDocValues.cs" />
-    <Compile Include="core\Index\Test2BPositions.cs" />
-    <Compile Include="core\Index\Test2BPostings.cs" />
-    <Compile Include="core\Index\Test2BPostingsBytes.cs" />
-    <Compile Include="core\Index\Test2BSortedDocValues.cs" />
-    <Compile Include="core\Index\Test2BTerms.cs" />
-    <Compile Include="core\Index\Test4GBStoredFields.cs" />
-    <Compile Include="core\Index\TestAddIndexes.cs" />
-    <Compile Include="core\Index\TestAllFilesHaveChecksumFooter.cs" />
-    <Compile Include="core\Index\TestAllFilesHaveCodecHeader.cs" />
-    <Compile Include="core\Index\TestAtomicUpdate.cs" />
-    <Compile Include="core\Index\TestBackwardsCompatibility.cs" />
-    <Compile Include="core\Index\TestBackwardsCompatibility3x.cs" />
-    <Compile Include="core\Index\TestBagOfPositions.cs" />
-    <Compile Include="core\Index\TestBagOfPostings.cs" />
-    <Compile Include="core\Index\TestBinaryDocValuesUpdates.cs" />
-    <Compile Include="core\Index\TestBinaryTerms.cs" />
-    <Compile Include="core\Index\TestByteSlices.cs" />
-    <Compile Include="core\Index\TestCheckIndex.cs" />
-    <Compile Include="core\Index\TestCodecHoldsOpenFiles.cs" />
-    <Compile Include="core\Index\TestCodecs.cs" />
-    <Compile Include="core\Index\TestCompoundFile.cs" />
-    <Compile Include="core\Index\TestConcurrentMergeScheduler.cs" />
-    <Compile Include="core\Index\TestConsistentFieldNumbers.cs" />
-    <Compile Include="core\Index\TestCrash.cs" />
-    <Compile Include="core\Index\TestCrashCausesCorruptIndex.cs" />
-    <Compile Include="core\Index\TestCustomNorms.cs" />
-    <Compile Include="core\Index\TestDeletionPolicy.cs" />
-    <Compile Include="core\Index\TestDirectoryReader.cs" />
-    <Compile Include="core\Index\TestDirectoryReaderReopen.cs" />
-    <Compile Include="core\Index\TestDoc.cs" />
-    <Compile Include="core\Index\TestDocCount.cs" />
-    <Compile Include="core\Index\TestDocInverterPerFieldErrorInfo.cs" />
-    <Compile Include="core\Index\TestDocsAndPositions.cs" />
-    <Compile Include="core\Index\TestDocTermOrds.cs" />
-    <Compile Include="core\Index\TestDocumentsWriterDeleteQueue.cs" />
-    <Compile Include="core\Index\TestDocumentsWriterStallControl.cs" />
-    <Compile Include="core\Index\TestDocumentWriter.cs" />
-    <Compile Include="core\Index\TestDocValuesFormat.cs" />
-    <Compile Include="core\Index\TestDocValuesIndexing.cs" />
-    <Compile Include="core\Index\TestDocValuesWithThreads.cs" />
-    <Compile Include="core\Index\TestDuelingCodecs.cs" />
-    <Compile Include="core\Index\TestExceedMaxTermLength.cs" />
-    <Compile Include="core\Index\TestFieldInfos.cs" />
-    <Compile Include="core\Index\TestFieldsReader.cs" />
-    <Compile Include="core\Index\TestFilterAtomicReader.cs" />
-    <Compile Include="core\Index\TestFlex.cs" />
-    <Compile Include="core\Index\TestFlushByRamOrCountsPolicy.cs" />
-    <Compile Include="core\Index\TestForceMergeForever.cs" />
-    <Compile Include="core\Index\TestForTooMuchCloning.cs" />
-    <Compile Include="core\Index\TestIndexableField.cs" />
-    <Compile Include="core\Index\TestIndexCommit.cs" />
-    <Compile Include="core\Index\TestIndexFileDeleter.cs" />
-    <Compile Include="core\Index\TestIndexInput.cs" />
-    <Compile Include="core\Index\TestIndexReaderClose.cs" />
-    <Compile Include="core\Index\TestIndexWriter.cs" />
-    <Compile Include="core\Index\TestIndexWriterCommit.cs" />
-    <Compile Include="core\Index\TestIndexWriterConfig.cs" />
-    <Compile Include="core\Index\TestIndexWriterDelete.cs" />
-    <Compile Include="core\Index\TestIndexWriterExceptions.cs" />
-    <Compile Include="core\Index\TestIndexWriterForceMerge.cs" />
-    <Compile Include="core\Index\TestIndexWriterLockRelease.cs" />
-    <Compile Include="core\Index\TestIndexWriterMergePolicy.cs" />
-    <Compile Include="core\Index\TestIndexWriterMerging.cs" />
-    <Compile Include="core\Index\TestIndexWriterNRTIsCurrent.cs" />
-    <Compile Include="core\Index\TestIndexWriterOnDiskFull.cs" />
-    <Compile Include="core\Index\TestIndexWriterOnJRECrash.cs" />
-    <Compile Include="core\Index\TestIndexWriterOutOfFileDescriptors.cs" />
-    <Compile Include="core\Index\TestIndexWriterReader.cs" />
-    <Compile Include="core\Index\TestIndexWriterUnicode.cs" />
-    <Compile Include="core\Index\TestIndexWriterWithThreads.cs" />
-    <Compile Include="core\Index\TestIntBlockPool.cs" />
-    <Compile Include="core\Index\TestIsCurrent.cs" />
-    <Compile Include="core\Index\TestLazyProxSkipping.cs" />
-    <Compile Include="core\Index\TestLogMergePolicy.cs" />
-    <Compile Include="core\Index\TestLongPostings.cs" />
-    <Compile Include="core\Index\TestMaxTermFrequency.cs" />
-    <Compile Include="core\Index\TestMixedCodecs.cs" />
-    <Compile Include="core\Index\TestMixedDocValuesUpdates.cs" />
-    <Compile Include="core\Index\TestMultiDocValues.cs" />
-    <Compile Include="core\Index\TestMultiFields.cs" />
-    <Compile Include="core\Index\TestMultiLevelSkipList.cs" />
-    <Compile Include="core\Index\TestNeverDelete.cs" />
-    <Compile Include="core\Index\TestNewestSegment.cs" />
-    <Compile Include="core\Index\TestNoDeletionPolicy.cs" />
-    <Compile Include="core\Index\TestNoMergePolicy.cs" />
-    <Compile Include="core\Index\TestNoMergeScheduler.cs" />
-    <Compile Include="core\Index\TestNorms.cs" />
-    <Compile Include="core\Index\TestNRTReaderWithThreads.cs" />
-    <Compile Include="core\Index\TestNRTThreads.cs" />
-    <Compile Include="core\Index\TestNumericDocValuesUpdates.cs" />
-    <Compile Include="core\Index\TestOmitNorms.cs" />
-    <Compile Include="core\Index\TestOmitPositions.cs" />
-    <Compile Include="core\Index\TestOmitTf.cs" />
-    <Compile Include="core\Index\TestParallelAtomicReader.cs" />
-    <Compile Include="core\Index\TestParallelCompositeReader.cs" />
-    <Compile Include="core\Index\TestParallelReaderEmptyIndex.cs" />
-    <Compile Include="core\Index\TestParallelTermEnum.cs" />
-    <Compile Include="core\Index\TestPayloads.cs" />
-    <Compile Include="core\Index\TestPayloadsOnVectors.cs" />
-    <Compile Include="core\Index\TestPerSegmentDeletes.cs" />
-    <Compile Include="core\Index\TestPersistentSnapshotDeletionPolicy.cs" />
-    <Compile Include="core\Index\TestPostingsFormat.cs" />
-    <Compile Include="core\Index\TestPostingsOffsets.cs" />
-    <Compile Include="core\Index\TestPrefixCodedTerms.cs" />
-    <Compile Include="core\Index\TestReaderClosed.cs" />
-    <Compile Include="core\Index\TestRollback.cs" />
-    <Compile Include="core\Index\TestRollingUpdates.cs" />
-    <Compile Include="core\Index\TestSameTokenSamePosition.cs" />
-    <Compile Include="core\Index\TestSegmentMerger.cs" />
-    <Compile Include="core\Index\TestSegmentReader.cs" />
-    <Compile Include="core\Index\TestSegmentTermDocs.cs" />
-    <Compile Include="core\Index\TestSegmentTermEnum.cs" />
-    <Compile Include="core\Index\TestSizeBoundedForceMerge.cs" />
-    <Compile Include="core\Index\TestSnapshotDeletionPolicy.cs" />
-    <Compile Include="core\Index\TestStoredFieldsFormat.cs" />
-    <Compile Include="core\Index\TestStressAdvance.cs" />
-    <Compile Include="core\Index\TestStressIndexing.cs" />
-    <Compile Include="core\Index\TestStressIndexing2.cs" />
-    <Compile Include="core\Index\TestStressNRT.cs" />
-    <Compile Include="core\Index\TestSumDocFreq.cs" />
-    <Compile Include="core\Index\TestTerm.cs" />
-    <Compile Include="core\Index\TestTermdocPerf.cs" />
-    <Compile Include="core\Index\TestTermsEnum.cs" />
-    <Compile Include="core\Index\TestTermsEnum2.cs" />
-    <Compile Include="core\Index\TestTermVectorsFormat.cs" />
-    <Compile Include="core\Index\TestTermVectorsReader.cs" />
-    <Compile Include="core\Index\TestTermVectorsWriter.cs" />
-    <Compile Include="core\Index\TestThreadedForceMerge.cs" />
-    <Compile Include="core\Index\TestTieredMergePolicy.cs" />
-    <Compile Include="core\Index\TestTransactionRollback.cs" />
-    <Compile Include="core\Index\TestTransactions.cs" />
-    <Compile Include="core\Index\TestTryDelete.cs" />
-    <Compile Include="core\Index\TestTwoPhaseCommitTool.cs" />
-    <Compile Include="core\Index\TestUniqueTermCount.cs" />
-    <Compile Include="core\RectangularArrays.cs">
-      <SubType>Code</SubType>
-    </Compile>
-    <Compile Include="core\Search\BaseTestRangeFilter.cs" />
-    <Compile Include="core\Search\FuzzyTermOnShortTermsTest.cs" />
-    <Compile Include="core\Search\JustCompileSearch.cs" />
-    <Compile Include="core\Search\MockFilter.cs" />
-    <Compile Include="core\Search\MultiCollectorTest.cs" />
-    <Compile Include="core\Search\Payloads\PayloadHelper.cs" />
-    <Compile Include="core\Search\Payloads\TestPayloadExplanations.cs" />
-    <Compile Include="core\Search\Payloads\TestPayloadNearQuery.cs" />
-    <Compile Include="core\Search\Payloads\TestPayloadTermQuery.cs" />
-    <Compile Include="core\Search\Similarities\TestSimilarity2.cs" />
-    <Compile Include="core\Search\Similarities\TestSimilarityBase.cs" />
-    <Compile Include="core\Search\SingleDocTestFilter.cs" />
-    <Compile Include="core\Search\Spans\JustCompileSearchSpans.cs" />
-    <Compile Include="core\Search\Spans\MultiSpansWrapper.cs" />
-    <Compile Include="core\Search\Spans\TestBasics.cs" />
-    <Compile Include="core\Search\Spans\TestFieldMaskingSpanQuery.cs" />
-    <Compile Include="core\Search\Spans\TestNearSpansOrdered.cs" />
-    <Compile Include="core\Search\Spans\TestPayloadSpans.cs" />
-    <Compile Include="core\Search\Spans\TestSpanExplanations.cs" />
-    <Compile Include="core\Search\Spans\TestSpanExplanationsOfNonMatches.cs" />
-    <Compile Include="core\Search\Spans\TestSpanFirstQuery.cs" />
-    <Compile Include="core\Search\Spans\TestSpanMultiTermQueryWrapper.cs" />
-    <Compile Include="core\Search\Spans\TestSpans.cs" />
-    <Compile Include="core\Search\Spans\TestSpansAdvanced.cs" />
-    <Compile Include="core\Search\Spans\TestSpansAdvanced2.cs" />
-    <Compile Include="core\Search\Spans\TestSpanSearchEquivalence.cs" />
-    <Compile Include="core\Search\TestAutomatonQuery.cs" />
-    <Compile Include="core\Search\TestAutomatonQueryUnicode.cs" />
-    <Compile Include="core\Search\TestBoolean2.cs" />
-    <Compile Include="core\Search\TestBooleanMinShouldMatch.cs" />
-    <Compile Include="core\Search\TestBooleanOr.cs" />
-    <Compile Include="core\Search\TestBooleanQuery.cs" />
-    <Compile Include="core\Search\TestBooleanQueryVisitSubscorers.cs" />
-    <Compile Include="core\Search\TestBooleanScorer.cs" />
-    <Compile Include="core\Search\TestCachingCollector.cs" />
-    <Compile Include="core\Search\TestCachingWrapperFilter.cs" />
-    <Compile Include="core\Search\TestComplexExplanations.cs" />
-    <Compile Include="core\Search\TestComplexExplanationsOfNonMatches.cs" />
-    <Compile Include="core\Search\TestConjunctions.cs" />
-    <Compile Include="core\Search\TestConstantScoreQuery.cs" />
-    <Compile Include="core\Search\TestControlledRealTimeReopenThread.cs" />
-    <Compile Include="core\Search\TestCustomSearcherSort.cs" />
-    <Compile Include="core\Search\TestDateFilter.cs" />
-    <Compile Include="core\Search\TestDateSort.cs" />
-    <Compile Include="core\Search\TestDisjunctionMaxQuery.cs" />
-    <Compile Include="core\Search\TestDocBoost.cs" />
-    <Compile Include="core\Search\TestDocIdSet.cs" />
-    <Compile Include="core\Search\TestDocTermOrdsRangeFilter.cs" />
-    <Compile Include="core\Search\TestDocTermOrdsRewriteMethod.cs" />
-    <Compile Include="core\Search\TestDocValuesScoring.cs" />
-    <Compile Include="core\Search\TestEarlyTermination.cs" />
-    <Compile Include="core\Search\TestElevationComparator.cs" />
-    <Compile Include="core\Search\TestExplanations.cs" />
-    <Compile Include="core\Search\TestFieldCache.cs" />
-    <Compile Include="core\Search\TestFieldCacheRangeFilter.cs" />
-    <Compile Include="core\Search\TestFieldCacheRewriteMethod.cs" />
-    <Compile Include="core\Search\TestFieldCacheTermsFilter.cs" />
-    <Compile Include="core\Search\TestFieldValueFilter.cs" />
-    <Compile Include="core\Search\TestFilteredQuery.cs" />
-    <Compile Include="core\Search\TestFilteredSearch.cs" />
-    <Compile Include="core\Search\TestFuzzyQuery.cs" />
-    <Compile Include="core\Search\TestIndexSearcher.cs" />
-    <Compile Include="core\Search\TestLiveFieldValues.cs" />
-    <Compile Include="core\Search\TestMatchAllDocsQuery.cs" />
-    <Compile Include="core\Search\TestMinShouldMatch2.cs" />
-    <Compile Include="core\Search\TestMultiPhraseQuery.cs" />
-    <Compile Include="core\Search\TestMultiTermConstantScore.cs" />
-    <Compile Include="core\Search\TestMultiTermQueryRewrites.cs" />
-    <Compile Include="core\Search\TestMultiThreadTermVectors.cs" />
-    <Compile Include="core\Search\TestMultiValuedNumericRangeQuery.cs" />
-    <Compile Include="core\Search\TestNGramPhraseQuery.cs" />
-    <Compile Include="core\Search\TestNot.cs" />
-    <Compile Include="core\Search\TestNumericRangeQuery32.cs" />
-    <Compile Include="core\Search\TestNumericRangeQuery64.cs" />
-    <Compile Include="core\Search\TestPhrasePrefixQuery.cs" />
-    <Compile Include="core\Search\TestPhraseQuery.cs" />
-    <Compile Include="core\Search\TestPositionIncrement.cs" />
-    <Compile Include="core\Search\TestPositiveScoresOnlyCollector.cs" />
-    <Compile Include="core\Search\TestPrefixFilter.cs" />
-    <Compile Include="core\Search\TestPrefixInBooleanQuery.cs" />
-    <Compile Include="core\Search\TestPrefixQuery.cs" />
-    <Compile Include="core\Search\TestPrefixRandom.cs" />
-    <Compile Include="core\Search\TestQueryRescorer.cs" />
-    <Compile Include="core\Search\TestQueryWrapperFilter.cs" />
-    <Compile Include="core\Search\TestRegexpQuery.cs" />
-    <Compile Include="core\Search\TestRegexpRandom.cs" />
-    <Compile Include="core\Search\TestRegexpRandom2.cs" />
-    <Compile Include="core\Search\TestSameScoresWithThreads.cs" />
-    <Compile Include="core\Search\TestScoreCachingWrappingScorer.cs" />
-    <Compile Include="core\Search\TestScorerPerf.cs" />
-    <Compile Include="core\Search\TestSearchAfter.cs" />
-    <Compile Include="core\Search\TestSearcherManager.cs" />
-    <Compile Include="core\Search\TestSearchWithThreads.cs" />
-    <Compile Include="core\Search\TestShardSearching.cs" />
-    <Compile Include="core\Search\TestSimilarity.cs" />
-    <Compile Include="core\Search\TestSimilarityProvider.cs" />
-    <Compile Include="core\Search\TestSimpleExplanations.cs" />
-    <Compile Include="core\Search\TestSimpleExplanationsOfNonMatches.cs" />
-    <Compile Include="core\Search\TestSimpleSearchEquivalence.cs" />
-    <Compile Include="core\Search\TestSloppyPhraseQuery.cs" />
-    <Compile Include="core\Search\TestSloppyPhraseQuery2.cs" />
-    <Compile Include="core\Search\TestSort.cs" />
-    <Compile Include="core\Search\TestSortDocValues.cs" />
-    <Compile Include="core\Search\TestSortRandom.cs" />
-    <Compile Include="core\Search\TestSortRescorer.cs" />
-    <Compile Include="core\Search\TestSubScorerFreqs.cs" />
-    <Compile Include="core\Search\TestTermRangeFilter.cs" />
-    <Compile Include="core\Search\TestTermRangeQuery.cs" />
-    <Compile Include="core\Search\TestTermScorer.cs" />
-    <Compile Include="core\Search\TestTermVectors.cs" />
-    <Compile Include="core\Search\TestTimeLimitingCollector.cs" />
-    <Compile Include="core\Search\TestTopDocsCollector.cs" />
-    <Compile Include="core\Search\TestTopDocsMerge.cs" />
-    <Compile Include="core\Search\TestTopFieldCollector.cs" />
-    <Compile Include="core\Search\TestTopScoreDocCollector.cs" />
-    <Compile Include="core\Search\TestTotalHitCountCollector.cs" />
-    <Compile Include="core\Search\TestWildcard.cs" />
-    <Compile Include="core\Search\TestWildcardRandom.cs" />
-    <Compile Include="core\Store\TestBufferedChecksum.cs" />
-    <Compile Include="core\Store\TestBufferedIndexInput.cs" />
-    <Compile Include="core\Store\TestByteArrayDataInput.cs" />
-    <Compile Include="core\Store\TestCopyBytes.cs" />
-    <Compile Include="core\Store\TestDirectory.cs" />
-    <Compile Include="core\Store\TestFileSwitchDirectory.cs" />
-    <Compile Include="core\Store\TestFilterDirectory.cs" />
-    <Compile Include="core\Store\TestHugeRamFile.cs" />
-    <Compile Include="core\Store\TestLock.cs" />
-    <Compile Include="core\Store\TestLockFactory.cs" />
-    <Compile Include="core\Store\TestMockDirectoryWrapper.cs" />
-    <Compile Include="core\Store\TestMultiMMap.cs" />
-    <Compile Include="core\Store\TestNRTCachingDirectory.cs" />
-    <Compile Include="core\Store\TestRAMDirectory.cs" />
-    <Compile Include="core\Store\TestRateLimiter.cs" />
-    <Compile Include="core\Store\TestWindowsMMap.cs" />
-    <Compile Include="core\SupportClassException.cs">
-      <SubType>Code</SubType>
-    </Compile>
-    <Compile Include="core\Support\BaseBufferTestCase.cs" />
-    <Compile Include="core\Support\C5\ArrayBase.cs" />
-    <Compile Include="core\Support\C5\ArrayList.cs" />
-    <Compile Include="core\Support\C5\DropMultiplicity.cs" />
-    <Compile Include="core\Support\C5\Events.cs" />
-    <Compile Include="core\Support\C5\GenericCollectionTester.cs" />
-    <Compile Include="core\Support\C5\HashBag.cs" />
-    <Compile Include="core\Support\C5\Sorting.cs" />
-    <Compile Include="core\Support\C5\SupportClasses.cs" />
-    <Compile Include="core\Support\C5\WeakViewList.cs" />
-    <Compile Include="core\Support\C5\Wrappers.cs" />
-    <Compile Include="core\Support\TestHashMap.cs" />
-    <Compile Include="core\Support\TestLinkedHashMap.cs" />
-    <Compile Include="core\Support\TestLongBuffer.cs" />
-    <Compile Include="core\Support\TestByteBuffer.cs" />
-    <Compile Include="core\Support\TestLurchTable.cs" />
-    <Compile Include="core\Support\TestLurchTableThreading.cs" />
-    <Compile Include="core\Support\TestDataInputStream.cs" />
-    <Compile Include="core\Support\TestDataOutputStream.cs" />
-    <Compile Include="core\Support\TestSafeTextWriterWrapper.cs" />
-    <Compile Include="core\Support\TestStringBuilderExtensions.cs" />
-    <Compile Include="core\Support\TestTreeDictionary.cs" />
-    <Compile Include="core\Support\TestTreeSet.cs" />
-    <Compile Include="core\TestApiConsistency.cs" />
-    <Compile Include="core\TestAssertions.cs">
+    <Compile Include="Analysis\TestGraphTokenizers.cs">
+      <SubType>Code</SubType>
+    </Compile>
+    <Compile Include="Analysis\TestLookaheadTokenFilter.cs">
+      <SubType>Code</SubType>
+    </Compile>
+    <Compile Include="Analysis\TestMockAnalyzer.cs">
+      <SubType>Code</SubType>
+    </Compile>
+    <Compile Include="Analysis\TestMockCharFilter.cs">
+      <SubType>Code</SubType>
+    </Compile>
+    <Compile Include="Analysis\TestNumericTokenStream.cs">
+      <SubType>Code</SubType>
+    </Compile>
+    <Compile Include="Analysis\TestPosition.cs">
+      <SubType>Code</SubType>
+    </Compile>
+    <Compile Include="Analysis\TestReusableStringReader.cs">
+      <SubType>Code</SubType>
+    </Compile>
+    <Compile Include="Analysis\TestToken.cs">
+      <SubType>Code</SubType>
+    </Compile>
+    <Compile Include="Analysis\TokenAttributes\TestCharTermAttributeImpl.cs">
+      <SubType>Code</SubType>
+    </Compile>
+    <Compile Include="Analysis\TokenAttributes\TestSimpleAttributeImpl.cs">
+      <SubType>Code</SubType>
+    </Compile>
+    <Compile Include="Analysis\TrivialLookaheadFilter.cs">
+      <SubType>Code</SubType>
+    </Compile>
+    <Compile Include="AssemblyInfo.cs" />
+    <Compile Include="Codecs\Compressing\AbstractTestCompressionMode.cs" />
+    <Compile Include="Codecs\Compressing\AbstractTestLZ4CompressionMode.cs" />
+    <Compile Include="Codecs\Compressing\TestCompressingStoredFieldsFormat.cs" />
+    <Compile Include="Codecs\Compressing\TestCompressingTermVectorsFormat.cs" />
+    <Compile Include="Codecs\Compressing\TestFastCompressionMode.cs" />
+    <Compile Include="Codecs\Compressing\TestFastDecompressionMode.cs" />
+    <Compile Include="Codecs\Compressing\TestHighCompressionMode.cs" />
+    <Compile Include="Codecs\Lucene3x\TestImpersonation.cs" />
+    <Compile Include="Codecs\Lucene3x\TestLucene3xPostingsFormat.cs" />
+    <Compile Include="Codecs\Lucene3x\TestLucene3xStoredFieldsFormat.cs" />
+    <Compile Include="Codecs\Lucene3x\TestLucene3xTermVectorsFormat.cs" />
+    <Compile Include="Codecs\Lucene3x\TestSurrogates.cs" />
+    <Compile Include="Codecs\Lucene3x\TestTermInfosReaderIndex.cs" />
+    <Compile Include="Codecs\Lucene40\TestBitVector.cs" />
+    <Compile Include="Codecs\Lucene40\TestLucene40DocValuesFormat.cs" />
+    <Compile Include="Codecs\Lucene40\TestLucene40PostingsFormat.cs" />
+    <Compile Include="Codecs\Lucene40\TestLucene40PostingsReader.cs" />
+    <Compile Include="Codecs\Lucene40\TestLucene40StoredFieldsFormat.cs" />
+    <Compile Include="Codecs\Lucene40\TestLucene40TermVectorsFormat.cs" />
+    <Compile Include="Codecs\Lucene40\TestReuseDocsEnum.cs" />
+    <Compile Include="Codecs\Lucene41\TestBlockPostingsFormat.cs" />
+    <Compile Include="Codecs\Lucene41\TestBlockPostingsFormat2.cs" />
+    <Compile Include="Codecs\Lucene41\TestBlockPostingsFormat3.cs" />
+    <Compile Include="Codecs\Lucene41\TestForUtil.cs" />
+    <Compile Include="Codecs\Lucene41\TestLucene41StoredFieldsFormat.cs" />
+    <Compile Include="Codecs\Lucene42\TestLucene42DocValuesFormat.cs" />
+    <Compile Include="Codecs\Lucene45\TestLucene45DocValuesFormat.cs" />
+    <Compile Include="Codecs\PerField\TestPerFieldDocValuesFormat.cs" />
+    <Compile Include="Codecs\PerField\TestPerFieldPostingsFormat.cs" />
+    <Compile Include="Codecs\PerField\TestPerFieldPostingsFormat2.cs" />
+    <Compile Include="Document\TestBinaryDocument.cs" />
+    <Compile Include="Document\TestDateTools.cs" />
+    <Compile Include="Document\TestDocument.cs" />
+    <Compile Include="Document\TestField.cs" />
+    <Compile Include="Index\BinaryTokenStream.cs" />
+    <Compile Include="Index\SynchronizedList.cs" />
+    <Compile Include="Index\Test2BBinaryDocValues.cs" />
+    <Compile Include="Index\Test2BDocs.cs" />
+    <Compile Include="Index\Test2BNumericDocValues.cs" />
+    <Compile Include="Index\Test2BPositions.cs" />
+    <Compile Include="Index\Test2BPostings.cs" />
+    <Compile Include="Index\Test2BPostingsBytes.cs" />
+    <Compile Include="Index\Test2BSortedDocValues.cs" />
+    <Compile Include="Index\Test2BTerms.cs" />
+    <Compile Include="Index\Test4GBStoredFields.cs" />
+    <Compile Include="Index\TestAddIndexes.cs" />
+    <Compile Include="Index\TestAllFilesHaveChecksumFooter.cs" />
+    <Compile Include="Index\TestAllFilesHaveCodecHeader.cs" />
+    <Compile Include="Index\TestAtomicUpdate.cs" />
+    <Compile Include="Index\TestBackwardsCompatibility.cs" />
+    <Compile Include="Index\TestBackwardsCompatibility3x.cs" />
+    <Compile Include="Index\TestBagOfPositions.cs" />
+    <Compile Include="Index\TestBagOfPostings.cs" />
+    <Compile Include="Index\TestBinaryDocValuesUpdates.cs" />
+    <Compile Include="Index\TestBinaryTerms.cs" />
+    <Compile Include="Index\TestByteSlices.cs" />
+    <Compile Include="Index\TestCheckIndex.cs" />
+    <Compile Include="Index\TestCodecHoldsOpenFiles.cs" />
+    <Compile Include="Index\TestCodecs.cs" />
+    <Compile Include="Index\TestCompoundFile.cs" />
+    <Compile Include="Index\TestConcurrentMergeScheduler.cs" />
+    <Compile Include="Index\TestConsistentFieldNumbers.cs" />
+    <Compile Include="Index\TestCrash.cs" />
+    <Compile Include="Index\TestCrashCausesCorruptIndex.cs" />
+    <Compile Include="Index\TestCustomNorms.cs" />
+    <Compile Include="Index\TestDeletionPolicy.cs" />
+    <Compile Include="Index\TestDirectoryReader.cs" />
+    <Compile Include="Index\TestDirectoryReaderReopen.cs" />
+    <Compile Include="Index\TestDoc.cs" />
+    <Compile Include="Index\TestDocCount.cs" />
+    <Compile Include="Index\TestDocInverterPerFieldErrorInfo.cs" />
+    <Compile Include="Index\TestDocsAndPositions.cs" />
+    <Compile Include="Index\TestDocTermOrds.cs" />
+    <Compile Include="Index\TestDocumentsWriterDeleteQueue.cs" />
+    <Compile Include="Index\TestDocumentsWriterStallControl.cs" />
+    <Compile Include="Index\TestDocumentWriter.cs" />
+    <Compile Include="Index\TestDocValuesFormat.cs" />
+    <Compile Include="Index\TestDocValuesIndexing.cs" />
+    <Compile Include="Index\TestDocValuesWithThreads.cs" />
+    <Compile Include="Index\TestDuelingCodecs.cs" />
+    <Compile Include="Index\TestExceedMaxTermLength.cs" />
+    <Compile Include="Index\TestFieldInfos.cs" />
+    <Compile Include="Index\TestFieldsReader.cs" />
+    <Compile Include="Index\TestFilterAtomicReader.cs" />
+    <Compile Include="Index\TestFlex.cs" />
+    <Compile Include="Index\TestFlushByRamOrCountsPolicy.cs" />
+    <Compile Include="Index\TestForceMergeForever.cs" />
+    <Compile Include="Index\TestForTooMuchCloning.cs" />
+    <Compile Include="Index\TestIndexableField.cs" />
+    <Compile Include="Index\TestIndexCommit.cs" />
+    <Compile Include="Index\TestIndexFileDeleter.cs" />
+    <Compile Include="Index\TestIndexInput.cs" />
+    <Compile Include="Index\TestIndexReaderClose.cs" />
+    <Compile Include="Index\TestIndexWriter.cs" />
+    <Compile Include="Index\TestIndexWriterCommit.cs" />
+    <Compile Include="Index\TestIndexWriterConfig.cs" />
+    <Compile Include="Index\TestIndexWriterDelete.cs" />
+    <Compile Include="Index\TestIndexWriterExceptions.cs" />
+    <Compile Include="Index\TestIndexWriterForceMerge.cs" />
+    <Compile Include="Index\TestIndexWriterLockRelease.cs" />
+    <Compile Include="Index\TestIndexWriterMergePolicy.cs" />
+    <Compile Include="Index\TestIndexWriterMerging.cs" />
+    <Compile Include="Index\TestIndexWriterNRTIsCurrent.cs" />
+    <Compile Include="Index\TestIndexWriterOnDiskFull.cs" />
+    <Compile Include="Index\TestIndexWriterOnJRECrash.cs" />
+    <Compile Include="Index\TestIndexWriterOutOfFileDescriptors.cs" />
+    <Compile Include="Index\TestIndexWriterReader.cs" />
+    <Compile Include="Index\TestIndexWriterUnicode.cs" />
+    <Compile Include="Index\TestIndexWriterWithThreads.cs" />
+    <Compile Include="Index\TestIntBlockPool.cs" />
+    <Compile Include="Index\TestIsCurrent.cs" />
+    <Compile Include="Index\TestLazyProxSkipping.cs" />
+    <Compile Include="Index\TestLogMergePolicy.cs" />
+    <Compile Include="Index\TestLongPostings.cs" />
+    <Compile Include="Index\TestMaxTermFrequency.cs" />
+    <Compile Include="Index\TestMixedCodecs.cs" />
+    <Compile Include="Index\TestMixedDocValuesUpdates.cs" />
+    <Compile Include="Index\TestMultiDocValues.cs" />
+    <Compile Include="Index\TestMultiFields.cs" />
+    <Compile Include="Index\TestMultiLevelSkipList.cs" />
+    <Compile Include="Index\TestNeverDelete.cs" />
+    <Compile Include="Index\TestNewestSegment.cs" />
+    <Compile Include="Index\TestNoDeletionPolicy.cs" />
+    <Compile Include="Index\TestNoMergePolicy.cs" />
+    <Compile Include="Index\TestNoMergeScheduler.cs" />
+    <Compile Include="Index\TestNorms.cs" />
+    <Compile Include="Index\TestNRTReaderWithThreads.cs" />
+    <Compile Include="Index\TestNRTThreads.cs" />
+    <Compile Include="Index\TestNumericDocValuesUpdates.cs" />
+    <Compile Include="Index\TestOmitNorms.cs" />
+    <Compile Include="Index\TestOmitPositions.cs" />
+    <Compile Include="Index\TestOmitTf.cs" />
+    <Compile Include="Index\TestParallelAtomicReader.cs" />
+    <Compile Include="Index\TestParallelCompositeReader.cs" />
+    <Compile Include="Index\TestParallelReaderEmptyIndex.cs" />
+    <Compile Include="Index\TestParallelTermEnum.cs" />
+    <Compile Include="Index\TestPayloads.cs" />
+    <Compile Include="Index\TestPayloadsOnVectors.cs" />
+    <Compile Include="Index\TestPerSegmentDeletes.cs" />
+    <Compile Include="Index\TestPersistentSnapshotDeletionPolicy.cs" />
+    <Compile Include="Index\TestPostingsFormat.cs" />
+    <Compile Include="Index\TestPostingsOffsets.cs" />
+    <Compile Include="Index\TestPrefixCodedTerms.cs" />
+    <Compile Include="Index\TestReaderClosed.cs" />
+    <Compile Include="Index\TestRollback.cs" />
+    <Compile Include="Index\TestRollingUpdates.cs" />
+    <Compile Include="Index\TestSameTokenSamePosition.cs" />
+    <Compile Include="Index\TestSegmentMerger.cs" />
+    <Compile Include="Index\TestSegmentReader.cs" />
+    <Compile Include="Index\TestSegmentTermDocs.cs" />
+    <Compile Include="Index\TestSegmentTermEnum.cs" />
+    <Compile Include="Index\TestSizeBoundedForceMerge.cs" />
+    <Compile Include="Index\TestSnapshotDeletionPolicy.cs" />
+    <Compile Include="Index\TestStoredFieldsFormat.cs" />
+    <Compile Include="Index\TestStressAdvance.cs" />
+    <Compile Include="Index\TestStressIndexing.cs" />
+    <Compile Include="Index\TestStressIndexing2.cs" />
+    <Compile Include="Index\TestStressNRT.cs" />
+    <Compile Include="Index\TestSumDocFreq.cs" />
+    <Compile Include="Index\TestTerm.cs" />
+    <Compile Include="Index\TestTermdocPerf.cs" />
+    <Compile Include="Index\TestTermsEnum.cs" />
+    <Compile Include="Index\TestTermsEnum2.cs" />
+    <Compile Include="Index\TestTermVectorsFormat.cs" />
+    <Compile Include="Index\TestTermVectorsReader.cs" />
+    <Compile Include="Index\TestTermVectorsWriter.cs" />
+    <Compile Include="Index\TestThreadedForceMerge.cs" />
+    <Compile Include="Index\TestTieredMergePolicy.cs" />
+    <Compile Include="Index\TestTransactionRollback.cs" />
+    <Compile Include="Index\TestTransactions.cs" />
+    <Compile Include="Index\TestTryDelete.cs" />
+    <Compile Include="Index\TestTwoPhaseCommitTool.cs" />
+    <Compile Include="Index\TestUniqueTermCount.cs" />
+    <Compile Include="RectangularArrays.cs">
+      <SubType>Code</SubType>
+    </Compile>
+    <Compile Include="Search\BaseTestRangeFilter.cs" />
+    <Compile Include="Search\FuzzyTermOnShortTermsTest.cs" />
+    <Compile Include="Search\JustCompileSearch.cs" />
+    <Compile Include="Search\MockFilter.cs" />
+    <Compile Include="Search\MultiCollectorTest.cs" />
+    <Compile Include="Search\Payloads\PayloadHelper.cs" />
+    <Compile Include="Search\Payloads\TestPayloadExplanations.cs" />
+    <Compile Include="Search\Payloads\TestPayloadNearQuery.cs" />
+    <Compile Include="Search\Payloads\TestPayloadTermQuery.cs" />
+    <Compile Include="Search\Similarities\TestSimilarity2.cs" />
+    <Compile Include="Search\Similarities\TestSimilarityBase.cs" />
+    <Compile Include="Search\SingleDocTestFilter.cs" />
+    <Compile Include="Search\Spans\JustCompileSearchSpans.cs" />
+    <Compile Include="Search\Spans\MultiSpansWrapper.cs" />
+    <Compile Include="Search\Spans\TestBasics.cs" />
+    <Compile Include="Search\Spans\TestFieldMaskingSpanQuery.cs" />
+    <Compile Include="Search\Spans\TestNearSpansOrdered.cs" />
+    <Compile Include="Search\Spans\TestPayloadSpans.cs" />
+    <Compile Include="Search\Spans\TestSpanExplanations.cs" />
+    <Compile Include="Search\Spans\TestSpanExplanationsOfNonMatches.cs" />
+    <Compile Include="Search\Spans\TestSpanFirstQuery.cs" />
+    <Compile Include="Search\Spans\TestSpanMultiTermQueryWrapper.cs" />
+    <Compile Include="Search\Spans\TestSpans.cs" />
+    <Compile Include="Search\Spans\TestSpansAdvanced.cs" />
+    <Compile Include="Search\Spans\TestSpansAdvanced2.cs" />
+    <Compile Include="Search\Spans\TestSpanSearchEquivalence.cs" />
+    <Compile Include="Search\TestAutomatonQuery.cs" />
+    <Compile Include="Search\TestAutomatonQueryUnicode.cs" />
+    <Compile Include="Search\TestBoolean2.cs" />
+    <Compile Include="Search\TestBooleanMinShouldMatch.cs" />
+    <Compile Include="Search\TestBooleanOr.cs" />
+    <Compile Include="Search\TestBooleanQuery.cs" />
+    <Compile Include="Search\TestBooleanQueryVisitSubscorers.cs" />
+    <Compile Include="Search\TestBooleanScorer.cs" />
+    <Compile Include="Search\TestCachingCollector.cs" />
+    <Compile Include="Search\TestCachingWrapperFilter.cs" />
+    <Compile Include="Search\TestComplexExplanations.cs" />
+    <Compile Include="Search\TestComplexExplanationsOfNonMatches.cs" />
+    <Compile Include="Search\TestConjunctions.cs" />
+    <Compile Include="Search\TestConstantScoreQuery.cs" />
+    <Compile Include="Search\TestControlledRealTimeReopenThread.cs" />
+    <Compile Include="Search\TestCustomSearcherSort.cs" />
+    <Compile Include="Search\TestDateFilter.cs" />
+    <Compile Include="Search\TestDateSort.cs" />
+    <Compile Include="Search\TestDisjunctionMaxQuery.cs" />
+    <Compile Include="Search\TestDocBoost.cs" />
+    <Compile Include="Search\TestDocIdSet.cs" />
+    <Compile Include="Search\TestDocTermOrdsRangeFilter.cs" />
+    <Compile Include="Search\TestDocTermOrdsRewriteMethod.cs" />
+    <Compile Include="Search\TestDocValuesScoring.cs" />
+    <Compile Include="Search\TestEarlyTermination.cs" />
+    <Compile Include="Search\TestElevationComparator.cs" />
+    <Compile Include="Search\TestExplanations.cs" />
+    <Compile Include="Search\TestFieldCache.cs" />
+    <Compile Include="Search\TestFieldCacheRangeFilter.cs" />
+    <Compile Include="Search\TestFieldCacheRewriteMethod.cs" />
+    <Compile Include="Search\TestFieldCacheTermsFilter.cs" />
+    <Compile Include="Search\TestFieldValueFilter.cs" />
+    <Compile Include="Search\TestFilteredQuery.cs" />
+    <Compile Include="Search\TestFilteredSearch.cs" />
+    <Compile Include="Search\TestFuzzyQuery.cs" />
+    <Compile Include="Search\TestIndexSearcher.cs" />
+    <Compile Include="Search\TestLiveFieldValues.cs" />
+    <Compile Include="Search\TestMatchAllDocsQuery.cs" />
+    <Compile Include="Search\TestMinShouldMatch2.cs" />
+    <Compile Include="Search\TestMultiPhraseQuery.cs" />
+    <Compile Include="Search\TestMultiTermConstantScore.cs" />
+    <Compile Include="Search\TestMultiTermQueryRewrites.cs" />
+    <Compile Include="Search\TestMultiThreadTermVectors.cs" />
+    <Compile Include="Search\TestMultiValuedNumericRangeQuery.cs" />
+    <Compile Include="Search\TestNGramPhraseQuery.cs" />
+    <Compile Include="Search\TestNot.cs" />
+    <Compile Include="Search\TestNumericRangeQuery32.cs" />
+    <Compile Include="Search\TestNumericRangeQuery64.cs" />
+    <Compile Include="Search\TestPhrasePrefixQuery.cs" />
+    <Compile Include="Search\TestPhraseQuery.cs" />
+    <Compile Include="Search\TestPositionIncrement.cs" />
+    <Compile Include="Search\TestPositiveScoresOnlyCollector.cs" />
+    <Compile Include="Search\TestPrefixFilter.cs" />
+    <Compile Include="Search\TestPrefixInBooleanQuery.cs" />
+    <Compile Include="Search\TestPrefixQuery.cs" />
+    <Compile Include="Search\TestPrefixRandom.cs" />
+    <Compile Include="Search\TestQueryRescorer.cs" />
+    <Compile Include="Search\TestQueryWrapperFilter.cs" />
+    <Compile Include="Search\TestRegexpQuery.cs" />
+    <Compile Include="Search\TestRegexpRandom.cs" />
+    <Compile Include="Search\TestRegexpRandom2.cs" />
+    <Compile Include="Search\TestSameScoresWithThreads.cs" />
+    <Compile Include="Search\TestScoreCachingWrappingScorer.cs" />
+    <Compile Include="Search\TestScorerPerf.cs" />
+    <Compile Include="Search\TestSearchAfter.cs" />
+    <Compile Include="Search\TestSearcherManager.cs" />
+    <Compile Include="Search\TestSearchWithThreads.cs" />
+    <Compile Include="Search\TestShardSearching.cs" />
+    <Compile Include="Search\TestSimilarity.cs" />
+    <Compile Include="Search\TestSimilarityProvider.cs" />
+    <Compile Include="Search\TestSimpleExplanations.cs" />
+    <Compile Include="Search\TestSimpleExplanationsOfNonMatches.cs" />
+    <Compile Include="Search\TestSimpleSearchEquivalence.cs" />
+    <Compile Include="Search\TestSloppyPhraseQuery.cs" />
+    <Compile Include="Search\TestSloppyPhraseQuery2.cs" />
+    <Compile Include="Search\TestSort.cs" />
+    <Compile Include="Search\TestSortDocValues.cs" />
+    <Compile Include="Search\TestSortRandom.cs" />
+    <Compile Include="Search\TestSortRescorer.cs" />
+    <Compile Include="Search\TestSubScorerFreqs.cs" />
+    <Compile Include="Search\TestTermRangeFilter.cs" />
+    <Compile Include="Search\TestTermRangeQuery.cs" />
+    <Compile Include="Search\TestTermScorer.cs" />
+    <Compile Include="Search\TestTermVectors.cs" />
+    <Compile Include="Search\TestTimeLimitingCollector.cs" />
+    <Compile Include="Search\TestTopDocsCollector.cs" />
+    <Compile Include="Search\TestTopDocsMerge.cs" />
+    <Compile Include="Search\TestTopFieldCollector.cs" />
+    <Compile Include="Search\TestTopScoreDocCollector.cs" />
+    <Compile Include="Search\TestTotalHitCountCollector.cs" />
+    <Compile Include="Search\TestWildcard.cs" />
+    <Compile Include="Search\TestWildcardRandom.cs" />
+    <Compile Include="Store\TestBufferedChecksum.cs" />
+    <Compile Include="Store\TestBufferedIndexInput.cs" />
+    <Compile Include="Store\TestByteArrayDataInput.cs" />
+    <Compile Include="Store\TestCopyBytes.cs" />
+    <Compile Include="Store\TestDirectory.cs" />
+    <Compile Include="Store\TestFileSwitchDirectory.cs" />
+    <Compile Include="Store\TestFilterDirectory.cs" />
+    <Compile Include="Store\TestHugeRamFile.cs" />
+    <Compile Include="Store\TestLock.cs" />
+    <Compile Include="Store\TestLockFactory.cs" />
+    <Compile Include="Store\TestMockDirectoryWrapper.cs" />
+    <Compile Include="Store\TestMultiMMap.cs" />
+    <Compile Include="Store\TestNRTCachingDirectory.cs" />
+    <Compile Include="Store\TestRAMDirectory.cs" />
+    <Compile Include="Store\TestRateLimiter.cs" />
+    <Compile Include="Store\TestWindowsMMap.cs" />
+    <Compile Include="SupportClassException.cs">
+      <SubType>Code</SubType>
+    </Compile>
+    <Compile Include="Support\BaseBufferTestCase.cs" />
+    <Compile Include="Support\C5\ArrayBase.cs" />
+    <Compile Include="Support\C5\ArrayList.cs" />
+    <Compile Include="Support\C5\DropMultiplicity.cs" />
+    <Compile Include="Support\C5\Events.cs" />
+    <Compile Include="Support\C5\GenericCollectionTester.cs" />
+    <Compile Include="Support\C5\HashBag.cs" />
+    <Compile Include="Support\C5\Sorting.cs" />
+    <Compile Include="Support\C5\SupportClasses.cs" />
+    <Compile Include="Support\C5\WeakViewList.cs" />
+    <Compile Include="Support\C5\Wrappers.cs" />
+    <Compile Include="Support\TestHashMap.cs" />
+    <Compile Include="Support\TestLinkedHashMap.cs" />
+    <Compile Include="Support\TestLongBuffer.cs" />
+    <Compile Include="Support\TestByteBuffer.cs" />
+    <Compile Include="Support\TestLurchTable.cs" />
+    <Compile Include="Support\TestLurchTableThreading.cs" />
+    <Compile Include="Support\TestDataInputStream.cs" />
+    <Compile Include="Support\TestDataOutputStream.cs" />
+    <Compile Include="Support\TestSafeTextWriterWrapper.cs" />
+    <Compile Include="Support\TestStringBuilderExtensions.cs" />
+    <Compile Include="Support\TestTreeDictionary.cs" />
+    <Compile Include="Support\TestTreeSet.cs" />
+    <Compile Include="TestApiConsistency.cs" />
+    <Compile Include="TestAssertions.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\TestDemo.cs" />
-    <Compile Include="core\TestExternalCodecs.cs" />
-    <Compile Include="core\TestMergeSchedulerExternal.cs">
+    <Compile Include="TestDemo.cs" />
+    <Compile Include="TestExternalCodecs.cs" />
+    <Compile Include="TestMergeSchedulerExternal.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\TestSearch.cs" />
-    <Compile Include="core\TestSearchForDuplicates.cs" />
-    <Compile Include="core\TestWorstCaseTestBehavior.cs" />
-    <Compile Include="core\Util\Automaton\TestBasicOperations.cs">
+    <Compile Include="TestSearch.cs" />
+    <Compile Include="TestSearchForDuplicates.cs" />
+    <Compile Include="TestWorstCaseTestBehavior.cs" />
+    <Compile Include="Util\Automaton\TestBasicOperations.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\Automaton\TestCompiledAutomaton.cs">
+    <Compile Include="Util\Automaton\TestCompiledAutomaton.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\Automaton\TestDeterminism.cs">
+    <Compile Include="Util\Automaton\TestDeterminism.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\Automaton\TestDeterminizeLexicon.cs">
+    <Compile Include="Util\Automaton\TestDeterminizeLexicon.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\Automaton\TestLevenshteinAutomata.cs">
+    <Compile Include="Util\Automaton\TestLevenshteinAutomata.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\Automaton\TestMinimize.cs">
+    <Compile Include="Util\Automaton\TestMinimize.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\Automaton\TestSpecialOperations.cs">
+    <Compile Include="Util\Automaton\TestSpecialOperations.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\Automaton\TestUTF32ToUTF8.cs">
+    <Compile Include="Util\Automaton\TestUTF32ToUTF8.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\BaseSortTestCase.cs">
+    <Compile Include="Util\BaseSortTestCase.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\Fst\Test2BFST.cs">
+    <Compile Include="Util\Fst\Test2BFST.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\Fst\TestBytesStore.cs" />
-    <Compile Include="core\Util\Fst\TestFSTs.cs" />
-    <Compile Include="core\Util\Packed\TestEliasFanoDocIdSet.cs">
+    <Compile Include="Util\Fst\TestBytesStore.cs" />
+    <Compile Include="Util\Fst\TestFSTs.cs" />
+    <Compile Include="Util\Packed\TestEliasFanoDocIdSet.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\Packed\TestEliasFanoSequence.cs">
+    <Compile Include="Util\Packed\TestEliasFanoSequence.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\Packed\TestPackedInts.cs" />
-    <Compile Include="core\Util\StressRamUsageEstimator.cs" />
-    <Compile Include="core\Util\Test2BPagedBytes.cs">
+    <Compile Include="Util\Packed\TestPackedInts.cs" />
+    <Compile Include="Util\StressRamUsageEstimator.cs" />
+    <Compile Include="Util\Test2BPagedBytes.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestArrayUtil.cs">
+    <Compile Include="Util\TestArrayUtil.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestAttributeSource.cs">
+    <Compile Include="Util\TestAttributeSource.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestBroadWord.cs">
+    <Compile Include="Util\TestBroadWord.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestByteBlockPool.cs">
+    <Compile Include="Util\TestByteBlockPool.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestBytesRef.cs">
+    <Compile Include="Util\TestBytesRef.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestBytesRefArray.cs">
+    <Compile Include="Util\TestBytesRefArray.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestBytesRefHash.cs">
+    <Compile Include="Util\TestBytesRefHash.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestCharsRef.cs">
+    <Compile Include="Util\TestCharsRef.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestCloseableThreadLocal.cs">
+    <Compile Include="Util\TestCloseableThreadLocal.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestCollectionUtil.cs">
+    <Compile Include="Util\TestCollectionUtil.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestConstants.cs" />
-    <Compile Include="core\Util\TestDocIdBitSet.cs">
+    <Compile Include="Util\TestConstants.cs" />
+    <Compile Include="Util\TestDocIdBitSet.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestDoubleBarrelLRUCache.cs">
+    <Compile Include="Util\TestDoubleBarrelLRUCache.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestFieldCacheSanityChecker.cs">
+    <Compile Include="Util\TestFieldCacheSanityChecker.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestFilterIterator.cs">
+    <Compile Include="Util\TestFilterIterator.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestFixedBitSet.cs">
+    <Compile Include="Util\TestFixedBitSet.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestIdentityHashSet.cs" />
-    <Compile Include="core\Util\TestIndexableBinaryStringTools.cs">
+    <Compile Include="Util\TestIdentityHashSet.cs" />
+    <Compile Include="Util\TestIndexableBinaryStringTools.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestInPlaceMergeSorter.cs">
+    <Compile Include="Util\TestInPlaceMergeSorter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestIntroSorter.cs">
+    <Compile Include="Util\TestIntroSorter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestIntsRef.cs">
+    <Compile Include="Util\TestIntsRef.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestIOUtils.cs">
+    <Compile Include="Util\TestIOUtils.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestLongBitSet.cs">
+    <Compile Include="Util\TestLongBitSet.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestMathUtil.cs" />
-    <Compile Include="core\Util\TestMergedIterator.cs">
+    <Compile Include="Util\TestMathUtil.cs" />
+    <Compile Include="Util\TestMergedIterator.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestNamedSPILoader.cs">
+    <Compile Include="Util\TestNamedSPILoader.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestNumericUtils.cs">
+    <Compile Include="Util\TestNumericUtils.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestOfflineSorter.cs" />
-    <Compile Include="core\Util\TestOpenBitSet.cs">
+    <Compile Include="Util\TestOfflineSorter.cs" />
+    <Compile Include="Util\TestOpenBitSet.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestPagedBytes.cs">
+    <Compile Include="Util\TestPagedBytes.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestPForDeltaDocIdSet.cs">
+    <Compile Include="Util\TestPForDeltaDocIdSet.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestPriorityQueue.cs">
+    <Compile Include="Util\TestPriorityQueue.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestQueryBuilder.cs">
+    <Compile Include="Util\TestQueryBuilder.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestRamUsageEstimator.cs" />
-    <Compile Include="core\Util\TestRamUsageEstimatorOnWildAnimals.cs">
+    <Compile Include="Util\TestRamUsageEstimator.cs" />
+    <Compile Include="Util\TestRamUsageEstimatorOnWildAnimals.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestRecyclingByteBlockAllocator.cs">
+    <Compile Include="Util\TestRecyclingByteBlockAllocator.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestRecyclingIntBlockAllocator.cs">
+    <Compile Include="Util\TestRecyclingIntBlockAllocator.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestRollingBuffer.cs">
+    <Compile Include="Util\TestRollingBuffer.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestSentinelIntSet.cs">
+    <Compile Include="Util\TestSentinelIntSet.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestSetOnce.cs">
+    <Compile Include="Util\TestSetOnce.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestSloppyMath.cs">
+    <Compile Include="Util\TestSloppyMath.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestSmallFloat.cs">
+    <Compile Include="Util\TestSmallFloat.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestStringHelper.cs">
+    <Compile Include="Util\TestStringHelper.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestTimSorter.cs">
+    <Compile Include="Util\TestTimSorter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Support\TestToStringUtils.cs" />
-    <Compile Include="core\Util\TestUnicodeUtil.cs">
+    <Compile Include="Support\TestToStringUtils.cs" />
+    <Compile Include="Util\TestUnicodeUtil.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestVersion.cs">
+    <Compile Include="Util\TestVersion.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestVersionComparator.cs">
+    <Compile Include="Util\TestVersionComparator.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestVirtualMethod.cs" />
-    <Compile Include="core\Util\TestWAH8DocIdSet.cs">
+    <Compile Include="Util\TestVirtualMethod.cs" />
+    <Compile Include="Util\TestWAH8DocIdSet.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestWeakIdentityMap.cs" />
+    <Compile Include="Util\TestWeakIdentityMap.cs" />
   </ItemGroup>
   <ItemGroup>
-    <Folder Include="core\Util\Cache\" />
-    <Folder Include="core\Util\Junitcompat\" />
+    <Folder Include="Util\Cache\" />
+    <Folder Include="Util\JunitCompat\" />
   </ItemGroup>
   <ItemGroup>
     <Service Include="{82A7F48D-3B50-4B1E-B82E-3ADA8210C358}" />
   </ItemGroup>
   <ItemGroup>
-    <EmbeddedResource Include="core\Support\ReadFully.txt" />
+    <EmbeddedResource Include="Support\ReadFully.txt" />
   </ItemGroup>
   <Import Project="$(MSBuildToolsPath)\Microsoft.CSHARP.Targets" />
   <ProjectExtensions>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Lucene.Net.snk
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Lucene.Net.snk b/src/Lucene.Net.Tests/Lucene.Net.snk
new file mode 100644
index 0000000..f7f9ee5
Binary files /dev/null and b/src/Lucene.Net.Tests/Lucene.Net.snk differ

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/RectangularArrays.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/RectangularArrays.cs b/src/Lucene.Net.Tests/RectangularArrays.cs
new file mode 100644
index 0000000..6375f45
--- /dev/null
+++ b/src/Lucene.Net.Tests/RectangularArrays.cs
@@ -0,0 +1,52 @@
+//----------------------------------------------------------------------------------------
+//	Copyright © 2007 - 2013 Tangible Software Solutions Inc.
+//	This class can be used by anyone provided that the copyright notice remains intact.
+//
+//	This class provides the logic to simulate Java rectangular arrays, which are jagged
+//	arrays with inner arrays of the same length. A size of -1 indicates unknown length.
+//----------------------------------------------------------------------------------------
+
+using Lucene.Net.Util;
+
+internal static partial class RectangularArrays
+{
+    internal static int[][] ReturnRectangularIntArray(int Size1, int Size2)
+    {
+        int[][] Array;
+        if (Size1 > -1)
+        {
+            Array = new int[Size1][];
+            if (Size2 > -1)
+            {
+                for (int Array1 = 0; Array1 < Size1; Array1++)
+                {
+                    Array[Array1] = new int[Size2];
+                }
+            }
+        }
+        else
+            Array = null;
+
+        return Array;
+    }
+
+    internal static BytesRef[][] ReturnRectangularBytesRefArray(int Size1, int Size2)
+    {
+        BytesRef[][] Array;
+        if (Size1 > -1)
+        {
+            Array = new BytesRef[Size1][];
+            if (Size2 > -1)
+            {
+                for (int Array1 = 0; Array1 < Size1; Array1++)
+                {
+                    Array[Array1] = new BytesRef[Size2];
+                }
+            }
+        }
+        else
+            Array = null;
+
+        return Array;
+    }
+}
\ No newline at end of file
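
As the header comment notes, this helper emulates Java's rectangular allocation ("new int[size1][size2]") on top of C# jagged arrays, with a size of -1 meaning a dimension of unknown length that is left unallocated. A minimal usage sketch (illustrative only, not part of this patch):

    // Equivalent of Java's "new int[3][4]": three inner arrays, each of length 4.
    int[][] grid = RectangularArrays.ReturnRectangularIntArray(3, 4);
    grid[2][3] = 42;

    // A second size of -1 allocates only the outer array; the caller
    // fills in the inner arrays later, possibly with varying lengths.
    int[][] deferred = RectangularArrays.ReturnRectangularIntArray(3, -1);
    deferred[0] = new int[10];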

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/BaseTestRangeFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/BaseTestRangeFilter.cs b/src/Lucene.Net.Tests/Search/BaseTestRangeFilter.cs
new file mode 100644
index 0000000..3bd412e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/BaseTestRangeFilter.cs
@@ -0,0 +1,211 @@
+using System;
+using System.Text;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using OpenMode = Lucene.Net.Index.OpenMode;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class BaseTestRangeFilter : LuceneTestCase
+    {
+        public const bool F = false;
+        public const bool T = true;
+
+        /// <summary>
+        /// Collation interacts badly with hyphens -- collation produces different
+        /// ordering than Unicode code-point ordering -- so two indexes are created:
+        /// one which can't have negative random integers, for testing collated ranges,
+        /// and the other which can have negative random integers, for all other tests.
+        /// </summary>
+        internal class TestIndex
+        {
+            internal int MaxR;
+            internal int MinR;
+            internal bool AllowNegativeRandomInts;
+            internal Directory Index;
+
+            internal TestIndex(Random random, int minR, int maxR, bool allowNegativeRandomInts)
+            {
+                this.MinR = minR;
+                this.MaxR = maxR;
+                this.AllowNegativeRandomInts = allowNegativeRandomInts;
+                Index = NewDirectory(random);
+            }
+        }
+
+        internal static IndexReader SignedIndexReader;
+        internal static IndexReader UnsignedIndexReader;
+
+        internal static TestIndex SignedIndexDir;
+        internal static TestIndex UnsignedIndexDir;
+
+        internal static int MinId = 0;
+        internal static int MaxId;
+
+        internal static readonly int IntLength = Convert.ToString(int.MaxValue).Length;
+
+        /// <summary>
+        /// a simple padding function that should work with any int
+        /// </summary>
+        public static string Pad(int n)
+        {
+            StringBuilder b = new StringBuilder(40);
+            string p = "0";
+            if (n < 0)
+            {
+                p = "-";
+                n = int.MaxValue + n + 1;
+            }
+            b.Append(p);
+            string s = Convert.ToString(n);
+            for (int i = s.Length; i <= IntLength; i++)
+            {
+                b.Append("0");
+            }
+            b.Append(s);
+
+            return b.ToString();
+        }
+
+        /// <summary>
+        /// LUCENENET specific: this method is non-static because
+        /// <see cref="Build(Random, TestIndex)"/> is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClassBaseTestRangeFilter()
+        {
+            MaxId = AtLeast(500);
+            SignedIndexDir = new TestIndex(Random(), int.MaxValue, int.MinValue, true);
+            UnsignedIndexDir = new TestIndex(Random(), int.MaxValue, 0, false);
+            SignedIndexReader = Build(Random(), SignedIndexDir);
+            UnsignedIndexReader = Build(Random(), UnsignedIndexDir);
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClassBaseTestRangeFilter()
+        {
+            SignedIndexReader.Dispose();
+            UnsignedIndexReader.Dispose();
+            SignedIndexDir.Index.Dispose();
+            UnsignedIndexDir.Index.Dispose();
+            SignedIndexReader = null;
+            UnsignedIndexReader = null;
+            SignedIndexDir = null;
+            UnsignedIndexDir = null;
+        }
+
+        /// <summary>
+        /// LUCENENET specific: <paramref name="random"/> is passed in because
+        /// NewStringField and NewIndexWriterConfig are no longer static.
+        /// </summary>
+        private IndexReader Build(Random random, TestIndex index)
+        {
+            /* build an index */
+
+            Document doc = new Document();
+            Field idField = NewStringField(random, "id", "", Field.Store.YES);
+            Field randField = NewStringField(random, "rand", "", Field.Store.YES);
+            Field bodyField = NewStringField(random, "body", "", Field.Store.NO);
+            doc.Add(idField);
+            doc.Add(randField);
+            doc.Add(bodyField);
+
+            RandomIndexWriter writer = new RandomIndexWriter(random, index.Index, NewIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(TestUtil.NextInt(random, 50, 1000)).SetMergePolicy(NewLogMergePolicy()));
+            TestUtil.ReduceOpenFiles(writer.w);
+
+            while (true)
+            {
+                int minCount = 0;
+                int maxCount = 0;
+
+                for (int d = MinId; d <= MaxId; d++)
+                {
+                    idField.SetStringValue(Pad(d));
+                    int r = index.AllowNegativeRandomInts ? random.Next() : random.Next(int.MaxValue);
+                    if (index.MaxR < r)
+                    {
+                        index.MaxR = r;
+                        maxCount = 1;
+                    }
+                    else if (index.MaxR == r)
+                    {
+                        maxCount++;
+                    }
+
+                    if (r < index.MinR)
+                    {
+                        index.MinR = r;
+                        minCount = 1;
+                    }
+                    else if (r == index.MinR)
+                    {
+                        minCount++;
+                    }
+                    randField.SetStringValue(Pad(r));
+                    bodyField.SetStringValue("body");
+                    writer.AddDocument(doc);
+                }
+
+                if (minCount == 1 && maxCount == 1)
+                {
+                    // our subclasses rely on only 1 doc having the min or
+                    // max, so we loop until we satisfy that. It should be
+                    // exceedingly rare (Yonik calculates 1 in ~429,000 times)
+                    // that this loop requires more than one try:
+                    IndexReader ir = writer.Reader;
+                    writer.Dispose();
+                    return ir;
+                }
+
+                // try again
+                writer.DeleteAll();
+            }
+        }
+
+        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
+        public virtual void TestPad()
+        {
+            int[] tests = new int[] { -9999999, -99560, -100, -3, -1, 0, 3, 9, 10, 1000, 999999999 };
+            for (int i = 0; i < tests.Length - 1; i++)
+            {
+                int a = tests[i];
+                int b = tests[i + 1];
+                string aa = Pad(a);
+                string bb = Pad(b);
+                string label = a + ":" + aa + " vs " + b + ":" + bb;
+                Assert.AreEqual(aa.Length, bb.Length, "i=" + i + ": length of " + label);
+                Assert.IsTrue(System.String.Compare(aa, bb, System.StringComparison.Ordinal) < 0, "i=" + i + ": compare less than " + label);
+            }
+        }
+    }
+}
\ No newline at end of file
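
The Pad scheme above is what lets these range tests compare ints as strings: every value maps to a fixed-width string whose ordinal (code-point) order matches numeric order. Negatives are prefixed with '-', which sorts below '0', and are shifted by int.MaxValue + n + 1 so that more negative values produce smaller digit strings. A worked example (illustrative only, not part of this patch; results assume int.MaxValue = 2147483647, so IntLength is 10 and every padded string is 12 characters):

    using System.Diagnostics;

    string a = BaseTestRangeFilter.Pad(-3); // "-02147483645"
    string b = BaseTestRangeFilter.Pad(-1); // "-02147483647"
    string c = BaseTestRangeFilter.Pad(3);  // "000000000003"

    // Ordinal order agrees with numeric order: a < b < c, just as -3 < -1 < 3.
    Debug.Assert(string.CompareOrdinal(a, b) < 0);
    Debug.Assert(string.CompareOrdinal(b, c) < 0);

A collated (linguistic) comparison may treat the '-' prefix differently, which is exactly why the class builds a separate index without negative values for the collation tests.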

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/FuzzyTermOnShortTermsTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/FuzzyTermOnShortTermsTest.cs b/src/Lucene.Net.Tests/Search/FuzzyTermOnShortTermsTest.cs
new file mode 100644
index 0000000..5638f8a
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/FuzzyTermOnShortTermsTest.cs
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using System.IO;
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using Tokenizer = Lucene.Net.Analysis.Tokenizer;
+
+    [TestFixture]
+    public class FuzzyTermOnShortTermsTest : LuceneTestCase
+    {
+        private const string FIELD = "field";
+
+        [Test]
+        public virtual void Test()
+        {
+            // proves the rule that the edit distance between the two terms
+            // must be less than the length of the smaller term for there to be a match
+            Analyzer a = Analyzer;
+            //these work
+            CountHits(a, new string[] { "abc" }, new FuzzyQuery(new Term(FIELD, "ab"), 1), 1);
+            CountHits(a, new string[] { "ab" }, new FuzzyQuery(new Term(FIELD, "abc"), 1), 1);
+
+            CountHits(a, new string[] { "abcde" }, new FuzzyQuery(new Term(FIELD, "abc"), 2), 1);
+            CountHits(a, new string[] { "abc" }, new FuzzyQuery(new Term(FIELD, "abcde"), 2), 1);
+
+            //these don't
+            CountHits(a, new string[] { "ab" }, new FuzzyQuery(new Term(FIELD, "a"), 1), 0);
+            CountHits(a, new string[] { "a" }, new FuzzyQuery(new Term(FIELD, "ab"), 1), 0);
+
+            CountHits(a, new string[] { "abc" }, new FuzzyQuery(new Term(FIELD, "a"), 2), 0);
+            CountHits(a, new string[] { "a" }, new FuzzyQuery(new Term(FIELD, "abc"), 2), 0);
+
+            CountHits(a, new string[] { "abcd" }, new FuzzyQuery(new Term(FIELD, "ab"), 2), 0);
+            CountHits(a, new string[] { "ab" }, new FuzzyQuery(new Term(FIELD, "abcd"), 2), 0);
+        }
+
+        private void CountHits(Analyzer analyzer, string[] docs, Query q, int expected)
+        {
+            Directory d = GetDirectory(analyzer, docs);
+            IndexReader r = DirectoryReader.Open(d);
+            IndexSearcher s = new IndexSearcher(r);
+            TotalHitCountCollector c = new TotalHitCountCollector();
+            s.Search(q, c);
+            Assert.AreEqual(expected, c.TotalHits, q.ToString());
+            r.Dispose();
+            d.Dispose();
+        }
+
+        public static Analyzer Analyzer
+        {
+            get
+            {
+                return new AnalyzerAnonymousInnerClassHelper();
+            }
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            public AnalyzerAnonymousInnerClassHelper()
+            {
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+                return new TokenStreamComponents(tokenizer, tokenizer);
+            }
+        }
+
+        /// <summary>
+        /// LUCENENET specific: this method is non-static because
+        /// NewIndexWriterConfig is no longer static.
+        /// </summary>
+        public Directory GetDirectory(Analyzer analyzer, string[] vals)
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(TestUtil.NextInt(Random(), 100, 1000)).SetMergePolicy(NewLogMergePolicy()));
+
+            foreach (string s in vals)
+            {
+                Document d = new Document();
+                d.Add(NewTextField(FIELD, s, Field.Store.YES));
+                writer.AddDocument(d);
+            }
+            writer.Dispose();
+            return directory;
+        }
+    }
+}
\ No newline at end of file
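
The pattern in the test above follows from classic fuzzy scoring: raw similarity is computed roughly as 1 - editDistance / min(len(query term), len(indexed term)), so a pair of distinct terms can only match while the edit distance stays strictly below the length of the smaller term. A minimal sketch of that invariant (illustrative only, not part of this patch; the helper name is hypothetical):

    using System;

    static bool CanMatch(string queryTerm, string indexedTerm, int editDistance)
    {
        // Matches are possible only while similarity stays above zero,
        // i.e. while the edit budget is smaller than the shorter term.
        int smaller = Math.Min(queryTerm.Length, indexedTerm.Length);
        return editDistance < smaller;
    }

    // Mirrors the cases above:
    // CanMatch("ab", "abc", 1) -> true   ("these work")
    // CanMatch("a",  "ab",  1) -> false  ("these don't")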


[30/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
new file mode 100644
index 0000000..6ac040a
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
@@ -0,0 +1,1447 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Text;
+using System.Threading;
+using Lucene.Net.Attributes;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.IO;
+    using Util;
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using NumericDocValuesField = NumericDocValuesField;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using ScoreDoc = Lucene.Net.Search.ScoreDoc;
+    using StringField = StringField;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestIndexWriterDelete : LuceneTestCase
+    {
+        // test the simple case
+        [Test]
+        public virtual void TestSimpleCase()
+        {
+            string[] keywords = new string[] { "1", "2" };
+            string[] unindexed = new string[] { "Netherlands", "Italy" };
+            string[] unstored = new string[] { "Amsterdam has lots of bridges", "Venice has lots of canals" };
+            string[] text = new string[] { "Amsterdam", "Venice" };
+
+            Directory dir = NewDirectory();
+            IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).SetMaxBufferedDeleteTerms(1));
+
+            FieldType custom1 = new FieldType();
+            custom1.IsStored = true;
+            for (int i = 0; i < keywords.Length; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("id", keywords[i], Field.Store.YES));
+                doc.Add(NewField("country", unindexed[i], custom1));
+                doc.Add(NewTextField("contents", unstored[i], Field.Store.NO));
+                doc.Add(NewTextField("city", text[i], Field.Store.YES));
+                modifier.AddDocument(doc);
+            }
+            modifier.ForceMerge(1);
+            modifier.Commit();
+
+            Term term = new Term("city", "Amsterdam");
+            int hitCount = GetHitCount(dir, term);
+            Assert.AreEqual(1, hitCount);
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: now delete by term=" + term);
+            }
+            modifier.DeleteDocuments(term);
+            modifier.Commit();
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: now getHitCount");
+            }
+            hitCount = GetHitCount(dir, term);
+            Assert.AreEqual(0, hitCount);
+
+            modifier.Dispose();
+            dir.Dispose();
+        }
+
+        // test when delete terms only apply to disk segments
+        [Test]
+        public virtual void TestNonRAMDelete()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(2).SetMaxBufferedDeleteTerms(2));
+            int id = 0;
+            int value = 100;
+
+            for (int i = 0; i < 7; i++)
+            {
+                AddDoc(modifier, ++id, value);
+            }
+            modifier.Commit();
+
+            Assert.AreEqual(0, modifier.NumBufferedDocuments);
+            Assert.IsTrue(0 < modifier.SegmentCount);
+
+            modifier.Commit();
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(7, reader.NumDocs);
+            reader.Dispose();
+
+            modifier.DeleteDocuments(new Term("value", Convert.ToString(value)));
+
+            modifier.Commit();
+
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(0, reader.NumDocs);
+            reader.Dispose();
+            modifier.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestMaxBufferedDeletes()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).SetMaxBufferedDeleteTerms(1));
+
+            writer.AddDocument(new Document());
+            writer.DeleteDocuments(new Term("foobar", "1"));
+            writer.DeleteDocuments(new Term("foobar", "1"));
+            writer.DeleteDocuments(new Term("foobar", "1"));
+            Assert.AreEqual(3, writer.FlushDeletesCount);
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        // test when delete terms only apply to ram segments
+        [Test]
+        public virtual void TestRAMDeletes()
+        {
+            for (int t = 0; t < 2; t++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: t=" + t);
+                }
+                Directory dir = NewDirectory();
+                IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(4).SetMaxBufferedDeleteTerms(4));
+                int id = 0;
+                int value = 100;
+
+                AddDoc(modifier, ++id, value);
+                if (0 == t)
+                {
+                    modifier.DeleteDocuments(new Term("value", Convert.ToString(value)));
+                }
+                else
+                {
+                    modifier.DeleteDocuments(new TermQuery(new Term("value", Convert.ToString(value))));
+                }
+                AddDoc(modifier, ++id, value);
+                if (0 == t)
+                {
+                    modifier.DeleteDocuments(new Term("value", Convert.ToString(value)));
+                    Assert.AreEqual(2, modifier.NumBufferedDeleteTerms);
+                    Assert.AreEqual(1, modifier.BufferedDeleteTermsSize);
+                }
+                else
+                {
+                    modifier.DeleteDocuments(new TermQuery(new Term("value", Convert.ToString(value))));
+                }
+
+                AddDoc(modifier, ++id, value);
+                Assert.AreEqual(0, modifier.SegmentCount);
+                modifier.Commit();
+
+                IndexReader reader = DirectoryReader.Open(dir);
+                Assert.AreEqual(1, reader.NumDocs);
+
+                int hitCount = GetHitCount(dir, new Term("id", Convert.ToString(id)));
+                Assert.AreEqual(1, hitCount);
+                reader.Dispose();
+                modifier.Dispose();
+                dir.Dispose();
+            }
+        }
+
+        // test when delete terms apply to both disk and ram segments
+        [Test]
+        public virtual void TestBothDeletes()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(100).SetMaxBufferedDeleteTerms(100));
+
+            int id = 0;
+            int value = 100;
+
+            for (int i = 0; i < 5; i++)
+            {
+                AddDoc(modifier, ++id, value);
+            }
+
+            value = 200;
+            for (int i = 0; i < 5; i++)
+            {
+                AddDoc(modifier, ++id, value);
+            }
+            modifier.Commit();
+
+            for (int i = 0; i < 5; i++)
+            {
+                AddDoc(modifier, ++id, value);
+            }
+            modifier.DeleteDocuments(new Term("value", Convert.ToString(value)));
+
+            modifier.Commit();
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(5, reader.NumDocs);
+            modifier.Dispose();
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        // test that batched delete terms are flushed together
+        [Test]
+        public virtual void TestBatchDeletes()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(2).SetMaxBufferedDeleteTerms(2));
+
+            int id = 0;
+            int value = 100;
+
+            for (int i = 0; i < 7; i++)
+            {
+                AddDoc(modifier, ++id, value);
+            }
+            modifier.Commit();
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(7, reader.NumDocs);
+            reader.Dispose();
+
+            id = 0;
+            modifier.DeleteDocuments(new Term("id", Convert.ToString(++id)));
+            modifier.DeleteDocuments(new Term("id", Convert.ToString(++id)));
+
+            modifier.Commit();
+
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(5, reader.NumDocs);
+            reader.Dispose();
+
+            Term[] terms = new Term[3];
+            for (int i = 0; i < terms.Length; i++)
+            {
+                terms[i] = new Term("id", Convert.ToString(++id));
+            }
+            modifier.DeleteDocuments(terms);
+            modifier.Commit();
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(2, reader.NumDocs);
+            reader.Dispose();
+
+            modifier.Dispose();
+            dir.Dispose();
+        }
+
+        // test deleteAll()
+        [Test]
+        public virtual void TestDeleteAll()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter modifier = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(2).SetMaxBufferedDeleteTerms(2));
+
+            int id = 0;
+            int value = 100;
+
+            for (int i = 0; i < 7; i++)
+            {
+                AddDoc(modifier, ++id, value);
+            }
+            modifier.Commit();
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(7, reader.NumDocs);
+            reader.Dispose();
+
+            // Add 1 doc (so we will have something buffered)
+            AddDoc(modifier, 99, value);
+
+            // Delete all
+            modifier.DeleteAll();
+
+            // Delete all shouldn't be on disk yet
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(7, reader.NumDocs);
+            reader.Dispose();
+
+            // Add a doc and update a doc (after the deleteAll, before the commit)
+            AddDoc(modifier, 101, value);
+            UpdateDoc(modifier, 102, value);
+
+            // commit the delete all
+            modifier.Commit();
+
+            // Validate there are no docs left
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(2, reader.NumDocs);
+            reader.Dispose();
+
+            modifier.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDeleteAllNoDeadLock()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter modifier = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            int numThreads = AtLeast(2);
+            ThreadClass[] threads = new ThreadClass[numThreads];
+            CountdownEvent latch = new CountdownEvent(1);
+            CountdownEvent doneLatch = new CountdownEvent(numThreads);
+            for (int i = 0; i < numThreads; i++)
+            {
+                int offset = i;
+                threads[i] = new ThreadAnonymousInnerClassHelper(this, modifier, latch, doneLatch, offset);
+                threads[i].Start();
+            }
+            latch.Signal();
+            // Poll the done latch with a 1 ms timeout, deleting all between polls
+            while (!doneLatch.Wait(new TimeSpan(0, 0, 0, 0, 1)))
+            {
+                modifier.DeleteAll();
+                if (VERBOSE)
+                {
+                    Console.WriteLine("del all");
+                }
+            }
+
+            modifier.DeleteAll();
+            foreach (ThreadClass thread in threads)
+            {
+                thread.Join();
+            }
+
+            modifier.Dispose();
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(reader.MaxDoc, 0);
+            Assert.AreEqual(reader.NumDocs, 0);
+            Assert.AreEqual(reader.NumDeletedDocs, 0);
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestIndexWriterDelete OuterInstance;
+
+            private RandomIndexWriter Modifier;
+            private CountdownEvent Latch;
+            private CountdownEvent DoneLatch;
+            private int Offset;
+
+            public ThreadAnonymousInnerClassHelper(TestIndexWriterDelete outerInstance, RandomIndexWriter modifier, CountdownEvent latch, CountdownEvent doneLatch, int offset)
+            {
+                this.OuterInstance = outerInstance;
+                this.Modifier = modifier;
+                this.Latch = latch;
+                this.DoneLatch = doneLatch;
+                this.Offset = offset;
+            }
+
+            public override void Run()
+            {
+                int id = Offset * 1000;
+                int value = 100;
+                try
+                {
+                    Latch.Wait();
+                    for (int j = 0; j < 1000; j++)
+                    {
+                        Document doc = new Document();
+                        doc.Add(OuterInstance.NewTextField("content", "aaa", Field.Store.NO));
+                        doc.Add(OuterInstance.NewStringField("id", Convert.ToString(id++), Field.Store.YES));
+                        doc.Add(OuterInstance.NewStringField("value", Convert.ToString(value), Field.Store.NO));
+                        if (DefaultCodecSupportsDocValues())
+                        {
+                            doc.Add(new NumericDocValuesField("dv", value));
+                        }
+                        Modifier.AddDocument(doc);
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("\tThread[" + Offset + "]: add doc: " + id);
+                        }
+                    }
+                }
+                catch (Exception e)
+                {
+                    throw new Exception(e.Message, e);
+                }
+                finally
+                {
+                    DoneLatch.Signal();
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("\tThread[" + Offset + "]: done indexing");
+                    }
+                }
+            }
+        }
+
+        // test rollback of deleteAll()
+        [Test]
+        public virtual void TestDeleteAllRollback()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter modifier = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(2).SetMaxBufferedDeleteTerms(2));
+
+            int id = 0;
+            int value = 100;
+
+            for (int i = 0; i < 7; i++)
+            {
+                AddDoc(modifier, ++id, value);
+            }
+            modifier.Commit();
+
+            AddDoc(modifier, ++id, value);
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(7, reader.NumDocs);
+            reader.Dispose();
+
+            // Delete all
+            modifier.DeleteAll();
+
+            // Roll it back
+            modifier.Rollback();
+            modifier.Dispose();
+
+            // Validate that the docs are still there
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(7, reader.NumDocs);
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        // test deleteAll() w/ near real-time reader
+        [Test]
+        public virtual void TestDeleteAllNRT()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter modifier = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(2).SetMaxBufferedDeleteTerms(2));
+
+            int id = 0;
+            int value = 100;
+
+            for (int i = 0; i < 7; i++)
+            {
+                AddDoc(modifier, ++id, value);
+            }
+            modifier.Commit();
+
+            IndexReader reader = modifier.Reader;
+            Assert.AreEqual(7, reader.NumDocs);
+            reader.Dispose();
+
+            AddDoc(modifier, ++id, value);
+            AddDoc(modifier, ++id, value);
+
+            // Delete all
+            modifier.DeleteAll();
+
+            reader = modifier.Reader;
+            Assert.AreEqual(0, reader.NumDocs);
+            reader.Dispose();
+
+            // Roll it back
+            modifier.Rollback();
+            modifier.Dispose();
+
+            // Validate that the docs are still there
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(7, reader.NumDocs);
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        private void UpdateDoc(IndexWriter modifier, int id, int value)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+            doc.Add(NewStringField("id", Convert.ToString(id), Field.Store.YES));
+            doc.Add(NewStringField("value", Convert.ToString(value), Field.Store.NO));
+            if (DefaultCodecSupportsDocValues())
+            {
+                doc.Add(new NumericDocValuesField("dv", value));
+            }
+            modifier.UpdateDocument(new Term("id", Convert.ToString(id)), doc);
+        }
+
+        private void AddDoc(IndexWriter modifier, int id, int value)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+            doc.Add(NewStringField("id", Convert.ToString(id), Field.Store.YES));
+            doc.Add(NewStringField("value", Convert.ToString(value), Field.Store.NO));
+            if (DefaultCodecSupportsDocValues())
+            {
+                doc.Add(new NumericDocValuesField("dv", value));
+            }
+            modifier.AddDocument(doc);
+        }
+
+        private int GetHitCount(Directory dir, Term term)
+        {
+            IndexReader reader = DirectoryReader.Open(dir);
+            IndexSearcher searcher = NewSearcher(reader);
+            int hitCount = searcher.Search(new TermQuery(term), null, 1000).TotalHits;
+            reader.Dispose();
+            return hitCount;
+        }
+
+        [Test]
+        public virtual void TestDeletesOnDiskFull(
+            [ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            DoTestOperationsOnDiskFull(scheduler, false);
+        }
+
+        [Test]
+        public virtual void TestUpdatesOnDiskFull(
+            [ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            DoTestOperationsOnDiskFull(scheduler, true);
+        }
+
+        /// <summary>
+        /// Make sure that if the modifier tries to commit but hits disk full,
+        /// it remains consistent and usable. Similar to TestIndexReader.testDiskFull().
+        /// </summary>
+        private void DoTestOperationsOnDiskFull(IConcurrentMergeScheduler scheduler, bool updates)
+        {
+            Term searchTerm = new Term("content", "aaa");
+            int START_COUNT = 157;
+            int END_COUNT = 144;
+
+            // First build up a starting index:
+            MockDirectoryWrapper startDir = NewMockDirectory();
+            // TODO: find the resource leak that only occurs sometimes here.
+            startDir.NoDeleteOpenFile = false;
+            IndexWriter writer = new IndexWriter(startDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)));
+            for (int i = 0; i < 157; i++)
+            {
+                Document d = new Document();
+                d.Add(NewStringField("id", Convert.ToString(i), Field.Store.YES));
+                d.Add(NewTextField("content", "aaa " + i, Field.Store.NO));
+                if (DefaultCodecSupportsDocValues())
+                {
+                    d.Add(new NumericDocValuesField("dv", i));
+                }
+                writer.AddDocument(d);
+            }
+            writer.Dispose();
+
+            long diskUsage = startDir.SizeInBytes();
+            long diskFree = diskUsage + 10;
+
+            IOException err = null;
+
+            bool done = false;
+
+            // Iterate w/ ever increasing free disk space:
+            while (!done)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: cycle");
+                }
+                MockDirectoryWrapper dir = new MockDirectoryWrapper(Random(), new RAMDirectory(startDir, NewIOContext(Random())));
+                dir.PreventDoubleWrite = false;
+                dir.AllowRandomFileNotFoundException = false;
+
+                var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false))
+                                .SetMaxBufferedDocs(1000)
+                                .SetMaxBufferedDeleteTerms(1000)
+                                .SetMergeScheduler(scheduler);
+
+                scheduler.SetSuppressExceptions();
+
+                IndexWriter modifier = new IndexWriter(dir, config);
+
+                // For each disk size, first try to commit against
+                // dir that will hit random IOExceptions & disk
+                // full; after, give it infinite disk space & turn
+                // off random IOExceptions & retry w/ same reader:
+                bool success = false;
+
+                for (int x = 0; x < 2; x++)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: x=" + x);
+                    }
+
+                    double rate = 0.1;
+                    double diskRatio = ((double)diskFree) / diskUsage;
+                    long thisDiskFree;
+                    string testName;
+
+                    if (0 == x)
+                    {
+                        thisDiskFree = diskFree;
+                        if (diskRatio >= 2.0)
+                        {
+                            rate /= 2;
+                        }
+                        if (diskRatio >= 4.0)
+                        {
+                            rate /= 2;
+                        }
+                        if (diskRatio >= 6.0)
+                        {
+                            rate = 0.0;
+                        }
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("\ncycle: " + diskFree + " bytes");
+                        }
+                        testName = "disk full during reader.Dispose() @ " + thisDiskFree + " bytes";
+                        dir.RandomIOExceptionRateOnOpen = Random().NextDouble() * 0.01;
+                    }
+                    else
+                    {
+                        thisDiskFree = 0;
+                        rate = 0.0;
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("\ncycle: same writer: unlimited disk space");
+                        }
+                        testName = "reader re-use after disk full";
+                        dir.RandomIOExceptionRateOnOpen = 0.0;
+                    }
+
+                    dir.MaxSizeInBytes = thisDiskFree;
+                    dir.RandomIOExceptionRate = rate;
+
+                    try
+                    {
+                        if (0 == x)
+                        {
+                            int docId = 12;
+                            for (int i = 0; i < 13; i++)
+                            {
+                                if (updates)
+                                {
+                                    Document d = new Document();
+                                    d.Add(NewStringField("id", Convert.ToString(i), Field.Store.YES));
+                                    d.Add(NewTextField("content", "bbb " + i, Field.Store.NO));
+                                    if (DefaultCodecSupportsDocValues())
+                                    {
+                                        d.Add(new NumericDocValuesField("dv", i));
+                                    }
+                                    modifier.UpdateDocument(new Term("id", Convert.ToString(docId)), d);
+                                } // deletes
+                                else
+                                {
+                                    modifier.DeleteDocuments(new Term("id", Convert.ToString(docId)));
+                                    // modifier.setNorm(docId, "contents", (float)2.0);
+                                }
+                                docId += 12;
+                            }
+                        }
+                        modifier.Dispose();
+                        success = true;
+                        if (0 == x)
+                        {
+                            done = true;
+                        }
+                    }
+                    catch (IOException e)
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("  hit IOException: " + e);
+                            Console.WriteLine(e.StackTrace);
+                        }
+                        err = e;
+                        if (1 == x)
+                        {
+                            Console.WriteLine(e.ToString());
+                            Console.Write(e.StackTrace);
+                            Assert.Fail(testName + " hit IOException after disk space was freed up");
+                        }
+                    }
+                    // prevent throwing a random exception here!!
+                    double randomIOExceptionRate = dir.RandomIOExceptionRate;
+                    long maxSizeInBytes = dir.MaxSizeInBytes;
+                    dir.RandomIOExceptionRate = 0.0;
+                    dir.RandomIOExceptionRateOnOpen = 0.0;
+                    dir.MaxSizeInBytes = 0;
+                    if (!success)
+                    {
+                        // Must force the close else the writer can have
+                        // open files which cause exc in MockRAMDir.close
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: now rollback");
+                        }
+                        modifier.Rollback();
+                    }
+
+                    // If the close() succeeded, make sure there are
+                    // no unreferenced files.
+                    if (success)
+                    {
+                        TestUtil.CheckIndex(dir);
+                        TestIndexWriter.AssertNoUnreferencedFiles(dir, "after writer.close");
+                    }
+                    dir.RandomIOExceptionRate = randomIOExceptionRate;
+                    dir.MaxSizeInBytes = maxSizeInBytes;
+
+                    // Finally, verify index is not corrupt, and, if
+                    // we succeeded, we see all docs changed, and if
+                    // we failed, we see either all docs or no docs
+                    // changed (transactional semantics):
+                    IndexReader newReader = null;
+                    try
+                    {
+                        newReader = DirectoryReader.Open(dir);
+                    }
+                    catch (IOException e)
+                    {
+                        Console.WriteLine(e.ToString());
+                        Console.Write(e.StackTrace);
+                        Assert.Fail(testName + ":exception when creating IndexReader after disk full during close: " + e);
+                    }
+
+                    IndexSearcher searcher = NewSearcher(newReader);
+                    ScoreDoc[] hits = null;
+                    try
+                    {
+                        hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
+                    }
+                    catch (IOException e)
+                    {
+                        Console.WriteLine(e.ToString());
+                        Console.Write(e.StackTrace);
+                        Assert.Fail(testName + ": exception when searching: " + e);
+                    }
+                    int result2 = hits.Length;
+                    if (success)
+                    {
+                        if (x == 0 && result2 != END_COUNT)
+                        {
+                            Assert.Fail(testName + ": method did not throw exception but hits.Length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT);
+                        }
+                        else if (x == 1 && result2 != START_COUNT && result2 != END_COUNT)
+                        {
+                            // It's possible that the first exception was
+                            // "recoverable" wrt pending deletes, in which
+                            // case the pending deletes are retained and
+                            // then re-flushing (with plenty of disk
+                            // space) will succeed in flushing the
+                            // deletes:
+                            Assert.Fail(testName + ": method did not throw exception but hits.Length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
+                        }
+                    }
+                    else
+                    {
+                        // On hitting exception we still may have added
+                        // all docs:
+                        if (result2 != START_COUNT && result2 != END_COUNT)
+                        {
+                            Console.WriteLine(err.ToString());
+                            Console.Write(err.StackTrace);
+                            Assert.Fail(testName + ": method did throw exception but hits.Length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
+                        }
+                    }
+                    newReader.Dispose();
+                    if (result2 == END_COUNT)
+                    {
+                        break;
+                    }
+                }
+                dir.Dispose();
+                modifier.Dispose();
+
+                // Try again with 10 more bytes of free space:
+                diskFree += 10;
+            }
+            startDir.Dispose();
+        }
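+
+        // Illustrative sketch, not part of the original test: a hypothetical
+        // helper capturing the transactional check above - after a disk-full
+        // failure the index must reflect either all or none of the changes.
+        private static void AssertTransactional(int hits, int startCount, int endCount)
+        {
+            Assert.IsTrue(hits == startCount || hits == endCount,
+                "expected " + startCount + " or " + endCount + " hits but got " + hits);
+        }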
+
+        // This test verifies that buffered deletes are cleared when
+        // an exception is hit during flush.
+        [Test]
+        public virtual void TestErrorAfterApplyDeletes()
+        {
+            MockDirectoryWrapper.Failure failure = new FailureAnonymousInnerClassHelper(this);
+
+            // create a couple of files
+
+            string[] keywords = new string[] { "1", "2" };
+            string[] unindexed = new string[] { "Netherlands", "Italy" };
+            string[] unstored = new string[] { "Amsterdam has lots of bridges", "Venice has lots of canals" };
+            string[] text = new string[] { "Amsterdam", "Venice" };
+
+            MockDirectoryWrapper dir = NewMockDirectory();
+            IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false))
+                .SetMaxBufferedDeleteTerms(2)
+                .SetReaderPooling(false)
+                .SetMergePolicy(NewLogMergePolicy()));
+
+            MergePolicy lmp = modifier.Config.MergePolicy;
+            lmp.NoCFSRatio = 1.0;
+
+            dir.FailOn(failure.Reset());
+
+            FieldType custom1 = new FieldType();
+            custom1.IsStored = true;
+            for (int i = 0; i < keywords.Length; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("id", keywords[i], Field.Store.YES));
+                doc.Add(NewField("country", unindexed[i], custom1));
+                doc.Add(NewTextField("contents", unstored[i], Field.Store.NO));
+                doc.Add(NewTextField("city", text[i], Field.Store.YES));
+                modifier.AddDocument(doc);
+            }
+            // flush (and commit if ac)
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: now full merge");
+            }
+
+            modifier.ForceMerge(1);
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: now commit");
+            }
+            modifier.Commit();
+
+            // one of the two docs hits
+
+            Term term = new Term("city", "Amsterdam");
+            int hitCount = GetHitCount(dir, term);
+            Assert.AreEqual(1, hitCount);
+
+            // open the writer again (closed above)
+
+            // delete the doc
+            // max buf del terms is two, so this is buffered
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: delete term=" + term);
+            }
+
+            modifier.DeleteDocuments(term);
+
+            // add a doc (needed for the !ac case; see below)
+            // doc remains buffered
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: add empty doc");
+            }
+            Document doc_ = new Document();
+            modifier.AddDocument(doc_);
+
+            // commit the changes, the buffered deletes, and the new doc
+
+            // The failure object will fail on the first write after the del
+            // file gets created when processing the buffered delete
+
+            // in the ac case, this will be when writing the new segments
+            // files so we really don't need the new doc, but it's harmless
+
+            // in the !ac case, a new segments file won't be created, but
+            // creation of the cfs file happens next, so we
+            // need the doc (to test that it's okay that we don't
+            // lose deletes if failing while creating the cfs file)
+            bool failed = false;
+            try
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: now commit for failure");
+                }
+                modifier.Commit();
+            }
+#pragma warning disable 168
+            catch (IOException ioe)
+#pragma warning restore 168
+            {
+                // expected
+                failed = true;
+            }
+
+            Assert.IsTrue(failed);
+
+            // The commit above failed, so we need to retry it (which will
+            // succeed, because the failure is a one-shot)
+
+            modifier.Commit();
+
+            hitCount = GetHitCount(dir, term);
+
+            // Make sure the delete was successfully flushed:
+            Assert.AreEqual(0, hitCount);
+
+            modifier.Dispose();
+            dir.Dispose();
+        }
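+
+        // Illustrative sketch, not part of the original test: the retry
+        // pattern exercised above, extracted into a hypothetical helper.
+        // The injected failure is a one-shot, so the second attempt succeeds
+        // and the buffered delete survives the failed first commit.
+        private static void CommitWithOneRetry(IndexWriter writer)
+        {
+            try
+            {
+                writer.Commit(); // may hit the injected one-shot failure
+            }
+            catch (IOException)
+            {
+                writer.Commit(); // retry; the failure does not repeat
+            }
+        }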
+
+        private class FailureAnonymousInnerClassHelper : MockDirectoryWrapper.Failure
+        {
+            private readonly TestIndexWriterDelete OuterInstance;
+
+            public FailureAnonymousInnerClassHelper(TestIndexWriterDelete outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+                sawMaybe = false;
+                failed = false;
+            }
+
+            internal bool sawMaybe;
+            internal bool failed;
+            internal Thread thread;
+
+            public override MockDirectoryWrapper.Failure Reset()
+            {
+                thread = Thread.CurrentThread;
+                sawMaybe = false;
+                failed = false;
+                return this;
+            }
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
+                if (Thread.CurrentThread != thread)
+                {
+                    // don't fail during merging
+                    return;
+                }
+                if (sawMaybe && !failed)
+                {
+                    bool seen = 
+                        StackTraceHelper.DoesStackTraceContainMethod("ApplyDeletesAndUpdates") ||
+                        StackTraceHelper.DoesStackTraceContainMethod("SlowFileExists");                 
+
+                    if (!seen)
+                    {
+                        // Only fail once we are no longer in applyDeletes
+                        failed = true;
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: mock failure: now fail");
+                            Console.WriteLine(Environment.StackTrace);
+                        }
+                        throw new IOException("fail after applyDeletes");
+                    }
+                }
+                if (!failed)
+                {
+                    if (StackTraceHelper.DoesStackTraceContainMethod("ApplyDeletesAndUpdates"))
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: mock failure: saw applyDeletes");
+                            Console.WriteLine(Environment.StackTrace);
+                        }
+                        sawMaybe = true;
+                    }              
+                }
+            }
+        }
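+
+        // Illustrative sketch (hypothetical failure, not upstream code): the
+        // same stack-trace gating idea as above, reduced to a single method
+        // name - fail exactly once when called from inside Commit.
+        private class FailOnceInMethodFailure : MockDirectoryWrapper.Failure
+        {
+            private bool failed;
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
+                if (!failed && StackTraceHelper.DoesStackTraceContainMethod("Commit"))
+                {
+                    failed = true;
+                    throw new IOException("fail once inside Commit");
+                }
+            }
+        }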
+
+        // This test verifies that the files created by the docs writer
+        // before a segment is written are cleaned up if there's an I/O error.
+        [Test]
+        public virtual void TestErrorInDocsWriterAdd()
+        {
+            MockDirectoryWrapper.Failure failure = new FailureAnonymousInnerClassHelper2(this);
+
+            // create a couple of files
+
+            string[] keywords = new string[] { "1", "2" };
+            string[] unindexed = new string[] { "Netherlands", "Italy" };
+            string[] unstored = new string[] { "Amsterdam has lots of bridges", "Venice has lots of canals" };
+            string[] text = new string[] { "Amsterdam", "Venice" };
+
+            MockDirectoryWrapper dir = NewMockDirectory();
+            IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)));
+            modifier.Commit();
+            dir.FailOn(failure.Reset());
+
+            FieldType custom1 = new FieldType();
+            custom1.IsStored = true;
+            for (int i = 0; i < keywords.Length; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("id", keywords[i], Field.Store.YES));
+                doc.Add(NewField("country", unindexed[i], custom1));
+                doc.Add(NewTextField("contents", unstored[i], Field.Store.NO));
+                doc.Add(NewTextField("city", text[i], Field.Store.YES));
+                try
+                {
+                    modifier.AddDocument(doc);
+                }
+                catch (IOException io)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: got expected exc:");
+                        Console.WriteLine(io.StackTrace);
+                    }
+                    break;
+                }
+            }
+
+            modifier.Dispose();
+            TestIndexWriter.AssertNoUnreferencedFiles(dir, "docsWriter.abort() failed to delete unreferenced files");
+            dir.Dispose();
+        }
+
+        private class FailureAnonymousInnerClassHelper2 : MockDirectoryWrapper.Failure
+        {
+            private readonly TestIndexWriterDelete OuterInstance;
+
+            public FailureAnonymousInnerClassHelper2(TestIndexWriterDelete outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+                failed = false;
+            }
+
+            internal bool failed;
+
+            public override MockDirectoryWrapper.Failure Reset()
+            {
+                failed = false;
+                return this;
+            }
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
+                if (!failed)
+                {
+                    failed = true;
+                    throw new IOException("fail in add doc");
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestDeleteNullQuery()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)));
+
+            for (int i = 0; i < 5; i++)
+            {
+                AddDoc(modifier, i, 2 * i);
+            }
+
+            modifier.DeleteDocuments(new TermQuery(new Term("nada", "nada")));
+            modifier.Commit();
+            Assert.AreEqual(5, modifier.NumDocs);
+            modifier.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDeleteAllSlowly()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            int NUM_DOCS = AtLeast(1000);
+            IList<int?> ids = new List<int?>(NUM_DOCS);
+            for (int id = 0; id < NUM_DOCS; id++)
+            {
+                ids.Add(id);
+            }
+            Collections.Shuffle(ids);
+            foreach (int id in ids)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("id", "" + id, Field.Store.NO));
+                w.AddDocument(doc);
+            }
+            Collections.Shuffle(ids);
+            int upto = 0;
+            while (upto < ids.Count)
+            {
+                int left = ids.Count - upto;
+                int inc = Math.Min(left, TestUtil.NextInt(Random(), 1, 20));
+                int limit = upto + inc;
+                while (upto < limit)
+                {
+                    w.DeleteDocuments(new Term("id", "" + ids[upto++]));
+                }
+                IndexReader r = w.Reader;
+                Assert.AreEqual(NUM_DOCS - upto, r.NumDocs);
+                r.Dispose();
+            }
+
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIndexingThenDeleting()
+        {
+            // TODO: move this test to its own class and just @SuppressCodecs?
+            // TODO: is it enough to just use newFSDirectory?
+            string fieldFormat = TestUtil.GetPostingsFormat("field");
+            AssumeFalse("this test cannot run with Memory codec", fieldFormat.Equals("Memory"));
+            AssumeFalse("this test cannot run with SimpleText codec", fieldFormat.Equals("SimpleText"));
+            AssumeFalse("this test cannot run with Direct codec", fieldFormat.Equals("Direct"));
+            Random r = Random();
+            Directory dir = NewDirectory();
+            // note this test explicitly disables payloads
+            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper(this);
+            IndexWriter w = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)
+                .SetRAMBufferSizeMB(1.0)
+                .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+                .SetMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH));
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "go 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20", Field.Store.NO));
+            int num = AtLeast(3);
+            for (int iter = 0; iter < num; iter++)
+            {
+                int count = 0;
+
+                bool doIndexing = r.NextBoolean();
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: iter doIndexing=" + doIndexing);
+                }
+                if (doIndexing)
+                {
+                    // Add docs until a flush is triggered
+                    int startFlushCount = w.FlushCount;
+                    while (w.FlushCount == startFlushCount)
+                    {
+                        w.AddDocument(doc);
+                        count++;
+                    }
+                }
+                else
+                {
+                    // Delete docs until a flush is triggered
+                    int startFlushCount = w.FlushCount;
+                    while (w.FlushCount == startFlushCount)
+                    {
+                        w.DeleteDocuments(new Term("foo", "" + count));
+                        count++;
+                    }
+                }
+                Assert.IsTrue(count > 2500, "flush happened too quickly during " + (doIndexing ? "indexing" : "deleting") + " count=" + count);
+            }
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private readonly TestIndexWriterDelete OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper(TestIndexWriterDelete outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, true));
+            }
+        }
+
+        // LUCENE-3340: make sure deletes that we don't apply
+        // during flush (ie are just pushed into the stream) are
+        // in fact later flushed due to their RAM usage:
+        [Test]
+        public virtual void TestFlushPushedDeletesByRAM()
+        {
+            Directory dir = NewDirectory();
+            // Cannot use RandomIndexWriter because we don't want to
+            // ever call commit() for this test:
+            // note: tiny rambuffer used, as with a 1MB buffer the test is too slow (flush @ 128,999)
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                .SetRAMBufferSizeMB(0.1f)
+                .SetMaxBufferedDocs(1000)
+                .SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES)
+                .SetReaderPooling(false));
+            int count = 0;
+            while (true)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", count + "", Field.Store.NO));
+                Term delTerm;
+                if (count == 1010)
+                {
+                    // this is the only delete that applies
+                    delTerm = new Term("id", "" + 0);
+                }
+                else
+                {
+                    // These get buffered, taking up RAM, but delete
+                    // nothing when applied:
+                    delTerm = new Term("id", "x" + count);
+                }
+                w.UpdateDocument(delTerm, doc);
+                // Eventually segment 0 should get a del-docs file:
+                // TODO: fix this test
+                if (SlowFileExists(dir, "_0_1.del") || SlowFileExists(dir, "_0_1.liv"))
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: deletes created @ count=" + count);
+                    }
+                    break;
+                }
+                count++;
+
+                // Today we applyDeletes @ count=21553; even if we make
+                // sizable improvements to RAM efficiency of buffered
+                // del term we're unlikely to go over 100K:
+                if (count > 100000)
+                {
+                    Assert.Fail("delete's were not applied");
+                }
+            }
+            w.Dispose();
+            dir.Dispose();
+        }
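+
+        // Illustrative sketch, not part of the original test: the core of the
+        // LUCENE-3340 scenario above. Each UpdateDocument buffers a delete
+        // term plus a new document; terms that match nothing still consume
+        // buffered-delete RAM until the writer flushes them. The helper name
+        // is hypothetical.
+        private static void BufferNonMatchingDeletes(IndexWriter writer, int n)
+        {
+            for (int i = 0; i < n; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", i.ToString(), Field.Store.NO));
+                // this term deletes nothing when applied, but is buffered:
+                writer.UpdateDocument(new Term("id", "x" + i), doc);
+            }
+        }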
+
+        // LUCENE-3340: make sure deletes that we don't apply
+        // during flush (ie are just pushed into the stream) are
+        // in fact later flushed due to their RAM usage:
+        [Test]
+        public virtual void TestFlushPushedDeletesByCount()
+        {
+            Directory dir = NewDirectory();
+            // Cannot use RandomIndexWriter because we don't want to
+            // ever call commit() for this test:
+            int flushAtDelCount = AtLeast(1020);
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                .SetMaxBufferedDeleteTerms(flushAtDelCount)
+                .SetMaxBufferedDocs(1000)
+                .SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+                .SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES)
+                .SetReaderPooling(false));
+            int count = 0;
+            while (true)
+            {
+                Document doc = new Document();
+                doc.Add(new StringField("id", count + "", Field.Store.NO));
+                Term delTerm;
+                if (count == 1010)
+                {
+                    // this is the only delete that applies
+                    delTerm = new Term("id", "" + 0);
+                }
+                else
+                {
+                    // These get buffered, taking up RAM, but delete
+                    // nothing when applied:
+                    delTerm = new Term("id", "x" + count);
+                }
+                w.UpdateDocument(delTerm, doc);
+                // Eventually segment 0 should get a del-docs file:
+                // TODO: fix this test
+                if (SlowFileExists(dir, "_0_1.del") || SlowFileExists(dir, "_0_1.liv"))
+                {
+                    break;
+                }
+                count++;
+                if (count > flushAtDelCount)
+                {
+                    Assert.Fail("delete's were not applied at count=" + flushAtDelCount);
+                }
+            }
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        // Make sure buffered (pushed) deletes don't use up so
+        // much RAM that it forces long tail of tiny segments:
+#if !NETSTANDARD
+        // LUCENENET: There is no Timeout attribute in NUnit for .NET Core.
+        [Timeout(int.MaxValue)]
+#endif
+        [Test, LongRunningTest, HasTimeout]
+        public virtual void TestApplyDeletesOnFlush()
+        {
+            Directory dir = NewDirectory();
+            // Cannot use RandomIndexWriter because we don't want to
+            // ever call commit() for this test:
+            AtomicInt32 docsInSegment = new AtomicInt32();
+            AtomicBoolean closing = new AtomicBoolean();
+            AtomicBoolean sawAfterFlush = new AtomicBoolean();
+            IndexWriter w = new IndexWriterAnonymousInnerClassHelper(this, dir,
+                NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                    .SetRAMBufferSizeMB(0.5)
+                    .SetMaxBufferedDocs(-1)
+                    .SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES)
+                    .SetReaderPooling(false),
+                docsInSegment, closing, sawAfterFlush);
+            int id = 0;
+            while (true)
+            {
+                StringBuilder sb = new StringBuilder();
+                for (int termIDX = 0; termIDX < 100; termIDX++)
+                {
+                    sb.Append(' ').Append(TestUtil.RandomRealisticUnicodeString(Random()));
+                }
+                if (id == 500)
+                {
+                    w.DeleteDocuments(new Term("id", "0"));
+                }
+                Document doc = new Document();
+                doc.Add(NewStringField("id", "" + id, Field.Store.NO));
+                doc.Add(NewTextField("body", sb.ToString(), Field.Store.NO));
+                w.UpdateDocument(new Term("id", "" + id), doc);
+                docsInSegment.IncrementAndGet();
+                // TODO: fix this test
+                if (SlowFileExists(dir, "_0_1.del") || SlowFileExists(dir, "_0_1.liv"))
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: deletes created @ id=" + id);
+                    }
+                    break;
+                }
+                id++;
+            }
+            closing.Set(true);
+            Assert.IsTrue(sawAfterFlush.Get());
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        private class IndexWriterAnonymousInnerClassHelper : IndexWriter
+        {
+            private readonly TestIndexWriterDelete OuterInstance;
+
+            private AtomicInt32 DocsInSegment;
+            private AtomicBoolean Closing;
+            private AtomicBoolean SawAfterFlush;
+
+            public IndexWriterAnonymousInnerClassHelper(TestIndexWriterDelete outerInstance, Directory dir, IndexWriterConfig setReaderPooling, AtomicInt32 docsInSegment, AtomicBoolean closing, AtomicBoolean sawAfterFlush)
+                : base(dir, setReaderPooling)
+            {
+                this.OuterInstance = outerInstance;
+                this.DocsInSegment = docsInSegment;
+                this.Closing = closing;
+                this.SawAfterFlush = sawAfterFlush;
+            }
+
+            protected override void DoAfterFlush()
+            {
+                Assert.IsTrue(Closing.Get() || DocsInSegment.Get() >= 7, "only " + DocsInSegment.Get() + " in segment");
+                DocsInSegment.Set(0);
+                SawAfterFlush.Set(true);
+            }
+        }
+
+        // LUCENE-4455
+        [Test]
+        public virtual void TestDeletesCheckIndexOutput()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetMaxBufferedDocs(2);
+            IndexWriter w = new IndexWriter(dir, (IndexWriterConfig)iwc.Clone());
+            Document doc = new Document();
+            doc.Add(NewField("field", "0", StringField.TYPE_NOT_STORED));
+            w.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(NewField("field", "1", StringField.TYPE_NOT_STORED));
+            w.AddDocument(doc);
+            w.Commit();
+            Assert.AreEqual(1, w.SegmentCount);
+
+            w.DeleteDocuments(new Term("field", "0"));
+            w.Commit();
+            Assert.AreEqual(1, w.SegmentCount);
+            w.Dispose();
+
+            ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
+            //MemoryStream bos = new MemoryStream(1024);
+            CheckIndex checker = new CheckIndex(dir);
+            checker.InfoStream = new StreamWriter(bos, Encoding.UTF8);
+            CheckIndex.Status indexStatus = checker.DoCheckIndex(null);
+            Assert.IsTrue(indexStatus.Clean);
+            checker.FlushInfoStream();
+            string s = bos.ToString();
+
+            // Segment should have deletions:
+            Assert.IsTrue(s.Contains("has deletions"), "string was: " + s);
+            w = new IndexWriter(dir, (IndexWriterConfig)iwc.Clone());
+            w.ForceMerge(1);
+            w.Dispose();
+
+            bos = new ByteArrayOutputStream(1024);
+            checker.InfoStream = new StreamWriter(bos, Encoding.UTF8);
+            indexStatus = checker.DoCheckIndex(null);
+            Assert.IsTrue(indexStatus.Clean);
+            checker.FlushInfoStream();
+            s = bos.ToString();
+            Assert.IsFalse(s.Contains("has deletions"));
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestTryDeleteDocument()
+        {
+            Directory d = NewDirectory();
+
+            IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter w = new IndexWriter(d, iwc);
+            Document doc = new Document();
+            w.AddDocument(doc);
+            w.AddDocument(doc);
+            w.AddDocument(doc);
+            w.Dispose();
+
+            iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetOpenMode(OpenMode.APPEND);
+            w = new IndexWriter(d, iwc);
+            IndexReader r = DirectoryReader.Open(w, false);
+            Assert.IsTrue(w.TryDeleteDocument(r, 1));
+            Assert.IsTrue(w.TryDeleteDocument(r.Leaves[0].Reader, 0));
+            r.Dispose();
+            w.Dispose();
+
+            r = DirectoryReader.Open(d);
+            Assert.AreEqual(2, r.NumDeletedDocs);
+            Assert.IsNotNull(MultiFields.GetLiveDocs(r));
+            r.Dispose();
+            d.Dispose();
+        }
+    }
+}
\ No newline at end of file


[18/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs b/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs
new file mode 100644
index 0000000..f073cb4
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs
@@ -0,0 +1,1064 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Documents;
+using Lucene.Net.Randomized.Generators;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Threading;
+
+namespace Lucene.Net.Index
+{
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestStressIndexing2 : LuceneTestCase
+    {
+        internal static int MaxFields = 4;
+        internal static int BigFieldSize = 10;
+        internal static bool SameFieldOrder = false;
+        internal static int MergeFactor = 3;
+        internal static int MaxBufferedDocs = 3;
+        internal static int Seed = 0;
+
+        public sealed class YieldTestPoint : RandomIndexWriter.TestPoint
+        {
+            private readonly TestStressIndexing2 OuterInstance;
+
+            public YieldTestPoint(TestStressIndexing2 outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public void Apply(string name)
+            {
+                //      if (name.equals("startCommit")) {
+                if (Random().Next(4) == 2)
+                {
+                    Thread.Sleep(0);
+                }
+            }
+        }
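+
+        // Illustrative sketch (assumption: RandomIndexWriter.TestPoint only
+        // requires Apply(string)): a hypothetical test point that logs each
+        // named point instead of yielding the thread.
+        public sealed class LoggingTestPoint : RandomIndexWriter.TestPoint
+        {
+            public void Apply(string name)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: at test point " + name);
+                }
+            }
+        }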
+
+        //
+        [Test]
+        public virtual void TestRandomIWReader()
+        {
+            Directory dir = NewDirectory();
+
+            // TODO: verify equals using IW.getReader
+            DocsAndWriter dw = IndexRandomIWReader(5, 3, 100, dir);
+            DirectoryReader reader = dw.Writer.Reader;
+            dw.Writer.Commit();
+            VerifyEquals(Random(), reader, dir, "id");
+            reader.Dispose();
+            dw.Writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestRandom()
+        {
+            Directory dir1 = NewDirectory();
+            Directory dir2 = NewDirectory();
+            // mergeFactor=2; maxBufferedDocs=2; Map docs = indexRandom(1, 3, 2, dir1);
+            int maxThreadStates = 1 + Random().Next(10);
+            bool doReaderPooling = Random().NextBoolean();
+            IDictionary<string, Document> docs = IndexRandom(5, 3, 100, dir1, maxThreadStates, doReaderPooling);
+            IndexSerial(Random(), docs, dir2);
+
+            // sanity check: the verifier should find a directory equal to itself
+            // verifyEquals(dir1, dir1, "id");
+            // verifyEquals(dir2, dir2, "id");
+
+            VerifyEquals(dir1, dir2, "id");
+            dir1.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestMultiConfig()
+        {
+            // test lots of smaller different params together
+
+            int num = AtLeast(3);
+            for (int i = 0; i < num; i++) // increase iterations for better testing
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\n\nTEST: top iter=" + i);
+                }
+                SameFieldOrder = Random().NextBoolean();
+                MergeFactor = Random().Next(3) + 2;
+                MaxBufferedDocs = Random().Next(3) + 2;
+                int maxThreadStates = 1 + Random().Next(10);
+                bool doReaderPooling = Random().NextBoolean();
+                Seed++;
+
+                int nThreads = Random().Next(5) + 1;
+                int iter = Random().Next(5) + 1;
+                int range = Random().Next(20) + 1;
+                Directory dir1 = NewDirectory();
+                Directory dir2 = NewDirectory();
+                if (VERBOSE)
+                {
+                    Console.WriteLine("  nThreads=" + nThreads + " iter=" + iter + " range=" + range + " doPooling=" + doReaderPooling + " maxThreadStates=" + maxThreadStates + " sameFieldOrder=" + SameFieldOrder + " mergeFactor=" + MergeFactor + " maxBufferedDocs=" + MaxBufferedDocs);
+                }
+                IDictionary<string, Document> docs = IndexRandom(nThreads, iter, range, dir1, maxThreadStates, doReaderPooling);
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: index serial");
+                }
+                IndexSerial(Random(), docs, dir2);
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: verify");
+                }
+                VerifyEquals(dir1, dir2, "id");
+                dir1.Dispose();
+                dir2.Dispose();
+            }
+        }
+
+        internal static Term IdTerm = new Term("id", "");
+        internal IndexingThread[] Threads;
+        internal static IComparer<IIndexableField> fieldNameComparer = new ComparerAnonymousInnerClassHelper();
+
+        private class ComparerAnonymousInnerClassHelper : IComparer<IIndexableField>
+        {
+            public ComparerAnonymousInnerClassHelper()
+            {
+            }
+
+            public virtual int Compare(IIndexableField o1, IIndexableField o2)
+            {
+                return o1.Name.CompareTo(o2.Name);
+            }
+        }
+
+        // this test avoids using any extra synchronization in the multiple
+        // indexing threads to verify that IndexWriter correctly synchronizes
+        // everything.
+
+        public class DocsAndWriter
+        {
+            internal IDictionary<string, Document> Docs;
+            internal IndexWriter Writer;
+        }
+
+        public virtual DocsAndWriter IndexRandomIWReader(int nThreads, int iterations, int range, Directory dir)
+        {
+            IDictionary<string, Document> docs = new Dictionary<string, Document>();
+            IndexWriter w = RandomIndexWriter.MockIndexWriter(dir,
+                NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                    .SetOpenMode(OpenMode.CREATE)
+                    .SetRAMBufferSizeMB(0.1)
+                    .SetMaxBufferedDocs(MaxBufferedDocs)
+                    .SetMergePolicy(NewLogMergePolicy()),
+                new YieldTestPoint(this));
+            w.Commit();
+            LogMergePolicy lmp = (LogMergePolicy)w.Config.MergePolicy;
+            lmp.NoCFSRatio = 0.0;
+            lmp.MergeFactor = MergeFactor;
+            /*
+            w.setMaxMergeDocs(Integer.MAX_VALUE);
+            w.setMaxFieldLength(10000);
+            w.SetRAMBufferSizeMB(1);
+            w.setMergeFactor(10);
+            */
+
+            Threads = new IndexingThread[nThreads];
+            for (int i = 0; i < Threads.Length; i++)
+            {
+                IndexingThread th = new IndexingThread(this);
+                th.w = w;
+                th.@base = 1000000 * i;
+                th.Range = range;
+                th.Iterations = iterations;
+                Threads[i] = th;
+            }
+
+            for (int i = 0; i < Threads.Length; i++)
+            {
+                Threads[i].Start();
+            }
+            for (int i = 0; i < Threads.Length; i++)
+            {
+                Threads[i].Join();
+            }
+
+            // w.ForceMerge(1);
+            //w.Dispose();
+
+            for (int i = 0; i < Threads.Length; i++)
+            {
+                IndexingThread th = Threads[i];
+                lock (th)
+                {
+                    docs.PutAll(th.Docs);
+                }
+            }
+
+            TestUtil.CheckIndex(dir);
+            DocsAndWriter dw = new DocsAndWriter();
+            dw.Docs = docs;
+            dw.Writer = w;
+            return dw;
+        }
+
+        public virtual IDictionary<string, Document> IndexRandom(int nThreads, int iterations, int range, Directory dir, int maxThreadStates, bool doReaderPooling)
+        {
+            IDictionary<string, Document> docs = new Dictionary<string, Document>();
+            IndexWriter w = RandomIndexWriter.MockIndexWriter(dir,
+                NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                    .SetOpenMode(OpenMode.CREATE)
+                    .SetRAMBufferSizeMB(0.1)
+                    .SetMaxBufferedDocs(MaxBufferedDocs)
+                    .SetIndexerThreadPool(new ThreadAffinityDocumentsWriterThreadPool(maxThreadStates))
+                    .SetReaderPooling(doReaderPooling)
+                    .SetMergePolicy(NewLogMergePolicy()),
+                new YieldTestPoint(this));
+            LogMergePolicy lmp = (LogMergePolicy)w.Config.MergePolicy;
+            lmp.NoCFSRatio = 0.0;
+            lmp.MergeFactor = MergeFactor;
+
+            Threads = new IndexingThread[nThreads];
+            for (int i = 0; i < Threads.Length; i++)
+            {
+                IndexingThread th = new IndexingThread(this);
+                th.w = w;
+                th.@base = 1000000 * i;
+                th.Range = range;
+                th.Iterations = iterations;
+                Threads[i] = th;
+            }
+
+            for (int i = 0; i < Threads.Length; i++)
+            {
+                Threads[i].Start();
+            }
+            for (int i = 0; i < Threads.Length; i++)
+            {
+                Threads[i].Join();
+            }
+
+            //w.ForceMerge(1);
+            w.Dispose();
+
+            for (int i = 0; i < Threads.Length; i++)
+            {
+                IndexingThread th = Threads[i];
+                lock (th)
+                {
+                    docs.PutAll(th.Docs);
+                }
+            }
+
+            //System.out.println("TEST: checkindex");
+            TestUtil.CheckIndex(dir);
+
+            return docs;
+        }
+
+        /// <summary>
+        /// LUCENENET specific: this method is non-static because
+        /// NewIndexWriterConfig is no longer static.
+        /// </summary>
+        public void IndexSerial(Random random, IDictionary<string, Document> docs, Directory dir)
+        {
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetMergePolicy(NewLogMergePolicy()));
+
+            // index all docs in a single thread
+            IEnumerator<Document> iter = docs.Values.GetEnumerator();
+            while (iter.MoveNext())
+            {
+                Document d = iter.Current;
+                List<IIndexableField> fields = new List<IIndexableField>();
+                fields.AddRange(d.Fields);
+                // put fields in same order each time
+                fields.Sort(fieldNameComparer);
+
+                Document d1 = new Document();
+                for (int i = 0; i < fields.Count; i++)
+                {
+                    d1.Add(fields[i]);
+                }
+                w.AddDocument(d1);
+                // System.out.println("indexing "+d1);
+            }
+
+            w.Dispose();
+        }
+
+        public virtual void VerifyEquals(Random r, DirectoryReader r1, Directory dir2, string idField)
+        {
+            DirectoryReader r2 = DirectoryReader.Open(dir2);
+            VerifyEquals(r1, r2, idField);
+            r2.Dispose();
+        }
+
+        public virtual void VerifyEquals(Directory dir1, Directory dir2, string idField)
+        {
+            DirectoryReader r1 = DirectoryReader.Open(dir1);
+            DirectoryReader r2 = DirectoryReader.Open(dir2);
+            VerifyEquals(r1, r2, idField);
+            r1.Dispose();
+            r2.Dispose();
+        }
+
+        private static void PrintDocs(DirectoryReader r)
+        {
+            foreach (AtomicReaderContext ctx in r.Leaves)
+            {
+                // TODO: improve this
+                AtomicReader sub = (AtomicReader)ctx.Reader;
+                IBits liveDocs = sub.LiveDocs;
+                Console.WriteLine("  " + ((SegmentReader)sub).SegmentInfo);
+                for (int docID = 0; docID < sub.MaxDoc; docID++)
+                {
+                    Document doc = sub.Document(docID);
+                    if (liveDocs == null || liveDocs.Get(docID))
+                    {
+                        Console.WriteLine("    docID=" + docID + " id:" + doc.Get("id"));
+                    }
+                    else
+                    {
+                        Console.WriteLine("    DEL docID=" + docID + " id:" + doc.Get("id"));
+                    }
+                }
+            }
+        }
+
+        public virtual void VerifyEquals(DirectoryReader r1, DirectoryReader r2, string idField)
+        {
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nr1 docs:");
+                PrintDocs(r1);
+                Console.WriteLine("\nr2 docs:");
+                PrintDocs(r2);
+            }
+            if (r1.NumDocs != r2.NumDocs)
+            {
+                Debug.Assert(false, "r1.NumDocs=" + r1.NumDocs + " vs r2.NumDocs=" + r2.NumDocs);
+            }
+            bool hasDeletes = !(r1.MaxDoc == r2.MaxDoc && r1.NumDocs == r1.MaxDoc);
+
+            int[] r2r1 = new int[r2.MaxDoc]; // r2 id to r1 id mapping
+
+            // create mapping from id2 space to id1 space based on idField
+            Fields f1 = MultiFields.GetFields(r1);
+            if (f1 == null)
+            {
+                // make sure r2 is empty
+                Assert.IsNull(MultiFields.GetFields(r2));
+                return;
+            }
+            Terms terms1 = f1.GetTerms(idField);
+            if (terms1 == null)
+            {
+                Assert.IsTrue(MultiFields.GetFields(r2) == null || MultiFields.GetFields(r2).GetTerms(idField) == null);
+                return;
+            }
+            TermsEnum termsEnum = terms1.GetIterator(null);
+
+            IBits liveDocs1 = MultiFields.GetLiveDocs(r1);
+            IBits liveDocs2 = MultiFields.GetLiveDocs(r2);
+
+            Fields fields = MultiFields.GetFields(r2);
+            if (fields == null)
+            {
+                // make sure r1 is in fact empty (eg has only all
+                // deleted docs):
+                IBits liveDocs = MultiFields.GetLiveDocs(r1);
+                DocsEnum docs = null;
+                while (termsEnum.Next() != null)
+                {
+                    docs = TestUtil.Docs(Random(), termsEnum, liveDocs, docs, DocsEnum.FLAG_NONE);
+                    while (docs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+                    {
+                        Assert.Fail("r1 is not empty but r2 is");
+                    }
+                }
+                return;
+            }
+            Terms terms2 = fields.GetTerms(idField);
+            TermsEnum termsEnum2 = terms2.GetIterator(null);
+
+            DocsEnum termDocs1 = null;
+            DocsEnum termDocs2 = null;
+
+            while (true)
+            {
+                BytesRef term = termsEnum.Next();
+                //System.out.println("TEST: match id term=" + term);
+                if (term == null)
+                {
+                    break;
+                }
+
+                termDocs1 = TestUtil.Docs(Random(), termsEnum, liveDocs1, termDocs1, DocsEnum.FLAG_NONE);
+                if (termsEnum2.SeekExact(term))
+                {
+                    termDocs2 = TestUtil.Docs(Random(), termsEnum2, liveDocs2, termDocs2, DocsEnum.FLAG_NONE);
+                }
+                else
+                {
+                    termDocs2 = null;
+                }
+
+                if (termDocs1.NextDoc() == DocIdSetIterator.NO_MORE_DOCS)
+                {
+                    // this doc is deleted and wasn't replaced
+                    Assert.IsTrue(termDocs2 == null || termDocs2.NextDoc() == DocIdSetIterator.NO_MORE_DOCS);
+                    continue;
+                }
+
+                int id1 = termDocs1.DocID;
+                Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, termDocs1.NextDoc());
+
+                Assert.IsTrue(termDocs2.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                int id2 = termDocs2.DocID;
+                Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, termDocs2.NextDoc());
+
+                r2r1[id2] = id1;
+
+                // verify stored fields are equivalent
+                try
+                {
+                    VerifyEquals(r1.Document(id1), r2.Document(id2));
+                }
+                catch (Exception t)
+                {
+                    Console.WriteLine("FAILED id=" + term + " id1=" + id1 + " id2=" + id2 + " term=" + term);
+                    Console.WriteLine("  d1=" + r1.Document(id1));
+                    Console.WriteLine("  d2=" + r2.Document(id2));
+                    throw t;
+                }
+
+                try
+                {
+                    // verify term vectors are equivalent
+                    VerifyEquals(r1.GetTermVectors(id1), r2.GetTermVectors(id2));
+                }
+                catch (Exception e)
+                {
+                    Console.WriteLine("FAILED id=" + term + " id1=" + id1 + " id2=" + id2);
+                    Fields tv1 = r1.GetTermVectors(id1);
+                    Console.WriteLine("  d1=" + tv1);
+                    if (tv1 != null)
+                    {
+                        DocsAndPositionsEnum dpEnum = null;
+                        DocsEnum dEnum = null;
+                        foreach (string field in tv1)
+                        {
+                            Console.WriteLine("    " + field + ":");
+                            Terms terms3 = tv1.GetTerms(field);
+                            Assert.IsNotNull(terms3);
+                            TermsEnum termsEnum3 = terms3.GetIterator(null);
+                            BytesRef term2;
+                            while ((term2 = termsEnum3.Next()) != null)
+                            {
+                                Console.WriteLine("      " + term2.Utf8ToString() + ": freq=" + termsEnum3.TotalTermFreq);
+                                dpEnum = termsEnum3.DocsAndPositions(null, dpEnum);
+                                if (dpEnum != null)
+                                {
+                                    Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                                    int freq = dpEnum.Freq;
+                                    Console.WriteLine("        doc=" + dpEnum.DocID + " freq=" + freq);
+                                    for (int posUpto = 0; posUpto < freq; posUpto++)
+                                    {
+                                        Console.WriteLine("          pos=" + dpEnum.NextPosition());
+                                    }
+                                }
+                                else
+                                {
+                                    dEnum = TestUtil.Docs(Random(), termsEnum3, null, dEnum, DocsEnum.FLAG_FREQS);
+                                    Assert.IsNotNull(dEnum);
+                                    Assert.IsTrue(dEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                                    int freq = dEnum.Freq;
+                                    Console.WriteLine("        doc=" + dEnum.DocID + " freq=" + freq);
+                                }
+                            }
+                        }
+                    }
+
+                    Fields tv2 = r2.GetTermVectors(id2);
+                    Console.WriteLine("  d2=" + tv2);
+                    if (tv2 != null)
+                    {
+                        DocsAndPositionsEnum dpEnum = null;
+                        DocsEnum dEnum = null;
+                        foreach (string field in tv2)
+                        {
+                            Console.WriteLine("    " + field + ":");
+                            Terms terms3 = tv2.GetTerms(field);
+                            Assert.IsNotNull(terms3);
+                            TermsEnum termsEnum3 = terms3.GetIterator(null);
+                            BytesRef term2;
+                            while ((term2 = termsEnum3.Next()) != null)
+                            {
+                                Console.WriteLine("      " + term2.Utf8ToString() + ": freq=" + termsEnum3.TotalTermFreq);
+                                dpEnum = termsEnum3.DocsAndPositions(null, dpEnum);
+                                if (dpEnum != null)
+                                {
+                                    Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                                    int freq = dpEnum.Freq;
+                                    Console.WriteLine("        doc=" + dpEnum.DocID + " freq=" + freq);
+                                    for (int posUpto = 0; posUpto < freq; posUpto++)
+                                    {
+                                        Console.WriteLine("          pos=" + dpEnum.NextPosition());
+                                    }
+                                }
+                                else
+                                {
+                                    dEnum = TestUtil.Docs(Random(), termsEnum3, null, dEnum, DocsEnum.FLAG_FREQS);
+                                    Assert.IsNotNull(dEnum);
+                                    Assert.IsTrue(dEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                                    int freq = dEnum.Freq;
+                                    Console.WriteLine("        doc=" + dEnum.DocID + " freq=" + freq);
+                                }
+                            }
+                        }
+                    }
+
+                    throw; // rethrow, preserving the original stack trace
+                }
+            }
+
+            //System.out.println("TEST: done match id");
+
+            // Verify postings
+            //System.out.println("TEST: create te1");
+            Fields fields1 = MultiFields.GetFields(r1);
+            IEnumerator<string> fields1Enum = fields1.GetEnumerator();
+            Fields fields2 = MultiFields.GetFields(r2);
+            IEnumerator<string> fields2Enum = fields2.GetEnumerator();
+
+            string field1 = null, field2 = null;
+            TermsEnum termsEnum1 = null;
+            termsEnum2 = null;
+            DocsEnum docs1 = null, docs2 = null;
+
+            // pack both doc and freq into single element for easy sorting
+            long[] info1 = new long[r1.NumDocs];
+            long[] info2 = new long[r2.NumDocs];
+
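+            // walk both readers' fields/terms in lockstep; each pass collects the (docID, freq) postings for one term from each side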
+            for (; ; )
+            {
+                BytesRef term1 = null, term2 = null;
+
+                // iterate until we get some docs
+                int len1;
+                for (; ; )
+                {
+                    len1 = 0;
+                    if (termsEnum1 == null)
+                    {
+                        if (!fields1Enum.MoveNext())
+                        {
+                            break;
+                        }
+                        field1 = fields1Enum.Current;
+                        Terms terms = fields1.GetTerms(field1);
+                        if (terms == null)
+                        {
+                            continue;
+                        }
+                        termsEnum1 = terms.GetIterator(null);
+                    }
+                    term1 = termsEnum1.Next();
+                    if (term1 == null)
+                    {
+                        // no more terms in this field
+                        termsEnum1 = null;
+                        continue;
+                    }
+
+                    //System.out.println("TEST: term1=" + term1);
+                    docs1 = TestUtil.Docs(Random(), termsEnum1, liveDocs1, docs1, DocsEnum.FLAG_FREQS);
+                    while (docs1.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+                    {
+                        int d = docs1.DocID;
+                        int f = docs1.Freq;
+                        info1[len1] = (((long)d) << 32) | f; // docID in high 32 bits, freq in low 32 bits
+                        len1++;
+                    }
+                    if (len1 > 0)
+                    {
+                        break;
+                    }
+                }
+
+                // iterate until we get some docs
+                int len2;
+                for (; ; )
+                {
+                    len2 = 0;
+                    if (termsEnum2 == null)
+                    {
+                        if (!fields2Enum.MoveNext())
+                        {
+                            break;
+                        }
+                        field2 = fields2Enum.Current;
+                        Terms terms = fields2.GetTerms(field2);
+                        if (terms == null)
+                        {
+                            continue;
+                        }
+                        termsEnum2 = terms.GetIterator(null);
+                    }
+                    term2 = termsEnum2.Next();
+                    if (term2 == null)
+                    {
+                        // no more terms in this field
+                        termsEnum2 = null;
+                        continue;
+                    }
+
+                    //System.out.println("TEST: term1=" + term1);
+                    docs2 = TestUtil.Docs(Random(), termsEnum2, liveDocs2, docs2, DocsEnum.FLAG_FREQS);
+                    while (docs2.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+                    {
+                        int d = r2r1[docs2.DocID];
+                        int f = docs2.Freq;
+                        info2[len2] = (((long)d) << 32) | f; // remapped docID in high 32 bits, freq in low 32 bits
+                        len2++;
+                    }
+                    if (len2 > 0)
+                    {
+                        break;
+                    }
+                }
+
+                Assert.AreEqual(len1, len2);
+                if (len1 == 0) // no more terms
+                {
+                    break;
+                }
+
+                Assert.AreEqual(field1, field2);
+                Assert.IsTrue(term1.BytesEquals(term2));
+
+                if (!hasDeletes)
+                {
+                    Assert.AreEqual(termsEnum1.DocFreq, termsEnum2.DocFreq);
+                }
+
+                Assert.AreEqual(term1, term2, "len1=" + len1 + " len2=" + len2 + " deletes?=" + hasDeletes);
+
+                // sort info2 to get it into ascending docid
+                Array.Sort(info2, 0, len2);
+
+                // now compare
+                for (int i = 0; i < len1; i++)
+                {
+                    Assert.AreEqual(info1[i], info2[i], "i=" + i + " len=" + len1 + " d1=" + ((long)((ulong)info1[i] >> 32)) + " f1=" + (info1[i] & int.MaxValue) + " d2=" + ((long)((ulong)info2[i] >> 32)) + " f2=" + (info2[i] & int.MaxValue) + " field=" + field1 + " term=" + term1.Utf8ToString());
+                }
+            }
+        }
+
+        public static void VerifyEquals(Document d1, Document d2)
+        {
+            IList<IIndexableField> ff1 = d1.Fields;
+            IList<IIndexableField> ff2 = d2.Fields;
+
+            ff1.Sort(fieldNameComparer);
+            ff2.Sort(fieldNameComparer);
+
+            Assert.AreEqual(ff1.Count, ff2.Count, ff1 + " : " + ff2);
+
+            for (int i = 0; i < ff1.Count; i++)
+            {
+                IIndexableField f1 = ff1[i];
+                IIndexableField f2 = ff2[i];
+                if (f1.GetBinaryValue() != null)
+                {
+                    Debug.Assert(f2.GetBinaryValue() != null);
+                }
+                else
+                {
+                    string s1 = f1.GetStringValue();
+                    string s2 = f2.GetStringValue();
+                    Assert.AreEqual(s1, s2, ff1 + " : " + ff2);
+                }
+            }
+        }
+
+        public static void VerifyEquals(Fields d1, Fields d2)
+        {
+            if (d1 == null)
+            {
+                Assert.IsTrue(d2 == null || d2.Count == 0);
+                return;
+            }
+            Assert.IsTrue(d2 != null);
+
+            IEnumerator<string> fieldsEnum2 = d2.GetEnumerator();
+
+            foreach (string field1 in d1)
+            {
+                Assert.IsTrue(fieldsEnum2.MoveNext(), "d2 has fewer fields than d1");
+                string field2 = fieldsEnum2.Current;
+                Assert.AreEqual(field1, field2);
+
+                Terms terms1 = d1.GetTerms(field1);
+                Assert.IsNotNull(terms1);
+                TermsEnum termsEnum1 = terms1.GetIterator(null);
+
+                Terms terms2 = d2.GetTerms(field2);
+                Assert.IsNotNull(terms2);
+                TermsEnum termsEnum2 = terms2.GetIterator(null);
+
+                DocsAndPositionsEnum dpEnum1 = null;
+                DocsAndPositionsEnum dpEnum2 = null;
+                DocsEnum dEnum1 = null;
+                DocsEnum dEnum2 = null;
+
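+                // compare positions (and offsets when present) for each term; fall back to doc/freq comparison when positions are unavailable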
+                BytesRef term1;
+                while ((term1 = termsEnum1.Next()) != null)
+                {
+                    BytesRef term2 = termsEnum2.Next();
+                    Assert.AreEqual(term1, term2);
+                    Assert.AreEqual(termsEnum1.TotalTermFreq, termsEnum2.TotalTermFreq);
+
+                    dpEnum1 = termsEnum1.DocsAndPositions(null, dpEnum1);
+                    dpEnum2 = termsEnum2.DocsAndPositions(null, dpEnum2);
+                    if (dpEnum1 != null)
+                    {
+                        Assert.IsNotNull(dpEnum2);
+                        int docID1 = dpEnum1.NextDoc();
+                        dpEnum2.NextDoc();
+                        // docIDs are not supposed to be equal
+                        //int docID2 = dpEnum2.NextDoc();
+                        //Assert.AreEqual(docID1, docID2);
+                        Assert.IsTrue(docID1 != DocIdSetIterator.NO_MORE_DOCS);
+
+                        int freq1 = dpEnum1.Freq;
+                        int freq2 = dpEnum2.Freq;
+                        Assert.AreEqual(freq1, freq2);
+                        IOffsetAttribute offsetAtt1 = dpEnum1.Attributes.HasAttribute<IOffsetAttribute>() ? dpEnum1.Attributes.GetAttribute<IOffsetAttribute>() : null;
+                        IOffsetAttribute offsetAtt2 = dpEnum2.Attributes.HasAttribute<IOffsetAttribute>() ? dpEnum2.Attributes.GetAttribute<IOffsetAttribute>() : null;
+
+                        if (offsetAtt1 != null)
+                        {
+                            Assert.IsNotNull(offsetAtt2);
+                        }
+                        else
+                        {
+                            Assert.IsNull(offsetAtt2);
+                        }
+
+                        for (int posUpto = 0; posUpto < freq1; posUpto++)
+                        {
+                            int pos1 = dpEnum1.NextPosition();
+                            int pos2 = dpEnum2.NextPosition();
+                            Assert.AreEqual(pos1, pos2);
+                            if (offsetAtt1 != null)
+                            {
+                                Assert.AreEqual(offsetAtt1.StartOffset, offsetAtt2.StartOffset);
+                                Assert.AreEqual(offsetAtt1.EndOffset, offsetAtt2.EndOffset);
+                            }
+                        }
+                        Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dpEnum1.NextDoc());
+                        Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dpEnum2.NextDoc());
+                    }
+                    else
+                    {
+                        dEnum1 = TestUtil.Docs(Random(), termsEnum1, null, dEnum1, DocsEnum.FLAG_FREQS);
+                        dEnum2 = TestUtil.Docs(Random(), termsEnum2, null, dEnum2, DocsEnum.FLAG_FREQS);
+                        Assert.IsNotNull(dEnum1);
+                        Assert.IsNotNull(dEnum2);
+                        int docID1 = dEnum1.NextDoc();
+                        dEnum2.NextDoc();
+                        // docIDs are not supposed to be equal
+                        //int docID2 = dEnum2.NextDoc();
+                        //Assert.AreEqual(docID1, docID2);
+                        Assert.IsTrue(docID1 != DocIdSetIterator.NO_MORE_DOCS);
+                        int freq1 = dEnum1.Freq;
+                        int freq2 = dEnum2.Freq;
+                        Assert.AreEqual(freq1, freq2);
+                        Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dEnum1.NextDoc());
+                        Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dEnum2.NextDoc());
+                    }
+                }
+
+                Assert.IsNull(termsEnum2.Next());
+            }
+            Assert.IsFalse(fieldsEnum2.MoveNext());
+        }
+
+        internal class IndexingThread : ThreadClass
+        {
+            private readonly TestStressIndexing2 OuterInstance;
+
+            public IndexingThread(TestStressIndexing2 outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            internal IndexWriter w;
+            internal int @base;
+            internal int Range;
+            internal int Iterations;
+            internal IDictionary<string, Document> Docs = new Dictionary<string, Document>();
+            internal Random r;
+
+            public virtual int NextInt(int lim)
+            {
+                return r.Next(lim);
+            }
+
+            // start is inclusive and end is exclusive
+            public virtual int NextInt(int start, int end)
+            {
+                return start + r.Next(end - start);
+            }
+
+            internal char[] Buffer = new char[100];
+
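+            // Appends one random token (sometimes containing surrogate pairs) starting at 'start',
+            // terminated by a space; returns the index just past that space.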
+            internal virtual int AddUTF8Token(int start)
+            {
+                int end = start + NextInt(20);
+                if (Buffer.Length < 1 + end)
+                {
+                    char[] newBuffer = new char[(int)((1 + end) * 1.25)];
+                    Array.Copy(Buffer, 0, newBuffer, 0, Buffer.Length);
+                    Buffer = newBuffer;
+                }
+
+                for (int i = start; i < end; i++)
+                {
+                    int t = NextInt(5);
+                    if (0 == t && i < end - 1)
+                    {
+                        // Make a surrogate pair
+                        // High surrogate
+                        Buffer[i++] = (char)NextInt(0xd800, 0xdc00);
+                        // Low surrogate
+                        Buffer[i] = (char)NextInt(0xdc00, 0xe000);
+                    }
+                    else if (t <= 1)
+                    {
+                        Buffer[i] = (char)NextInt(0x80);
+                    }
+                    else if (2 == t)
+                    {
+                        Buffer[i] = (char)NextInt(0x80, 0x800);
+                    }
+                    else if (3 == t)
+                    {
+                        Buffer[i] = (char)NextInt(0x800, 0xd800);
+                    }
+                    else if (4 == t)
+                    {
+                        Buffer[i] = (char)NextInt(0xe000, 0xffff);
+                    }
+                }
+                Buffer[end] = ' ';
+                return 1 + end;
+            }
+
+            public virtual string GetString(int nTokens)
+            {
+                nTokens = nTokens != 0 ? nTokens : r.Next(4) + 1;
+
+                // Half the time make a random UTF8 string
+                if (r.NextBoolean())
+                {
+                    return GetUTF8String(nTokens);
+                }
+
+                // build the char array directly rather than going through a StringBuilder
+                char[] arr = new char[nTokens * 2];
+                for (int i = 0; i < nTokens; i++)
+                {
+                    arr[i * 2] = (char)('A' + r.Next(10));
+                    arr[i * 2 + 1] = ' ';
+                }
+                return new string(arr);
+            }
+
+            public virtual string GetUTF8String(int nTokens)
+            {
+                int upto = 0;
+                Arrays.Fill(Buffer, (char)0);
+                for (int i = 0; i < nTokens; i++)
+                {
+                    upto = AddUTF8Token(upto);
+                }
+                return new string(Buffer, 0, upto);
+            }
+
+            public virtual string IdString
+            {
+                get
+                {
+                    return Convert.ToString(@base + NextInt(Range));
+                }
+            }
+
+            public virtual void IndexDoc()
+            {
+                Document d = new Document();
+
+                FieldType customType1 = new FieldType(TextField.TYPE_STORED);
+                customType1.IsTokenized = false;
+                customType1.OmitNorms = true;
+
+                List<Field> fields = new List<Field>();
+                string idString = IdString;
+                Field idField = OuterInstance.NewField("id", idString, customType1);
+                fields.Add(idField);
+
+                int nFields = NextInt(MaxFields);
+                for (int i = 0; i < nFields; i++)
+                {
+                    FieldType customType = new FieldType();
+                    switch (NextInt(4))
+                    {
+                        case 0:
+                            break;
+
+                        case 1:
+                            customType.StoreTermVectors = true;
+                            break;
+
+                        case 2:
+                            customType.StoreTermVectors = true;
+                            customType.StoreTermVectorPositions = true;
+                            break;
+
+                        case 3:
+                            customType.StoreTermVectors = true;
+                            customType.StoreTermVectorOffsets = true;
+                            break;
+                    }
+
+                    switch (NextInt(4))
+                    {
+                        case 0:
+                            customType.IsStored = true;
+                            customType.OmitNorms = true;
+                            customType.IsIndexed = true;
+                            fields.Add(OuterInstance.NewField("f" + NextInt(100), GetString(1), customType));
+                            break;
+
+                        case 1:
+                            customType.IsIndexed = true;
+                            customType.IsTokenized = true;
+                            fields.Add(OuterInstance.NewField("f" + NextInt(100), GetString(0), customType));
+                            break;
+
+                        case 2:
+                            customType.IsStored = true;
+                            customType.StoreTermVectors = false;
+                            customType.StoreTermVectorOffsets = false;
+                            customType.StoreTermVectorPositions = false;
+                            fields.Add(OuterInstance.NewField("f" + NextInt(100), GetString(0), customType));
+                            break;
+
+                        case 3:
+                            customType.IsStored = true;
+                            customType.IsIndexed = true;
+                            customType.IsTokenized = true;
+                            fields.Add(OuterInstance.NewField("f" + NextInt(100), GetString(BigFieldSize), customType));
+                            break;
+                    }
+                }
+
+                if (SameFieldOrder)
+                {
+                    fields.Sort(fieldNameComparer);
+                }
+                else
+                {
+                    // random placement of id field also
+                    Collections.Swap(fields, NextInt(fields.Count), 0);
+                }
+
+                for (int i = 0; i < fields.Count; i++)
+                {
+                    d.Add(fields[i]);
+                }
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": indexing id:" + idString);
+                }
+                w.UpdateDocument(new Term("id", idString), d);
+                //System.out.println(Thread.currentThread().getName() + ": indexing "+d);
+                Docs[idString] = d;
+            }
+
+            public virtual void DeleteDoc()
+            {
+                string idString = IdString;
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": del id:" + idString);
+                }
+                w.DeleteDocuments(new Term("id", idString));
+                Docs.Remove(idString);
+            }
+
+            public virtual void DeleteByQuery()
+            {
+                string idString = IdString;
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": del query id:" + idString);
+                }
+                w.DeleteDocuments(new TermQuery(new Term("id", idString)));
+                Docs.Remove(idString);
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    r = new Random(@base + Range + Seed);
+                    for (int i = 0; i < Iterations; i++)
+                    {
+                        int what = NextInt(100);
+                        if (what < 5)
+                        {
+                            DeleteDoc();
+                        }
+                        else if (what < 10)
+                        {
+                            DeleteByQuery();
+                        }
+                        else
+                        {
+                            IndexDoc();
+                        }
+                    }
+                }
+                catch (Exception e)
+                {
+                    Console.WriteLine(e.ToString());
+                    Console.Write(e.StackTrace);
+                    Assert.Fail(e.ToString());
+                }
+
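+                // touch Docs under the lock so the main thread reliably observes this thread's updates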
+                lock (this)
+                {
+                    int dummy = Docs.Count;
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestStressNRT.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestStressNRT.cs b/src/Lucene.Net.Tests/Index/TestStressNRT.cs
new file mode 100644
index 0000000..b9d52cb
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestStressNRT.cs
@@ -0,0 +1,530 @@
+using System;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Threading;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using FieldType = FieldType;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using Query = Lucene.Net.Search.Query;
+    using ScoreDoc = Lucene.Net.Search.ScoreDoc;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TopDocs = Lucene.Net.Search.TopDocs;
+
+    [TestFixture]
+    public class TestStressNRT : LuceneTestCase
+    {
+        internal volatile DirectoryReader Reader;
+
+        internal readonly ConcurrentDictionary<int, long> Model = new ConcurrentDictionary<int, long>();
+        internal IDictionary<int, long> CommittedModel = new Dictionary<int, long>();
+        internal long SnapshotCount;
+        internal long CommittedModelClock;
+        internal volatile int LastId;
+        internal readonly string Field = "val_l";
+        internal object[] SyncArr;
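+
+        // Model maps doc id -> the last value written for it; a negative value marks the doc
+        // as deleted (its tombstone, when tombstones are enabled, stores the absolute value).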
+
+        private void InitModel(int ndocs)
+        {
+            SnapshotCount = 0;
+            CommittedModelClock = 0;
+            LastId = 0;
+
+            SyncArr = new object[ndocs];
+
+            for (int i = 0; i < ndocs; i++)
+            {
+                Model[i] = -1L;
+                SyncArr[i] = new object();
+            }
+            CommittedModel.PutAll(Model);
+        }
+
+        [Test]
+        public virtual void Test()
+        {
+            // update variables
+            int commitPercent = Random().Next(20);
+            int softCommitPercent = Random().Next(100); // what percent of the commits are soft
+            int deletePercent = Random().Next(50);
+            int deleteByQueryPercent = Random().Next(25);
+            int ndocs = AtLeast(50);
+            int nWriteThreads = TestUtil.NextInt(Random(), 1, TEST_NIGHTLY ? 10 : 5);
+            int maxConcurrentCommits = TestUtil.NextInt(Random(), 1, TEST_NIGHTLY ? 10 : 5); // number of committers at a time... needed if we want to avoid commit errors due to exceeding the max
+
+            bool tombstones = Random().NextBoolean();
+
+            // query variables
+            AtomicInt64 operations = new AtomicInt64(AtLeast(10000)); // number of query operations to perform in total
+
+            int nReadThreads = TestUtil.NextInt(Random(), 1, TEST_NIGHTLY ? 10 : 5);
+            InitModel(ndocs);
+
+            FieldType storedOnlyType = new FieldType();
+            storedOnlyType.IsStored = true;
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("\n");
+                Console.WriteLine("TEST: commitPercent=" + commitPercent);
+                Console.WriteLine("TEST: softCommitPercent=" + softCommitPercent);
+                Console.WriteLine("TEST: deletePercent=" + deletePercent);
+                Console.WriteLine("TEST: deleteByQueryPercent=" + deleteByQueryPercent);
+                Console.WriteLine("TEST: ndocs=" + ndocs);
+                Console.WriteLine("TEST: nWriteThreads=" + nWriteThreads);
+                Console.WriteLine("TEST: nReadThreads=" + nReadThreads);
+                Console.WriteLine("TEST: maxConcurrentCommits=" + maxConcurrentCommits);
+                Console.WriteLine("TEST: tombstones=" + tombstones);
+                Console.WriteLine("TEST: operations=" + operations);
+                Console.WriteLine("\n");
+            }
+
+            AtomicInt32 numCommitting = new AtomicInt32();
+
+            IList<ThreadClass> threads = new List<ThreadClass>();
+
+            Directory dir = NewDirectory();
+
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer.DoRandomForceMergeAssert = false;
+            writer.Commit();
+            Reader = DirectoryReader.Open(dir);
+
+            for (int i = 0; i < nWriteThreads; i++)
+            {
+                ThreadClass thread = new ThreadAnonymousInnerClassHelper(this, "WRITER" + i, commitPercent, softCommitPercent, deletePercent, deleteByQueryPercent, ndocs, maxConcurrentCommits, tombstones, operations, storedOnlyType, numCommitting, writer);
+
+                threads.Add(thread);
+            }
+
+            for (int i = 0; i < nReadThreads; i++)
+            {
+                ThreadClass thread = new ThreadAnonymousInnerClassHelper2(this, "READER" + i, ndocs, tombstones, operations);
+
+                threads.Add(thread);
+            }
+
+            foreach (ThreadClass thread in threads)
+            {
+                thread.Start();
+            }
+
+            foreach (ThreadClass thread in threads)
+            {
+                thread.Join();
+            }
+
+            writer.Dispose();
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: close reader=" + Reader);
+            }
+            Reader.Dispose();
+            dir.Dispose();
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestStressNRT OuterInstance;
+
+            private int CommitPercent;
+            private int SoftCommitPercent;
+            private int DeletePercent;
+            private int DeleteByQueryPercent;
+            private int Ndocs;
+            private int MaxConcurrentCommits;
+            private bool Tombstones;
+            private AtomicInt64 Operations;
+            private FieldType StoredOnlyType;
+            private AtomicInt32 NumCommitting;
+            private RandomIndexWriter Writer;
+
+            public ThreadAnonymousInnerClassHelper(TestStressNRT outerInstance, string str, int commitPercent, int softCommitPercent, int deletePercent, int deleteByQueryPercent, int ndocs, int maxConcurrentCommits, bool tombstones, AtomicInt64 operations, FieldType storedOnlyType, AtomicInt32 numCommitting, RandomIndexWriter writer)
+                : base(str)
+            {
+                this.OuterInstance = outerInstance;
+                this.CommitPercent = commitPercent;
+                this.SoftCommitPercent = softCommitPercent;
+                this.DeletePercent = deletePercent;
+                this.DeleteByQueryPercent = deleteByQueryPercent;
+                this.Ndocs = ndocs;
+                this.MaxConcurrentCommits = maxConcurrentCommits;
+                this.Tombstones = tombstones;
+                this.Operations = operations;
+                this.StoredOnlyType = storedOnlyType;
+                this.NumCommitting = numCommitting;
+                this.Writer = writer;
+                rand = new Random(Random().Next());
+            }
+
+            internal Random rand;
+
+            public override void Run()
+            {
+                try
+                {
+                    while (Operations.Get() > 0)
+                    {
+                        int oper = rand.Next(100);
+
+                        if (oper < CommitPercent)
+                        {
+                            if (NumCommitting.IncrementAndGet() <= MaxConcurrentCommits)
+                            {
+                                IDictionary<int, long> newCommittedModel;
+                                long version;
+                                DirectoryReader oldReader;
+
+                                lock (OuterInstance)
+                                {
+                                    newCommittedModel = new Dictionary<int, long>(OuterInstance.Model); // take a snapshot
+                                    version = OuterInstance.SnapshotCount++;
+                                    oldReader = OuterInstance.Reader;
+                                    oldReader.IncRef(); // increment the reference since we will use this for reopening
+                                }
+
+                                DirectoryReader newReader;
+                                if (rand.Next(100) < SoftCommitPercent)
+                                {
+                                    // assertU(h.Commit("softCommit","true"));
+                                    if (Random().NextBoolean())
+                                    {
+                                        if (VERBOSE)
+                                        {
+                                            Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": call writer.getReader");
+                                        }
+                                        newReader = Writer.GetReader(true);
+                                    }
+                                    else
+                                    {
+                                        if (VERBOSE)
+                                        {
+                                            Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": reopen reader=" + oldReader + " version=" + version);
+                                        }
+                                        newReader = DirectoryReader.OpenIfChanged(oldReader, Writer.w, true);
+                                    }
+                                }
+                                else
+                                {
+                                    // assertU(commit());
+                                    if (VERBOSE)
+                                    {
+                                        Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": commit+reopen reader=" + oldReader + " version=" + version);
+                                    }
+                                    Writer.Commit();
+                                    if (VERBOSE)
+                                    {
+                                        Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": now reopen after commit");
+                                    }
+                                    newReader = DirectoryReader.OpenIfChanged(oldReader);
+                                }
+
+                                // Code below assumes newReader comes w/
+                                // extra ref:
+                                if (newReader == null)
+                                {
+                                    oldReader.IncRef();
+                                    newReader = oldReader;
+                                }
+
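+                                // release the ref taken when the snapshot was made; newReader carries its own ref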
+                                oldReader.DecRef();
+
+                                lock (OuterInstance)
+                                {
+                                    // install the new reader if it's newest (and check the current version since another reader may have already been installed)
+                                    //System.out.println(Thread.currentThread().getName() + ": newVersion=" + newReader.getVersion());
+                                    Debug.Assert(newReader.RefCount > 0);
+                                    Debug.Assert(OuterInstance.Reader.RefCount > 0);
+                                    if (newReader.Version > OuterInstance.Reader.Version)
+                                    {
+                                        if (VERBOSE)
+                                        {
+                                            Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": install new reader=" + newReader);
+                                        }
+                                        OuterInstance.Reader.DecRef();
+                                        OuterInstance.Reader = newReader;
+
+                                        // Silly: forces fieldInfos to be
+                                        // loaded so we don't hit IOE on later
+                                        // reader.toString
+                                        newReader.ToString();
+
+                                        // install this snapshot only if it's newer than the current one
+                                        if (version >= OuterInstance.CommittedModelClock)
+                                        {
+                                            if (VERBOSE)
+                                            {
+                                                Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": install new model version=" + version);
+                                            }
+                                            OuterInstance.CommittedModel = newCommittedModel;
+                                            OuterInstance.CommittedModelClock = version;
+                                        }
+                                        else
+                                        {
+                                            if (VERBOSE)
+                                            {
+                                                Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": skip install new model version=" + version);
+                                            }
+                                        }
+                                    }
+                                    else
+                                    {
+                                        // if the same reader, don't decRef.
+                                        if (VERBOSE)
+                                        {
+                                            Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": skip install new reader=" + newReader);
+                                        }
+                                        newReader.DecRef();
+                                    }
+                                }
+                            }
+                            NumCommitting.DecrementAndGet();
+                        }
+                        else
+                        {
+                            int id = rand.Next(Ndocs);
+                            object sync = OuterInstance.SyncArr[id];
+
+                            // set the lastId before we actually change it sometimes to try and
+                            // uncover more race conditions between writing and reading
+                            bool before = Random().NextBoolean();
+                            if (before)
+                            {
+                                OuterInstance.LastId = id;
+                            }
+
+                            // We can't concurrently update the same document and retain our invariants of increasing values
+                            // since we can't guarantee what order the updates will be executed.
+                            lock (sync)
+                            {
+                                long val = OuterInstance.Model[id];
+                                long nextVal = Math.Abs(val) + 1;
+
+                                if (oper < CommitPercent + DeletePercent)
+                                {
+                                    // assertU("<delete><id>" + id + "</id></delete>");
+
+                                    // add tombstone first
+                                    if (Tombstones)
+                                    {
+                                        Document d = new Document();
+                                        d.Add(OuterInstance.NewStringField("id", "-" + Convert.ToString(id), Documents.Field.Store.YES));
+                                        d.Add(OuterInstance.NewField(OuterInstance.Field, Convert.ToString(nextVal), StoredOnlyType));
+                                        Writer.UpdateDocument(new Term("id", "-" + Convert.ToString(id)), d);
+                                    }
+
+                                    if (VERBOSE)
+                                    {
+                                        Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": term delDocs id:" + id + " nextVal=" + nextVal);
+                                    }
+                                    Writer.DeleteDocuments(new Term("id", Convert.ToString(id)));
+                                    OuterInstance.Model[id] = -nextVal;
+                                }
+                                else if (oper < CommitPercent + DeletePercent + DeleteByQueryPercent)
+                                {
+                                    //assertU("<delete><query>id:" + id + "</query></delete>");
+
+                                    // add tombstone first
+                                    if (Tombstones)
+                                    {
+                                        Document d = new Document();
+                                        d.Add(OuterInstance.NewStringField("id", "-" + Convert.ToString(id), Documents.Field.Store.YES));
+                                        d.Add(OuterInstance.NewField(OuterInstance.Field, Convert.ToString(nextVal), StoredOnlyType));
+                                        Writer.UpdateDocument(new Term("id", "-" + Convert.ToString(id)), d);
+                                    }
+
+                                    if (VERBOSE)
+                                    {
+                                        Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": query delDocs id:" + id + " nextVal=" + nextVal);
+                                    }
+                                    Writer.DeleteDocuments(new TermQuery(new Term("id", Convert.ToString(id))));
+                                    OuterInstance.Model[id] = -nextVal;
+                                }
+                                else
+                                {
+                                    // assertU(adoc("id",Integer.toString(id), field, Long.toString(nextVal)));
+                                    Document d = new Document();
+                                    d.Add(OuterInstance.NewStringField("id", Convert.ToString(id), Documents.Field.Store.YES));
+                                    d.Add(OuterInstance.NewField(OuterInstance.Field, Convert.ToString(nextVal), StoredOnlyType));
+                                    if (VERBOSE)
+                                    {
+                                        Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": u id:" + id + " val=" + nextVal);
+                                    }
+                                    Writer.UpdateDocument(new Term("id", Convert.ToString(id)), d);
+                                    if (Tombstones)
+                                    {
+                                        // remove tombstone after new addition (this should be optional?)
+                                        Writer.DeleteDocuments(new Term("id", "-" + Convert.ToString(id)));
+                                    }
+                                    OuterInstance.Model[id] = nextVal;
+                                }
+                            }
+
+                            if (!before)
+                            {
+                                OuterInstance.LastId = id;
+                            }
+                        }
+                    }
+                }
+                catch (Exception e)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": FAILED: unexpected exception");
+                    Console.WriteLine(e.StackTrace);
+                    throw; // rethrow, preserving the original stack trace
+                }
+            }
+        }
+
+        private class ThreadAnonymousInnerClassHelper2 : ThreadClass
+        {
+            private readonly TestStressNRT OuterInstance;
+
+            private int Ndocs;
+            private bool Tombstones;
+            private AtomicInt64 Operations;
+
+            public ThreadAnonymousInnerClassHelper2(TestStressNRT outerInstance, string str, int ndocs, bool tombstones, AtomicInt64 operations)
+                : base(str)
+            {
+                this.OuterInstance = outerInstance;
+                this.Ndocs = ndocs;
+                this.Tombstones = tombstones;
+                this.Operations = operations;
+                rand = new Random(Random().Next());
+            }
+
+            internal Random rand;
+
+            public override void Run()
+            {
+                try
+                {
+                    IndexReader lastReader = null;
+                    IndexSearcher lastSearcher = null;
+
+                    while (Operations.DecrementAndGet() >= 0)
+                    {
+                        // bias toward a recently changed doc
+                        int id = rand.Next(100) < 25 ? OuterInstance.LastId : rand.Next(Ndocs);
+
+                        // when indexing, we update the index, then the model
+                        // so when querying, we should first check the model, and then the index
+
+                        long val;
+                        DirectoryReader r;
+                        lock (OuterInstance)
+                        {
+                            val = OuterInstance.CommittedModel[id];
+                            r = OuterInstance.Reader;
+                            r.IncRef();
+                        }
+
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": s id=" + id + " val=" + val + " r=" + r.Version);
+                        }
+
+                        //  sreq = req("wt","json", "q","id:"+Integer.toString(id), "omitHeader","true");
+                        IndexSearcher searcher;
+                        if (r == lastReader)
+                        {
+                            // Just re-use lastSearcher, else
+                            // newSearcher may create too many thread
+                            // pools (ExecutorService):
+                            searcher = lastSearcher;
+                        }
+                        else
+                        {
+                            searcher = OuterInstance.NewSearcher(r);
+                            lastReader = r;
+                            lastSearcher = searcher;
+                        }
+                        Query q = new TermQuery(new Term("id", Convert.ToString(id)));
+                        TopDocs results = searcher.Search(q, 10);
+
+                        if (results.TotalHits == 0 && Tombstones)
+                        {
+                            // if we couldn't find the doc, look for its tombstone
+                            q = new TermQuery(new Term("id", "-" + Convert.ToString(id)));
+                            results = searcher.Search(q, 1);
+                            if (results.TotalHits == 0)
+                            {
+                                if (val == -1L)
+                                {
+                                    // expected... no doc was added yet
+                                    r.DecRef();
+                                    continue;
+                                }
+                                Assert.Fail("No documents or tombstones found for id " + id + ", expected at least " + val + " reader=" + r);
+                            }
+                        }
+
+                        if (results.TotalHits == 0 && !Tombstones)
+                        {
+                            // nothing to do - we can't tell anything from a deleted doc without tombstones
+                        }
+                        else
+                        {
+                            // we should have found the document, or its tombstone
+                            if (results.TotalHits != 1)
+                            {
+                                Console.WriteLine("FAIL: hits id:" + id + " val=" + val);
+                                foreach (ScoreDoc sd in results.ScoreDocs)
+                                {
+                                    Document doc = r.Document(sd.Doc);
+                                    Console.WriteLine("  docID=" + sd.Doc + " id:" + doc.Get("id") + " foundVal=" + doc.Get(OuterInstance.Field));
+                                }
+                                Assert.Fail("id=" + id + " reader=" + r + " totalHits=" + results.TotalHits);
+                            }
+                            Document doc_ = searcher.Doc(results.ScoreDocs[0].Doc);
+                            long foundVal = Convert.ToInt64(doc_.Get(OuterInstance.Field));
+                            if (foundVal < Math.Abs(val))
+                            {
+                                Assert.Fail("foundVal=" + foundVal + " val=" + val + " id=" + id + " reader=" + r);
+                            }
+                        }
+
+                        r.DecRef();
+                    }
+                }
+                catch (Exception e)
+                {
+                    Operations.Set(-1L); // signal the remaining threads to stop
+                    Console.WriteLine(Thread.CurrentThread.Name + ": FAILED: unexpected exception");
+                    Console.WriteLine(e.StackTrace);
+                    throw; // rethrow, preserving the original stack trace
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestSumDocFreq.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestSumDocFreq.cs b/src/Lucene.Net.Tests/Index/TestSumDocFreq.cs
new file mode 100644
index 0000000..66f88c2
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestSumDocFreq.cs
@@ -0,0 +1,112 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// Tests <seealso cref="Terms#getSumDocFreq()"/>
+    /// @lucene.experimental
+    /// </summary>
+    [TestFixture]
+    public class TestSumDocFreq : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestSumDocFreq_Mem()
+        {
+            int numDocs = AtLeast(500);
+
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+
+            Document doc = new Document();
+            Field id = NewStringField("id", "", Field.Store.NO);
+            Field field1 = NewTextField("foo", "", Field.Store.NO);
+            Field field2 = NewTextField("bar", "", Field.Store.NO);
+            doc.Add(id);
+            doc.Add(field1);
+            doc.Add(field2);
+            for (int i = 0; i < numDocs; i++)
+            {
+                id.SetStringValue("" + i);
+                char ch1 = (char)TestUtil.NextInt(Random(), 'a', 'z');
+                char ch2 = (char)TestUtil.NextInt(Random(), 'a', 'z');
+                field1.SetStringValue("" + ch1 + " " + ch2);
+                ch1 = (char)TestUtil.NextInt(Random(), 'a', 'z');
+                ch2 = (char)TestUtil.NextInt(Random(), 'a', 'z');
+                field2.SetStringValue("" + ch1 + " " + ch2);
+                writer.AddDocument(doc);
+            }
+
+            IndexReader ir = writer.Reader;
+
+            AssertSumDocFreq(ir);
+            ir.Dispose();
+
+            int numDeletions = AtLeast(20);
+            for (int i = 0; i < numDeletions; i++)
+            {
+                writer.DeleteDocuments(new Term("id", "" + Random().Next(numDocs)));
+            }
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            ir = DirectoryReader.Open(dir);
+            AssertSumDocFreq(ir);
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        private void AssertSumDocFreq(IndexReader ir)
+        {
+            // compute sumDocFreq across all fields
+            Fields fields = MultiFields.GetFields(ir);
+
+            foreach (string f in fields)
+            {
+                Terms terms = fields.GetTerms(f);
+                long sumDocFreq = terms.SumDocFreq;
+                if (sumDocFreq == -1)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("skipping field: " + f + ", codec does not support sumDocFreq");
+                    }
+                    continue;
+                }
+
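+                // recompute sumDocFreq by summing docFreq over every term, then compare with the codec-reported value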
+                long computedSumDocFreq = 0;
+                TermsEnum termsEnum = terms.GetIterator(null);
+                while (termsEnum.Next() != null)
+                {
+                    computedSumDocFreq += termsEnum.DocFreq;
+                }
+                Assert.AreEqual(computedSumDocFreq, sumDocFreq);
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestTaskMergeSchedulerExternal.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestTaskMergeSchedulerExternal.cs b/src/Lucene.Net.Tests/Index/TestTaskMergeSchedulerExternal.cs
new file mode 100644
index 0000000..2a38dae
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestTaskMergeSchedulerExternal.cs
@@ -0,0 +1,146 @@
+using Lucene.Net.Documents;
+using NUnit.Framework;
+using System;
+using System.IO;
+
+namespace Lucene.Net.Tests
+{
+    using Index;
+    using Util;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using LogMergePolicy = Lucene.Net.Index.LogMergePolicy;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MergePolicy = Lucene.Net.Index.MergePolicy;
+    using MergeScheduler = Lucene.Net.Index.MergeScheduler;
+    using MergeTrigger = Lucene.Net.Index.MergeTrigger;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Holds test cases that verify external APIs are accessible
+    /// from outside the Lucene.Net.Index namespace.
+    /// </summary>
+    public class TestTaskMergeSchedulerExternal : LuceneTestCase
+    {
+        internal volatile bool MergeCalled;
+        internal volatile bool ExcCalled;
+
+        private class MyMergeScheduler : TaskMergeScheduler
+        {
+            private readonly TestTaskMergeSchedulerExternal OuterInstance;
+
+            public MyMergeScheduler(TestTaskMergeSchedulerExternal outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected override void HandleMergeException(Exception t)
+            {
+                OuterInstance.ExcCalled = true;
+            }
+
+            public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMergesFound)
+            {
+                OuterInstance.MergeCalled = true;
+                base.Merge(writer, trigger, newMergesFound);
+            }
+        }
+
+        private class FailOnlyOnMerge : MockDirectoryWrapper.Failure
+        {
+            public override void Eval(MockDirectoryWrapper dir)
+            {
+                if (StackTraceHelper.DoesStackTraceContainMethod("DoMerge"))
+                {
+                    throw new IOException("now failing during merge");
+                }
+            }
+        }
+
+        [Test]
+        public void TestSubclassTaskMergeScheduler()
+        {
+            MockDirectoryWrapper dir = NewMockDirectory();
+            dir.FailOn(new FailOnlyOnMerge());
+
+            Document doc = new Document();
+            Field idField = NewStringField("id", "", Field.Store.YES);
+            doc.Add(idField);
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(new MyMergeScheduler(this)).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetMergePolicy(NewLogMergePolicy()));
+            LogMergePolicy logMP = (LogMergePolicy)writer.Config.MergePolicy;
+            logMP.MergeFactor = 10;
+            for (int i = 0; i < 20; i++)
+            {
+                writer.AddDocument(doc);
+            }
+
+            ((MyMergeScheduler)writer.Config.MergeScheduler).Sync();
+            writer.Dispose();
+
+            Assert.IsTrue(MergeCalled);
+            dir.Dispose();
+        }
+
+        private class ReportingMergeScheduler : MergeScheduler
+        {
+            public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMergesFound)
+            {
+                MergePolicy.OneMerge merge = null;
+                while ((merge = writer.NextMerge()) != null)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("executing merge " + merge.SegString(writer.Directory));
+                    }
+                    writer.Merge(merge);
+                }
+            }
+
+            protected override void Dispose(bool disposing)
+            {
+            }
+        }
+
+        [Test]
+        public void TestCustomMergeScheduler()
+        {
+            // We don't really need to execute anything here; the point is to make sure a custom
+            // MergeScheduler compiles and can actually be used, with no hidden dependencies on
+            // test infrastructure. Therefore, don't use any randomized APIs! (Sketch follows this diff.)
+            Directory dir = new RAMDirectory();
+            IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, null);
+            conf.SetMergeScheduler(new ReportingMergeScheduler());
+            IndexWriter writer = new IndexWriter(dir, conf);
+            writer.AddDocument(new Document());
+            writer.Commit(); // trigger flush
+            writer.AddDocument(new Document());
+            writer.Commit(); // trigger flush
+            writer.ForceMerge(1);
+            writer.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file
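
A condensed sketch of the extension point exercised above: any MergeScheduler
subclass can be attached through IndexWriterConfig.SetMergeScheduler, and
IndexWriter.NextMerge() hands it the pending merges. The InlineMergeScheduler
name is hypothetical; the member signatures follow the code in this diff.

    using Lucene.Net.Index;

    // Hypothetical scheduler: runs each pending merge synchronously on the calling thread.
    internal sealed class InlineMergeScheduler : MergeScheduler
    {
        public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMergesFound)
        {
            MergePolicy.OneMerge merge;
            while ((merge = writer.NextMerge()) != null)
            {
                writer.Merge(merge); // execute immediately, in registration order
            }
        }

        protected override void Dispose(bool disposing)
        {
            // No background threads were started, so there is nothing to clean up.
        }
    }

    // Wiring, as in TestCustomMergeScheduler above:
    //   conf.SetMergeScheduler(new InlineMergeScheduler());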

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestTerm.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestTerm.cs b/src/Lucene.Net.Tests/Index/TestTerm.cs
new file mode 100644
index 0000000..308dbba
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestTerm.cs
@@ -0,0 +1,42 @@
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    [TestFixture]
+    public class TestTerm : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestEquals()
+        {
+            Term @base = new Term("same", "same");
+            Term same = new Term("same", "same");
+            Term differentField = new Term("different", "same");
+            Term differentText = new Term("same", "different");
+            const string differentType = "AString";
+            Assert.AreEqual(@base, @base);
+            Assert.AreEqual(@base, same);
+            Assert.IsFalse(@base.Equals(differentField));
+            Assert.IsFalse(@base.Equals(differentText));
+            Assert.IsFalse(@base.Equals(differentType));
+        }
+    }
+}
\ No newline at end of file
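
Beyond the equality contract checked above, Term also hashes and orders by value,
which is what makes it usable as a dictionary key and in sorted term processing.
A small sketch of those standard consequences (console output is illustrative):

    using System;
    using Lucene.Net.Index;

    public static class TermSketch
    {
        public static void Main()
        {
            Term a = new Term("title", "lucene");
            Term b = new Term("title", "lucene");
            Term c = new Term("body", "lucene");

            Console.WriteLine(a.Equals(b));                        // True: same field and text
            Console.WriteLine(a.GetHashCode() == b.GetHashCode()); // True: equal terms hash alike
            Console.WriteLine(a.CompareTo(c) != 0);                // True: ordered by field, then text
        }
    }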

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestTermVectorsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestTermVectorsFormat.cs b/src/Lucene.Net.Tests/Index/TestTermVectorsFormat.cs
new file mode 100644
index 0000000..b189e41
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestTermVectorsFormat.cs
@@ -0,0 +1,119 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Index
+{
+    using System.Collections.Generic;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Lucene3xCodec = Lucene.Net.Codecs.Lucene3x.Lucene3xCodec;
+
+    /// <summary>
+    /// Tests with the default randomized codec. Not redundant with the other,
+    /// codec-specific instantiations: we want to exercise test-only implementations
+    /// such as Asserting, and to make it easy to write a codec and pass -Dtests.codec.
+    /// </summary>
+    [TestFixture]
+    public class TestTermVectorsFormat : BaseTermVectorsFormatTestCase
+    {
+        protected override Codec Codec
+        {
+            get
+            {
+                return Codec.Default;
+            }
+        }
+
+        protected override IEnumerable<Options> ValidOptions()
+        {
+#pragma warning disable 612, 618
+            if (Codec is Lucene3xCodec)
+#pragma warning restore 612, 618
+            {
+                // payloads are not supported on vectors in 3.x indexes
+                return ValidOptions(Options.NONE, Options.POSITIONS_AND_OFFSETS);
+            }
+            else
+            {
+                return base.ValidOptions();
+            }
+        }
+
+        [Test]
+        public override void TestMergeStability()
+        {
+            AssumeTrue("The MockRandom PF randomizes content on the fly, so we can't check it", false);
+        }
+
+
+
+        #region BaseTermVectorsFormatTestCase
+        // LUCENENET NOTE: Tests declared in an abstract base class are not pulled into the
+        // correct context in Visual Studio. Re-declaring them as [Test] overrides that delegate
+        // to the base fixes that without duplicating the tests. (Generic sketch after this diff.)
+
+        [Test]
+        // only one doc with vectors
+        public override void TestRareVectors()
+        {
+            base.TestRareVectors();
+        }
+
+        [Test]
+        public override void TestHighFreqs()
+        {
+            base.TestHighFreqs();
+        }
+
+        [Test]
+        public override void TestLotsOfFields()
+        {
+            base.TestLotsOfFields();
+        }
+
+        [Test]
+        // different options for the same field
+        public override void TestMixedOptions()
+        {
+            base.TestMixedOptions();
+        }
+
+        [Test]
+        public override void TestRandom()
+        {
+            base.TestRandom();
+        }
+
+        [Test]
+        public override void TestMerge()
+        {
+            base.TestMerge();
+        }
+
+        [Test]
+        // run random tests from different threads to make sure the per-thread clones
+        // don't share mutable data
+        public override void TestClone()
+        {
+            base.TestClone();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file
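
The #region pattern above is an instance of a general NUnit idiom, sketched here
assuming nothing beyond NUnit itself: a runner that only discovers tests on the
concrete fixture still runs inherited cases when the subclass re-declares them as
[Test] overrides that delegate to the base implementation.

    using NUnit.Framework;

    public abstract class BaseFormatTestCase
    {
        // Shared test logic lives in the abstract base class.
        public virtual void TestRoundTrip()
        {
            Assert.AreEqual(42, 40 + 2);
        }
    }

    [TestFixture]
    public class ConcreteFormatTest : BaseFormatTestCase
    {
        [Test] // re-declared so the runner binds the test to this fixture's context
        public override void TestRoundTrip()
        {
            base.TestRoundTrip();
        }
    }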


[51/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
Lucene.Net.Tests: Removed \core directory and put its contents in root directory


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/96822396
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/96822396
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/96822396

Branch: refs/heads/api-work
Commit: 968223960ecc4f9283c3ae251d623be13af9b06b
Parents: a1fb326
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 26 02:34:21 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:17:54 2017 +0700

----------------------------------------------------------------------
 .../Analysis/TestCachingTokenFilter.cs          |  138 +
 src/Lucene.Net.Tests/Analysis/TestCharFilter.cs |   94 +
 .../Analysis/TestGraphTokenizers.cs             |  728 +++++
 .../Analysis/TestLookaheadTokenFilter.cs        |  129 +
 .../Analysis/TestMockAnalyzer.cs                |  420 +++
 .../Analysis/TestMockCharFilter.cs              |   59 +
 .../Analysis/TestNumericTokenStream.cs          |  144 +
 src/Lucene.Net.Tests/Analysis/TestPosition.cs   |   27 +
 .../Analysis/TestReusableStringReader.cs        |   71 +
 src/Lucene.Net.Tests/Analysis/TestToken.cs      |  305 ++
 .../TestCharTermAttributeImpl.cs                |  485 +++
 .../TokenAttributes/TestSimpleAttributeImpl.cs  |   66 +
 .../Analysis/TrivialLookaheadFilter.cs          |  109 +
 src/Lucene.Net.Tests/App.config                 |   47 +
 src/Lucene.Net.Tests/AssemblyInfo.cs            |   87 +
 .../Compressing/AbstractTestCompressionMode.cs  |  180 ++
 .../AbstractTestLZ4CompressionMode.cs           |  129 +
 .../TestCompressingStoredFieldsFormat.cs        |  200 ++
 .../TestCompressingTermVectorsFormat.cs         |  164 +
 .../Compressing/TestFastCompressionMode.cs      |  113 +
 .../Compressing/TestFastDecompressionMode.cs    |  123 +
 .../Compressing/TestHighCompressionMode.cs      |   82 +
 .../Codecs/Lucene3x/TestImpersonation.cs        |   39 +
 .../Lucene3x/TestLucene3xPostingsFormat.cs      |  109 +
 .../Lucene3x/TestLucene3xStoredFieldsFormat.cs  |  145 +
 .../Lucene3x/TestLucene3xTermVectorsFormat.cs   |  122 +
 .../Codecs/Lucene3x/TestSurrogates.cs           |  424 +++
 .../Codecs/Lucene3x/TestTermInfosReaderIndex.cs |  235 ++
 .../Codecs/Lucene40/TestBitVector.cs            |  325 ++
 .../Lucene40/TestLucene40DocValuesFormat.cs     |  555 ++++
 .../Lucene40/TestLucene40PostingsFormat.cs      |  111 +
 .../Lucene40/TestLucene40PostingsReader.cs      |  166 +
 .../Lucene40/TestLucene40StoredFieldsFormat.cs  |  147 +
 .../Lucene40/TestLucene40TermVectorsFormat.cs   |  117 +
 .../Codecs/Lucene40/TestReuseDocsEnum.cs        |  221 ++
 .../Codecs/Lucene41/TestBlockPostingsFormat.cs  |  103 +
 .../Codecs/Lucene41/TestBlockPostingsFormat2.cs |  166 +
 .../Codecs/Lucene41/TestBlockPostingsFormat3.cs |  571 ++++
 .../Codecs/Lucene41/TestForUtil.cs              |   97 +
 .../Lucene41/TestLucene41StoredFieldsFormat.cs  |  146 +
 .../Lucene42/TestLucene42DocValuesFormat.cs     |  581 ++++
 .../Lucene45/TestLucene45DocValuesFormat.cs     |  565 ++++
 .../PerField/TestPerFieldDocValuesFormat.cs     |  658 ++++
 .../PerField/TestPerFieldPostingsFormat.cs      |  100 +
 .../PerField/TestPerFieldPostingsFormat2.cs     |  372 +++
 .../Document/TestBinaryDocument.cs              |  122 +
 src/Lucene.Net.Tests/Document/TestDateTools.cs  |  244 ++
 src/Lucene.Net.Tests/Document/TestDocument.cs   |  454 +++
 src/Lucene.Net.Tests/Document/TestField.cs      |  617 ++++
 src/Lucene.Net.Tests/Index/BinaryTokenStream.cs |  101 +
 src/Lucene.Net.Tests/Index/SynchronizedList.cs  |  168 +
 .../Index/Test2BBinaryDocValues.cs              |  171 +
 src/Lucene.Net.Tests/Index/Test2BDocs.cs        |  105 +
 .../Index/Test2BNumericDocValues.cs             |   89 +
 src/Lucene.Net.Tests/Index/Test2BPositions.cs   |  123 +
 src/Lucene.Net.Tests/Index/Test2BPostings.cs    |  125 +
 .../Index/Test2BPostingsBytes.cs                |  167 +
 .../Index/Test2BSortedDocValues.cs              |  168 +
 src/Lucene.Net.Tests/Index/Test2BTerms.cs       |  317 ++
 .../Index/Test4GBStoredFields.cs                |  123 +
 src/Lucene.Net.Tests/Index/TestAddIndexes.cs    | 1396 ++++++++
 .../Index/TestAllFilesHaveChecksumFooter.cs     |  114 +
 .../Index/TestAllFilesHaveCodecHeader.cs        |  119 +
 src/Lucene.Net.Tests/Index/TestAtomicUpdate.cs  |  218 ++
 .../Index/TestBackwardsCompatibility.cs         | 1079 +++++++
 .../Index/TestBackwardsCompatibility3x.cs       | 1053 +++++++
 .../Index/TestBagOfPositions.cs                 |  210 ++
 src/Lucene.Net.Tests/Index/TestBagOfPostings.cs |  193 ++
 .../Index/TestBinaryDocValuesUpdates.cs         | 1786 +++++++++++
 src/Lucene.Net.Tests/Index/TestBinaryTerms.cs   |   88 +
 src/Lucene.Net.Tests/Index/TestByteSlices.cs    |  141 +
 src/Lucene.Net.Tests/Index/TestCheckIndex.cs    |  129 +
 .../Index/TestCodecHoldsOpenFiles.cs            |  116 +
 src/Lucene.Net.Tests/Index/TestCodecs.cs        |  933 ++++++
 src/Lucene.Net.Tests/Index/TestCompoundFile.cs  |  917 ++++++
 .../Index/TestConcurrentMergeScheduler.cs       |  438 +++
 .../Index/TestConsistentFieldNumbers.cs         |  421 +++
 src/Lucene.Net.Tests/Index/TestCrash.cs         |  229 ++
 .../Index/TestCrashCausesCorruptIndex.cs        |  201 ++
 src/Lucene.Net.Tests/Index/TestCustomNorms.cs   |  144 +
 .../Index/TestDeletionPolicy.cs                 |  803 +++++
 .../Index/TestDirectoryReader.cs                | 1341 ++++++++
 .../Index/TestDirectoryReaderReopen.cs          |  785 +++++
 src/Lucene.Net.Tests/Index/TestDoc.cs           |  277 ++
 src/Lucene.Net.Tests/Index/TestDocCount.cs      |  101 +
 .../Index/TestDocInverterPerFieldErrorInfo.cs   |  148 +
 src/Lucene.Net.Tests/Index/TestDocTermOrds.cs   |  541 ++++
 .../Index/TestDocValuesFormat.cs                |  546 ++++
 .../Index/TestDocValuesIndexing.cs              |  982 ++++++
 .../Index/TestDocValuesWithThreads.cs           |  311 ++
 .../Index/TestDocsAndPositions.cs               |  430 +++
 .../Index/TestDocumentWriter.cs                 |  409 +++
 .../Index/TestDocumentsWriterDeleteQueue.cs     |  302 ++
 .../Index/TestDocumentsWriterStallControl.cs    |  473 +++
 src/Lucene.Net.Tests/Index/TestDuelingCodecs.cs |  183 ++
 .../Index/TestExceedMaxTermLength.cs            |  108 +
 src/Lucene.Net.Tests/Index/TestFieldInfos.cs    |  126 +
 src/Lucene.Net.Tests/Index/TestFieldsReader.cs  |  286 ++
 .../Index/TestFilterAtomicReader.cs             |  224 ++
 src/Lucene.Net.Tests/Index/TestFlex.cs          |  100 +
 .../Index/TestFlushByRamOrCountsPolicy.cs       |  477 +++
 .../Index/TestForTooMuchCloning.cs              |   86 +
 .../Index/TestForceMergeForever.cs              |  144 +
 src/Lucene.Net.Tests/Index/TestIndexCommit.cs   |  191 ++
 .../Index/TestIndexFileDeleter.cs               |  218 ++
 src/Lucene.Net.Tests/Index/TestIndexInput.cs    |  186 ++
 .../Index/TestIndexReaderClose.cs               |  155 +
 src/Lucene.Net.Tests/Index/TestIndexWriter.cs   | 2888 +++++++++++++++++
 .../Index/TestIndexWriterCommit.cs              |  772 +++++
 .../Index/TestIndexWriterConfig.cs              |  539 ++++
 .../Index/TestIndexWriterDelete.cs              | 1447 +++++++++
 .../Index/TestIndexWriterExceptions.cs          | 2584 +++++++++++++++
 .../Index/TestIndexWriterForceMerge.cs          |  260 ++
 .../Index/TestIndexWriterLockRelease.cs         |   64 +
 .../Index/TestIndexWriterMergePolicy.cs         |  311 ++
 .../Index/TestIndexWriterMerging.cs             |  488 +++
 .../Index/TestIndexWriterNRTIsCurrent.cs        |  260 ++
 .../Index/TestIndexWriterOnDiskFull.cs          |  703 +++++
 .../Index/TestIndexWriterOnJRECrash.cs          |  281 ++
 .../TestIndexWriterOutOfFileDescriptors.cs      |  196 ++
 .../Index/TestIndexWriterReader.cs              | 1419 +++++++++
 .../Index/TestIndexWriterUnicode.cs             |  390 +++
 .../Index/TestIndexWriterWithThreads.cs         |  796 +++++
 .../Index/TestIndexableField.cs                 |  453 +++
 src/Lucene.Net.Tests/Index/TestIntBlockPool.cs  |  185 ++
 src/Lucene.Net.Tests/Index/TestIsCurrent.cs     |  109 +
 .../Index/TestLazyProxSkipping.cs               |  258 ++
 .../Index/TestLogMergePolicy.cs                 |   42 +
 src/Lucene.Net.Tests/Index/TestLongPostings.cs  |  570 ++++
 .../Index/TestMaxTermFrequency.cs               |  171 +
 src/Lucene.Net.Tests/Index/TestMixedCodecs.cs   |  107 +
 .../Index/TestMixedDocValuesUpdates.cs          |  576 ++++
 .../Index/TestMultiDocValues.cs                 |  439 +++
 src/Lucene.Net.Tests/Index/TestMultiFields.cs   |  228 ++
 .../Index/TestMultiLevelSkipList.cs             |  218 ++
 .../Index/TestNRTReaderWithThreads.cs           |  140 +
 src/Lucene.Net.Tests/Index/TestNRTThreads.cs    |  185 ++
 src/Lucene.Net.Tests/Index/TestNeverDelete.cs   |  158 +
 src/Lucene.Net.Tests/Index/TestNewestSegment.cs |   39 +
 .../Index/TestNoDeletionPolicy.cs               |   95 +
 src/Lucene.Net.Tests/Index/TestNoMergePolicy.cs |   83 +
 .../Index/TestNoMergeScheduler.cs               |   72 +
 src/Lucene.Net.Tests/Index/TestNorms.cs         |  252 ++
 .../Index/TestNumericDocValuesUpdates.cs        | 1698 ++++++++++
 src/Lucene.Net.Tests/Index/TestOmitNorms.cs     |  331 ++
 src/Lucene.Net.Tests/Index/TestOmitPositions.cs |  294 ++
 src/Lucene.Net.Tests/Index/TestOmitTf.cs        |  588 ++++
 .../Index/TestParallelAtomicReader.cs           |  357 +++
 .../Index/TestParallelCompositeReader.cs        |  666 ++++
 .../Index/TestParallelReaderEmptyIndex.cs       |  162 +
 .../Index/TestParallelTermEnum.cs               |  127 +
 src/Lucene.Net.Tests/Index/TestPayloads.cs      |  738 +++++
 .../Index/TestPayloadsOnVectors.cs              |  165 +
 .../Index/TestPerSegmentDeletes.cs              |  318 ++
 .../TestPersistentSnapshotDeletionPolicy.cs     |  260 ++
 .../Index/TestPostingsFormat.cs                 |   95 +
 .../Index/TestPostingsOffsets.cs                |  580 ++++
 .../Index/TestPrefixCodedTerms.cs               |  142 +
 src/Lucene.Net.Tests/Index/TestReaderClosed.cs  |  118 +
 src/Lucene.Net.Tests/Index/TestRollback.cs      |   67 +
 .../Index/TestRollingUpdates.cs                 |  285 ++
 .../Index/TestSameTokenSamePosition.cs          |  110 +
 src/Lucene.Net.Tests/Index/TestSegmentMerger.cs |  207 ++
 src/Lucene.Net.Tests/Index/TestSegmentReader.cs |  277 ++
 .../Index/TestSegmentTermDocs.cs                |  274 ++
 .../Index/TestSegmentTermEnum.cs                |  152 +
 .../Index/TestSizeBoundedForceMerge.cs          |  403 +++
 .../Index/TestSnapshotDeletionPolicy.cs         |  527 ++++
 .../Index/TestStoredFieldsFormat.cs             |  141 +
 src/Lucene.Net.Tests/Index/TestStressAdvance.cs |  173 +
 .../Index/TestStressIndexing.cs                 |  237 ++
 .../Index/TestStressIndexing2.cs                | 1064 +++++++
 src/Lucene.Net.Tests/Index/TestStressNRT.cs     |  530 ++++
 src/Lucene.Net.Tests/Index/TestSumDocFreq.cs    |  112 +
 .../Index/TestTaskMergeSchedulerExternal.cs     |  146 +
 src/Lucene.Net.Tests/Index/TestTerm.cs          |   42 +
 .../Index/TestTermVectorsFormat.cs              |  119 +
 .../Index/TestTermVectorsReader.cs              |  477 +++
 .../Index/TestTermVectorsWriter.cs              |  601 ++++
 src/Lucene.Net.Tests/Index/TestTermdocPerf.cs   |  176 ++
 src/Lucene.Net.Tests/Index/TestTermsEnum.cs     | 1050 +++++++
 src/Lucene.Net.Tests/Index/TestTermsEnum2.cs    |  204 ++
 .../Index/TestThreadedForceMerge.cs             |  183 ++
 .../Index/TestTieredMergePolicy.cs              |  297 ++
 .../Index/TestTransactionRollback.cs            |  271 ++
 src/Lucene.Net.Tests/Index/TestTransactions.cs  |  336 ++
 src/Lucene.Net.Tests/Index/TestTryDelete.cs     |  196 ++
 .../Index/TestTwoPhaseCommitTool.cs             |  178 ++
 .../Index/TestUniqueTermCount.cs                |  122 +
 .../Index/bogus24.upgraded.to.36.zip            |  Bin 0 -> 2400 bytes
 src/Lucene.Net.Tests/Index/index.30.cfs.zip     |  Bin 0 -> 4786 bytes
 src/Lucene.Net.Tests/Index/index.30.nocfs.zip   |  Bin 0 -> 8953 bytes
 src/Lucene.Net.Tests/Index/index.31.cfs.zip     |  Bin 0 -> 5158 bytes
 src/Lucene.Net.Tests/Index/index.31.nocfs.zip   |  Bin 0 -> 12119 bytes
 .../Index/index.31.optimized.cfs.zip            |  Bin 0 -> 2174 bytes
 .../Index/index.31.optimized.nocfs.zip          |  Bin 0 -> 3638 bytes
 src/Lucene.Net.Tests/Index/index.32.cfs.zip     |  Bin 0 -> 5184 bytes
 src/Lucene.Net.Tests/Index/index.32.nocfs.zip   |  Bin 0 -> 7603 bytes
 src/Lucene.Net.Tests/Index/index.34.cfs.zip     |  Bin 0 -> 5203 bytes
 src/Lucene.Net.Tests/Index/index.34.nocfs.zip   |  Bin 0 -> 12145 bytes
 .../Index/index.36.surrogates.zip               |  Bin 0 -> 40680 bytes
 src/Lucene.Net.Tests/Index/index.40.cfs.zip     |  Bin 0 -> 15601 bytes
 src/Lucene.Net.Tests/Index/index.40.nocfs.zip   |  Bin 0 -> 22143 bytes
 .../Index/index.40.optimized.cfs.zip            |  Bin 0 -> 4359 bytes
 .../Index/index.40.optimized.nocfs.zip          |  Bin 0 -> 6434 bytes
 src/Lucene.Net.Tests/Index/index.41.cfs.zip     |  Bin 0 -> 12871 bytes
 src/Lucene.Net.Tests/Index/index.41.nocfs.zip   |  Bin 0 -> 23246 bytes
 src/Lucene.Net.Tests/Index/index.42.cfs.zip     |  Bin 0 -> 12516 bytes
 src/Lucene.Net.Tests/Index/index.42.nocfs.zip   |  Bin 0 -> 22064 bytes
 src/Lucene.Net.Tests/Index/index.45.cfs.zip     |  Bin 0 -> 12369 bytes
 src/Lucene.Net.Tests/Index/index.45.nocfs.zip   |  Bin 0 -> 21793 bytes
 src/Lucene.Net.Tests/Index/index.461.cfs.zip    |  Bin 0 -> 12329 bytes
 src/Lucene.Net.Tests/Index/index.461.nocfs.zip  |  Bin 0 -> 21780 bytes
 src/Lucene.Net.Tests/Index/moreterms.40.zip     |  Bin 0 -> 78661 bytes
 .../Index/unsupported.19.cfs.zip                |  Bin 0 -> 2747 bytes
 .../Index/unsupported.19.nocfs.zip              |  Bin 0 -> 8528 bytes
 .../Index/unsupported.20.cfs.zip                |  Bin 0 -> 2747 bytes
 .../Index/unsupported.20.nocfs.zip              |  Bin 0 -> 8528 bytes
 .../Index/unsupported.21.cfs.zip                |  Bin 0 -> 2784 bytes
 .../Index/unsupported.21.nocfs.zip              |  Bin 0 -> 7705 bytes
 .../Index/unsupported.22.cfs.zip                |  Bin 0 -> 1913 bytes
 .../Index/unsupported.22.nocfs.zip              |  Bin 0 -> 5226 bytes
 .../Index/unsupported.23.cfs.zip                |  Bin 0 -> 2091 bytes
 .../Index/unsupported.23.nocfs.zip              |  Bin 0 -> 3375 bytes
 .../Index/unsupported.24.cfs.zip                |  Bin 0 -> 3654 bytes
 .../Index/unsupported.24.nocfs.zip              |  Bin 0 -> 7254 bytes
 .../Index/unsupported.29.cfs.zip                |  Bin 0 -> 4531 bytes
 .../Index/unsupported.29.nocfs.zip              |  Bin 0 -> 8733 bytes
 src/Lucene.Net.Tests/Lucene.Net.Tests.csproj    |  984 +++---
 src/Lucene.Net.Tests/Lucene.Net.snk             |  Bin 0 -> 596 bytes
 src/Lucene.Net.Tests/RectangularArrays.cs       |   52 +
 .../Search/BaseTestRangeFilter.cs               |  211 ++
 .../Search/FuzzyTermOnShortTermsTest.cs         |  118 +
 .../Search/JustCompileSearch.cs                 |  358 +++
 src/Lucene.Net.Tests/Search/MockFilter.cs       |   44 +
 .../Search/MultiCollectorTest.cs                |  118 +
 .../Search/Payloads/PayloadHelper.cs            |  158 +
 .../Search/Payloads/TestPayloadExplanations.cs  |  117 +
 .../Search/Payloads/TestPayloadNearQuery.cs     |  392 +++
 .../Search/Payloads/TestPayloadTermQuery.cs     |  367 +++
 .../Search/Similarities/TestSimilarity2.cs      |  275 ++
 .../Search/Similarities/TestSimilarityBase.cs   |  651 ++++
 .../Search/SingleDocTestFilter.cs               |   44 +
 .../Search/Spans/JustCompileSearchSpans.cs      |  165 +
 .../Search/Spans/MultiSpansWrapper.cs           |  215 ++
 src/Lucene.Net.Tests/Search/Spans/TestBasics.cs |  626 ++++
 .../Search/Spans/TestFieldMaskingSpanQuery.cs   |  326 ++
 .../Search/Spans/TestNearSpansOrdered.cs        |  203 ++
 .../Search/Spans/TestPayloadSpans.cs            |  589 ++++
 .../Search/Spans/TestSpanExplanations.cs        |  260 ++
 .../Spans/TestSpanExplanationsOfNonMatches.cs   |  251 ++
 .../Search/Spans/TestSpanFirstQuery.cs          |   74 +
 .../Spans/TestSpanMultiTermQueryWrapper.cs      |  245 ++
 .../Search/Spans/TestSpanSearchEquivalence.cs   |  134 +
 src/Lucene.Net.Tests/Search/Spans/TestSpans.cs  |  571 ++++
 .../Search/Spans/TestSpansAdvanced.cs           |  181 ++
 .../Search/Spans/TestSpansAdvanced2.cs          |  124 +
 .../Search/TestAutomatonQuery.cs                |  278 ++
 .../Search/TestAutomatonQueryUnicode.cs         |  139 +
 src/Lucene.Net.Tests/Search/TestBoolean2.cs     |  423 +++
 .../Search/TestBooleanMinShouldMatch.cs         |  527 ++++
 src/Lucene.Net.Tests/Search/TestBooleanOr.cs    |  253 ++
 src/Lucene.Net.Tests/Search/TestBooleanQuery.cs |  412 +++
 .../Search/TestBooleanQueryVisitSubscorers.cs   |  206 ++
 .../Search/TestBooleanScorer.cs                 |  330 ++
 .../Search/TestCachingCollector.cs              |  252 ++
 .../Search/TestCachingWrapperFilter.cs          |  513 +++
 .../Search/TestComplexExplanations.cs           |  389 +++
 .../TestComplexExplanationsOfNonMatches.cs      |  197 ++
 src/Lucene.Net.Tests/Search/TestConjunctions.cs |  161 +
 .../Search/TestConstantScoreQuery.cs            |  241 ++
 .../TestControlledRealTimeReopenThread.cs       |  731 +++++
 .../Search/TestCustomSearcherSort.cs            |  262 ++
 src/Lucene.Net.Tests/Search/TestDateFilter.cs   |  165 +
 src/Lucene.Net.Tests/Search/TestDateSort.cs     |  125 +
 .../Search/TestDisjunctionMaxQuery.cs           |  570 ++++
 src/Lucene.Net.Tests/Search/TestDocBoost.cs     |  122 +
 src/Lucene.Net.Tests/Search/TestDocIdSet.cs     |  254 ++
 .../Search/TestDocTermOrdsRangeFilter.cs        |  149 +
 .../Search/TestDocTermOrdsRewriteMethod.cs      |  164 +
 .../Search/TestDocValuesScoring.cs              |  233 ++
 .../Search/TestEarlyTermination.cs              |  124 +
 .../Search/TestElevationComparator.cs           |  240 ++
 src/Lucene.Net.Tests/Search/TestExplanations.cs |  270 ++
 src/Lucene.Net.Tests/Search/TestFieldCache.cs   | 1058 +++++++
 .../Search/TestFieldCacheRangeFilter.cs         |  613 ++++
 .../Search/TestFieldCacheRewriteMethod.cs       |   86 +
 .../Search/TestFieldCacheTermsFilter.cs         |   80 +
 .../Search/TestFieldValueFilter.cs              |  127 +
 .../Search/TestFilteredQuery.cs                 |  719 +++++
 .../Search/TestFilteredSearch.cs                |  112 +
 src/Lucene.Net.Tests/Search/TestFuzzyQuery.cs   |  389 +++
 .../Search/TestIndexSearcher.cs                 |  145 +
 .../Search/TestLiveFieldValues.cs               |  245 ++
 .../Search/TestMatchAllDocsQuery.cs             |  115 +
 .../Search/TestMinShouldMatch2.cs               |  431 +++
 .../Search/TestMultiPhraseQuery.cs              |  631 ++++
 .../Search/TestMultiTermConstantScore.cs        |  563 ++++
 .../Search/TestMultiTermQueryRewrites.cs        |  305 ++
 .../Search/TestMultiThreadTermVectors.cs        |  244 ++
 .../Search/TestMultiValuedNumericRangeQuery.cs  |   88 +
 .../Search/TestNGramPhraseQuery.cs              |  113 +
 src/Lucene.Net.Tests/Search/TestNot.cs          |   65 +
 .../Search/TestNumericRangeQuery32.cs           |  705 +++++
 .../Search/TestNumericRangeQuery64.cs           |  752 +++++
 .../Search/TestPhrasePrefixQuery.cs             |  108 +
 src/Lucene.Net.Tests/Search/TestPhraseQuery.cs  |  758 +++++
 .../Search/TestPositionIncrement.cs             |  323 ++
 .../Search/TestPositiveScoresOnlyCollector.cs   |  118 +
 src/Lucene.Net.Tests/Search/TestPrefixFilter.cs |  114 +
 .../Search/TestPrefixInBooleanQuery.cs          |  127 +
 src/Lucene.Net.Tests/Search/TestPrefixQuery.cs  |   75 +
 src/Lucene.Net.Tests/Search/TestPrefixRandom.cs |  158 +
 .../Search/TestQueryRescorer.cs                 |  610 ++++
 .../Search/TestQueryWrapperFilter.cs            |  166 +
 src/Lucene.Net.Tests/Search/TestRegexpQuery.cs  |  161 +
 src/Lucene.Net.Tests/Search/TestRegexpRandom.cs |  156 +
 .../Search/TestRegexpRandom2.cs                 |  194 ++
 .../Search/TestSameScoresWithThreads.cs         |  158 +
 .../Search/TestScoreCachingWrappingScorer.cs    |  150 +
 src/Lucene.Net.Tests/Search/TestScorerPerf.cs   |  493 +++
 src/Lucene.Net.Tests/Search/TestSearchAfter.cs  |  370 +++
 .../Search/TestSearchWithThreads.cs             |  163 +
 .../Search/TestSearcherManager.cs               |  648 ++++
 .../Search/TestShardSearching.cs                |  488 +++
 src/Lucene.Net.Tests/Search/TestSimilarity.cs   |  258 ++
 .../Search/TestSimilarityProvider.cs            |  241 ++
 .../Search/TestSimpleExplanations.cs            |  950 ++++++
 .../TestSimpleExplanationsOfNonMatches.cs       |  489 +++
 .../Search/TestSimpleSearchEquivalence.cs       |  230 ++
 .../Search/TestSloppyPhraseQuery.cs             |  365 +++
 .../Search/TestSloppyPhraseQuery2.cs            |  247 ++
 src/Lucene.Net.Tests/Search/TestSort.cs         | 1984 ++++++++++++
 .../Search/TestSortDocValues.cs                 | 1036 ++++++
 src/Lucene.Net.Tests/Search/TestSortRandom.cs   |  365 +++
 src/Lucene.Net.Tests/Search/TestSortRescorer.cs |  220 ++
 .../Search/TestSubScorerFreqs.cs                |  230 ++
 .../Search/TestTermRangeFilter.cs               |  195 ++
 .../Search/TestTermRangeQuery.cs                |  375 +++
 src/Lucene.Net.Tests/Search/TestTermScorer.cs   |  203 ++
 src/Lucene.Net.Tests/Search/TestTermVectors.cs  |  274 ++
 .../Search/TestTimeLimitingCollector.cs         |  442 +++
 .../Search/TestTopDocsCollector.cs              |  232 ++
 src/Lucene.Net.Tests/Search/TestTopDocsMerge.cs |  361 +++
 .../Search/TestTopFieldCollector.cs             |  253 ++
 .../Search/TestTopScoreDocCollector.cs          |   72 +
 .../Search/TestTotalHitCountCollector.cs        |   58 +
 src/Lucene.Net.Tests/Search/TestWildcard.cs     |  375 +++
 .../Search/TestWildcardRandom.cs                |  159 +
 .../Store/TestBufferedChecksum.cs               |   79 +
 .../Store/TestBufferedIndexInput.cs             |  401 +++
 .../Store/TestByteArrayDataInput.cs             |   39 +
 src/Lucene.Net.Tests/Store/TestCopyBytes.cs     |  200 ++
 src/Lucene.Net.Tests/Store/TestDirectory.cs     |  415 +++
 .../Store/TestFileSwitchDirectory.cs            |  186 ++
 .../Store/TestFilterDirectory.cs                |   52 +
 src/Lucene.Net.Tests/Store/TestHugeRamFile.cs   |  128 +
 src/Lucene.Net.Tests/Store/TestLock.cs          |   78 +
 src/Lucene.Net.Tests/Store/TestLockFactory.cs   |  510 +++
 .../Store/TestMockDirectoryWrapper.cs           |  117 +
 src/Lucene.Net.Tests/Store/TestMultiMMap.cs     |  438 +++
 .../Store/TestNRTCachingDirectory.cs            |  217 ++
 src/Lucene.Net.Tests/Store/TestRAMDirectory.cs  |  231 ++
 src/Lucene.Net.Tests/Store/TestRateLimiter.cs   |   49 +
 src/Lucene.Net.Tests/Store/TestWindowsMMap.cs   |  125 +
 .../Support/BaseBufferTestCase.cs               |  135 +
 src/Lucene.Net.Tests/Support/BigObject.cs       |   35 +
 src/Lucene.Net.Tests/Support/C5/ArrayBase.cs    |  485 +++
 src/Lucene.Net.Tests/Support/C5/ArrayList.cs    | 2252 +++++++++++++
 .../Support/C5/DropMultiplicity.cs              |   17 +
 src/Lucene.Net.Tests/Support/C5/Events.cs       |  893 ++++++
 .../Support/C5/GenericCollectionTester.cs       |   88 +
 src/Lucene.Net.Tests/Support/C5/HashBag.cs      |  660 ++++
 src/Lucene.Net.Tests/Support/C5/Sorting.cs      |  239 ++
 .../Support/C5/SupportClasses.cs                |  506 +++
 src/Lucene.Net.Tests/Support/C5/WeakViewList.cs |  105 +
 src/Lucene.Net.Tests/Support/C5/Wrappers.cs     | 2364 ++++++++++++++
 src/Lucene.Net.Tests/Support/CollisionTester.cs |   50 +
 src/Lucene.Net.Tests/Support/ReadFully.txt      |   14 +
 src/Lucene.Net.Tests/Support/SmallObject.cs     |   33 +
 src/Lucene.Net.Tests/Support/TestByteBuffer.cs  |  739 +++++
 src/Lucene.Net.Tests/Support/TestCase.cs        |   54 +
 .../Support/TestCloseableThreadLocal.cs         |  108 +
 .../Support/TestDataInputStream.cs              |  231 ++
 .../Support/TestDataOutputStream.cs             |  173 +
 .../Support/TestEquatableList.cs                |  167 +
 .../Support/TestExceptionSerialization.cs       |  102 +
 src/Lucene.Net.Tests/Support/TestHashMap.cs     |  214 ++
 src/Lucene.Net.Tests/Support/TestIDisposable.cs |   67 +
 src/Lucene.Net.Tests/Support/TestLRUCache.cs    |   47 +
 .../Support/TestLinkedHashMap.cs                |  439 +++
 src/Lucene.Net.Tests/Support/TestLongBuffer.cs  |  523 +++
 src/Lucene.Net.Tests/Support/TestLurchTable.cs  | 1132 +++++++
 .../Support/TestLurchTableThreading.cs          |  250 ++
 src/Lucene.Net.Tests/Support/TestOSClass.cs     |   48 +
 src/Lucene.Net.Tests/Support/TestOldPatches.cs  |  292 ++
 .../Support/TestSafeTextWriterWrapper.cs        |   47 +
 .../Support/TestSerialization.cs                |  102 +
 .../Support/TestStringBuilderExtensions.cs      |   72 +
 .../Support/TestSupportClass.cs                 |   86 +
 src/Lucene.Net.Tests/Support/TestThreadClass.cs |   59 +
 .../Support/TestToStringUtils.cs                |   71 +
 .../Support/TestTreeDictionary.cs               |  522 +++
 src/Lucene.Net.Tests/Support/TestTreeSet.cs     | 2970 ++++++++++++++++++
 .../Support/TestWeakDictionary.cs               |  148 +
 .../Support/TestWeakDictionaryBehavior.cs       |  291 ++
 .../Support/TestWeakDictionaryPerformance.cs    |  134 +
 src/Lucene.Net.Tests/SupportClassException.cs   |   47 +
 src/Lucene.Net.Tests/Test.nunit                 |   22 +
 src/Lucene.Net.Tests/TestApiConsistency.cs      |  127 +
 src/Lucene.Net.Tests/TestAssertions.cs          |   78 +
 src/Lucene.Net.Tests/TestDemo.cs                |   89 +
 src/Lucene.Net.Tests/TestExternalCodecs.cs      |  154 +
 .../TestMergeSchedulerExternal.cs               |  177 ++
 src/Lucene.Net.Tests/TestSearch.cs              |  206 ++
 src/Lucene.Net.Tests/TestSearchForDuplicates.cs |  157 +
 .../TestWorstCaseTestBehavior.cs                |  150 +
 .../Util/Automaton/TestBasicOperations.cs       |  180 ++
 .../Util/Automaton/TestCompiledAutomaton.cs     |  147 +
 .../Util/Automaton/TestDeterminism.cs           |   92 +
 .../Util/Automaton/TestDeterminizeLexicon.cs    |   72 +
 .../Util/Automaton/TestLevenshteinAutomata.cs   |  435 +++
 .../Util/Automaton/TestMinimize.cs              |   72 +
 .../Util/Automaton/TestSpecialOperations.cs     |   61 +
 .../Util/Automaton/TestUTF32ToUTF8.cs           |  287 ++
 src/Lucene.Net.Tests/Util/BaseSortTestCase.cs   |  210 ++
 .../Util/Cache/TestSimpleLRUCache.cs            |   77 +
 src/Lucene.Net.Tests/Util/Fst/Test2BFST.cs      |  349 ++
 src/Lucene.Net.Tests/Util/Fst/TestBytesStore.cs |  433 +++
 src/Lucene.Net.Tests/Util/Fst/TestFSTs.cs       | 1904 +++++++++++
 .../Util/JunitCompat/SorePoint.cs               |   36 +
 .../Util/JunitCompat/SoreType.cs                |   28 +
 .../JunitCompat/TestBeforeAfterOverrides.cs     |  103 +
 .../Util/JunitCompat/TestCodecReported.cs       |   56 +
 .../TestExceptionInBeforeClassHooks.cs          |  215 ++
 .../JunitCompat/TestFailIfDirectoryNotClosed.cs |   64 +
 .../JunitCompat/TestFailIfUnreferencedFiles.cs  |   80 +
 .../JunitCompat/TestFailOnFieldCacheInsanity.cs |   88 +
 .../Util/JunitCompat/TestGroupFiltering.cs      |   85 +
 .../Util/JunitCompat/TestJUnitRuleOrder.cs      |  132 +
 .../JunitCompat/TestLeaveFilesIfTestFails.cs    |   87 +
 .../Util/JunitCompat/TestReproduceMessage.cs    |  389 +++
 .../TestReproduceMessageWithRepeated.cs         |   64 +
 .../TestSameRandomnessLocalePassedOrNot.cs      |   91 +
 .../Util/JunitCompat/TestSeedFromUncaught.cs    |   85 +
 .../JunitCompat/TestSetupTeardownChaining.cs    |   96 +
 .../TestSystemPropertiesInvariantRule.cs        |  196 ++
 .../Util/JunitCompat/WithNestedTests.cs         |  198 ++
 .../Util/Packed/TestEliasFanoDocIdSet.cs        |  124 +
 .../Util/Packed/TestEliasFanoSequence.cs        |  504 +++
 .../Util/Packed/TestPackedInts.cs               | 1547 +++++++++
 .../Util/StressRamUsageEstimator.cs             |  174 +
 src/Lucene.Net.Tests/Util/Test2BPagedBytes.cs   |   87 +
 src/Lucene.Net.Tests/Util/TestArrayUtil.cs      |  357 +++
 .../Util/TestAttributeSource.cs                 |  194 ++
 src/Lucene.Net.Tests/Util/TestBroadWord.cs      |  172 +
 src/Lucene.Net.Tests/Util/TestByteBlockPool.cs  |   70 +
 src/Lucene.Net.Tests/Util/TestBytesRef.cs       |   99 +
 src/Lucene.Net.Tests/Util/TestBytesRefArray.cs  |  115 +
 src/Lucene.Net.Tests/Util/TestBytesRefHash.cs   |  440 +++
 src/Lucene.Net.Tests/Util/TestCharsRef.cs       |  257 ++
 .../Util/TestCloseableThreadLocal.cs            |   69 +
 src/Lucene.Net.Tests/Util/TestCollectionUtil.cs |  114 +
 src/Lucene.Net.Tests/Util/TestConstants.cs      |   60 +
 src/Lucene.Net.Tests/Util/TestDocIdBitSet.cs    |   79 +
 .../Util/TestDoubleBarrelLRUCache.cs            |  226 ++
 .../Util/TestFieldCacheSanityChecker.cs         |  193 ++
 src/Lucene.Net.Tests/Util/TestFilterIterator.cs |  252 ++
 src/Lucene.Net.Tests/Util/TestFixedBitSet.cs    |  563 ++++
 src/Lucene.Net.Tests/Util/TestIOUtils.cs        |   91 +
 .../Util/TestIdentityHashSet.cs                 |   63 +
 .../Util/TestInPlaceMergeSorter.cs              |  100 +
 .../Util/TestIndexableBinaryStringTools.cs      |  212 ++
 src/Lucene.Net.Tests/Util/TestIntroSorter.cs    |   96 +
 src/Lucene.Net.Tests/Util/TestIntsRef.cs        |   49 +
 src/Lucene.Net.Tests/Util/TestLongBitSet.cs     |  450 +++
 src/Lucene.Net.Tests/Util/TestMathUtil.cs       |  215 ++
 .../Util/TestMaxFailuresRule.cs                 |  262 ++
 src/Lucene.Net.Tests/Util/TestMergedIterator.cs |  175 ++
 src/Lucene.Net.Tests/Util/TestNamedSPILoader.cs |   60 +
 src/Lucene.Net.Tests/Util/TestNumericUtils.cs   |  545 ++++
 src/Lucene.Net.Tests/Util/TestOfflineSorter.cs  |  257 ++
 src/Lucene.Net.Tests/Util/TestOpenBitSet.cs     |  566 ++++
 .../Util/TestPForDeltaDocIdSet.cs               |   86 +
 src/Lucene.Net.Tests/Util/TestPagedBytes.cs     |  233 ++
 src/Lucene.Net.Tests/Util/TestPriorityQueue.cs  |  633 ++++
 src/Lucene.Net.Tests/Util/TestQueryBuilder.cs   |  437 +++
 .../Util/TestRamUsageEstimator.cs               |  148 +
 .../Util/TestRamUsageEstimatorOnWildAnimals.cs  |   70 +
 .../Util/TestRecyclingByteBlockAllocator.cs     |  151 +
 .../Util/TestRecyclingIntBlockAllocator.cs      |  151 +
 src/Lucene.Net.Tests/Util/TestRollingBuffer.cs  |  114 +
 src/Lucene.Net.Tests/Util/TestSentinelIntSet.cs |   76 +
 src/Lucene.Net.Tests/Util/TestSetOnce.cs        |  119 +
 src/Lucene.Net.Tests/Util/TestSloppyMath.cs     |  133 +
 src/Lucene.Net.Tests/Util/TestSmallFloat.cs     |  160 +
 src/Lucene.Net.Tests/Util/TestStringHelper.cs   |   35 +
 src/Lucene.Net.Tests/Util/TestTimSorter.cs      |   96 +
 src/Lucene.Net.Tests/Util/TestUnicodeUtil.cs    |  254 ++
 src/Lucene.Net.Tests/Util/TestVersion.cs        |   83 +
 .../Util/TestVersionComparator.cs               |   59 +
 src/Lucene.Net.Tests/Util/TestVirtualMethod.cs  |  161 +
 src/Lucene.Net.Tests/Util/TestWAH8DocIdSet.cs   |  154 +
 .../Util/TestWeakIdentityMap.cs                 |  354 +++
 .../core/Analysis/TestCachingTokenFilter.cs     |  138 -
 .../core/Analysis/TestCharFilter.cs             |   94 -
 .../core/Analysis/TestGraphTokenizers.cs        |  728 -----
 .../core/Analysis/TestLookaheadTokenFilter.cs   |  129 -
 .../core/Analysis/TestMockAnalyzer.cs           |  420 ---
 .../core/Analysis/TestMockCharFilter.cs         |   59 -
 .../core/Analysis/TestNumericTokenStream.cs     |  144 -
 .../core/Analysis/TestPosition.cs               |   27 -
 .../core/Analysis/TestReusableStringReader.cs   |   71 -
 src/Lucene.Net.Tests/core/Analysis/TestToken.cs |  305 --
 .../TestCharTermAttributeImpl.cs                |  485 ---
 .../Tokenattributes/TestSimpleAttributeImpl.cs  |   66 -
 .../core/Analysis/TrivialLookaheadFilter.cs     |  109 -
 src/Lucene.Net.Tests/core/App.config            |   47 -
 src/Lucene.Net.Tests/core/AssemblyInfo.cs       |   87 -
 .../Compressing/AbstractTestCompressionMode.cs  |  180 --
 .../AbstractTestLZ4CompressionMode.cs           |  129 -
 .../TestCompressingStoredFieldsFormat.cs        |  200 --
 .../TestCompressingTermVectorsFormat.cs         |  164 -
 .../Compressing/TestFastCompressionMode.cs      |  113 -
 .../Compressing/TestFastDecompressionMode.cs    |  123 -
 .../Compressing/TestHighCompressionMode.cs      |   82 -
 .../core/Codecs/Lucene3x/TestImpersonation.cs   |   39 -
 .../Lucene3x/TestLucene3xPostingsFormat.cs      |  109 -
 .../Lucene3x/TestLucene3xStoredFieldsFormat.cs  |  145 -
 .../Lucene3x/TestLucene3xTermVectorsFormat.cs   |  122 -
 .../core/Codecs/Lucene3x/TestSurrogates.cs      |  424 ---
 .../Codecs/Lucene3x/TestTermInfosReaderIndex.cs |  235 --
 .../core/Codecs/Lucene40/TestBitVector.cs       |  325 --
 .../Lucene40/TestLucene40DocValuesFormat.cs     |  555 ----
 .../Lucene40/TestLucene40PostingsFormat.cs      |  111 -
 .../Lucene40/TestLucene40PostingsReader.cs      |  166 -
 .../Lucene40/TestLucene40StoredFieldsFormat.cs  |  147 -
 .../Lucene40/TestLucene40TermVectorsFormat.cs   |  117 -
 .../core/Codecs/Lucene40/TestReuseDocsEnum.cs   |  221 --
 .../Codecs/Lucene41/TestBlockPostingsFormat.cs  |  103 -
 .../Codecs/Lucene41/TestBlockPostingsFormat2.cs |  166 -
 .../Codecs/Lucene41/TestBlockPostingsFormat3.cs |  571 ----
 .../core/Codecs/Lucene41/TestForUtil.cs         |   97 -
 .../Lucene41/TestLucene41StoredFieldsFormat.cs  |  146 -
 .../Lucene42/TestLucene42DocValuesFormat.cs     |  581 ----
 .../Lucene45/TestLucene45DocValuesFormat.cs     |  565 ----
 .../PerField/TestPerFieldDocValuesFormat.cs     |  658 ----
 .../PerField/TestPerFieldPostingsFormat.cs      |  100 -
 .../PerField/TestPerFieldPostingsFormat2.cs     |  372 ---
 .../core/Document/TestBinaryDocument.cs         |  122 -
 .../core/Document/TestDateTools.cs              |  244 --
 .../core/Document/TestDocument.cs               |  454 ---
 src/Lucene.Net.Tests/core/Document/TestField.cs |  617 ----
 .../core/Index/BinaryTokenStream.cs             |  101 -
 .../core/Index/SynchronizedList.cs              |  168 -
 .../core/Index/Test2BBinaryDocValues.cs         |  171 -
 src/Lucene.Net.Tests/core/Index/Test2BDocs.cs   |  105 -
 .../core/Index/Test2BNumericDocValues.cs        |   89 -
 .../core/Index/Test2BPositions.cs               |  123 -
 .../core/Index/Test2BPostings.cs                |  125 -
 .../core/Index/Test2BPostingsBytes.cs           |  167 -
 .../core/Index/Test2BSortedDocValues.cs         |  168 -
 src/Lucene.Net.Tests/core/Index/Test2BTerms.cs  |  317 --
 .../core/Index/Test4GBStoredFields.cs           |  123 -
 .../core/Index/TestAddIndexes.cs                | 1396 --------
 .../Index/TestAllFilesHaveChecksumFooter.cs     |  114 -
 .../core/Index/TestAllFilesHaveCodecHeader.cs   |  119 -
 .../core/Index/TestAtomicUpdate.cs              |  218 --
 .../core/Index/TestBackwardsCompatibility.cs    | 1079 -------
 .../core/Index/TestBackwardsCompatibility3x.cs  | 1053 -------
 .../core/Index/TestBagOfPositions.cs            |  210 --
 .../core/Index/TestBagOfPostings.cs             |  193 --
 .../core/Index/TestBinaryDocValuesUpdates.cs    | 1786 -----------
 .../core/Index/TestBinaryTerms.cs               |   88 -
 .../core/Index/TestByteSlices.cs                |  141 -
 .../core/Index/TestCheckIndex.cs                |  129 -
 .../core/Index/TestCodecHoldsOpenFiles.cs       |  116 -
 src/Lucene.Net.Tests/core/Index/TestCodecs.cs   |  933 ------
 .../core/Index/TestCompoundFile.cs              |  917 ------
 .../core/Index/TestConcurrentMergeScheduler.cs  |  438 ---
 .../core/Index/TestConsistentFieldNumbers.cs    |  421 ---
 src/Lucene.Net.Tests/core/Index/TestCrash.cs    |  229 --
 .../core/Index/TestCrashCausesCorruptIndex.cs   |  201 --
 .../core/Index/TestCustomNorms.cs               |  144 -
 .../core/Index/TestDeletionPolicy.cs            |  803 -----
 .../core/Index/TestDirectoryReader.cs           | 1341 --------
 .../core/Index/TestDirectoryReaderReopen.cs     |  785 -----
 src/Lucene.Net.Tests/core/Index/TestDoc.cs      |  277 --
 src/Lucene.Net.Tests/core/Index/TestDocCount.cs |  101 -
 .../Index/TestDocInverterPerFieldErrorInfo.cs   |  148 -
 .../core/Index/TestDocTermOrds.cs               |  541 ----
 .../core/Index/TestDocValuesFormat.cs           |  546 ----
 .../core/Index/TestDocValuesIndexing.cs         |  982 ------
 .../core/Index/TestDocValuesWithThreads.cs      |  311 --
 .../core/Index/TestDocsAndPositions.cs          |  430 ---
 .../core/Index/TestDocumentWriter.cs            |  409 ---
 .../Index/TestDocumentsWriterDeleteQueue.cs     |  302 --
 .../Index/TestDocumentsWriterStallControl.cs    |  473 ---
 .../core/Index/TestDuelingCodecs.cs             |  183 --
 .../core/Index/TestExceedMaxTermLength.cs       |  108 -
 .../core/Index/TestFieldInfos.cs                |  126 -
 .../core/Index/TestFieldsReader.cs              |  286 --
 .../core/Index/TestFilterAtomicReader.cs        |  224 --
 src/Lucene.Net.Tests/core/Index/TestFlex.cs     |  100 -
 .../core/Index/TestFlushByRamOrCountsPolicy.cs  |  477 ---
 .../core/Index/TestForTooMuchCloning.cs         |   86 -
 .../core/Index/TestForceMergeForever.cs         |  144 -
 .../core/Index/TestIndexCommit.cs               |  191 --
 .../core/Index/TestIndexFileDeleter.cs          |  218 --
 .../core/Index/TestIndexInput.cs                |  186 --
 .../core/Index/TestIndexReaderClose.cs          |  155 -
 .../core/Index/TestIndexWriter.cs               | 2888 -----------------
 .../core/Index/TestIndexWriterCommit.cs         |  772 -----
 .../core/Index/TestIndexWriterConfig.cs         |  539 ----
 .../core/Index/TestIndexWriterDelete.cs         | 1447 ---------
 .../core/Index/TestIndexWriterExceptions.cs     | 2584 ---------------
 .../core/Index/TestIndexWriterForceMerge.cs     |  260 --
 .../core/Index/TestIndexWriterLockRelease.cs    |   64 -
 .../core/Index/TestIndexWriterMergePolicy.cs    |  311 --
 .../core/Index/TestIndexWriterMerging.cs        |  488 ---
 .../core/Index/TestIndexWriterNRTIsCurrent.cs   |  260 --
 .../core/Index/TestIndexWriterOnDiskFull.cs     |  703 -----
 .../core/Index/TestIndexWriterOnJRECrash.cs     |  281 --
 .../TestIndexWriterOutOfFileDescriptors.cs      |  196 --
 .../core/Index/TestIndexWriterReader.cs         | 1419 ---------
 .../core/Index/TestIndexWriterUnicode.cs        |  390 ---
 .../core/Index/TestIndexWriterWithThreads.cs    |  796 -----
 .../core/Index/TestIndexableField.cs            |  453 ---
 .../core/Index/TestIntBlockPool.cs              |  185 --
 .../core/Index/TestIsCurrent.cs                 |  109 -
 .../core/Index/TestLazyProxSkipping.cs          |  258 --
 .../core/Index/TestLogMergePolicy.cs            |   42 -
 .../core/Index/TestLongPostings.cs              |  570 ----
 .../core/Index/TestMaxTermFrequency.cs          |  171 -
 .../core/Index/TestMixedCodecs.cs               |  107 -
 .../core/Index/TestMixedDocValuesUpdates.cs     |  576 ----
 .../core/Index/TestMultiDocValues.cs            |  439 ---
 .../core/Index/TestMultiFields.cs               |  228 --
 .../core/Index/TestMultiLevelSkipList.cs        |  218 --
 .../core/Index/TestNRTReaderWithThreads.cs      |  140 -
 .../core/Index/TestNRTThreads.cs                |  185 --
 .../core/Index/TestNeverDelete.cs               |  158 -
 .../core/Index/TestNewestSegment.cs             |   39 -
 .../core/Index/TestNoDeletionPolicy.cs          |   95 -
 .../core/Index/TestNoMergePolicy.cs             |   83 -
 .../core/Index/TestNoMergeScheduler.cs          |   72 -
 src/Lucene.Net.Tests/core/Index/TestNorms.cs    |  252 --
 .../core/Index/TestNumericDocValuesUpdates.cs   | 1698 ----------
 .../core/Index/TestOmitNorms.cs                 |  331 --
 .../core/Index/TestOmitPositions.cs             |  294 --
 src/Lucene.Net.Tests/core/Index/TestOmitTf.cs   |  588 ----
 .../core/Index/TestParallelAtomicReader.cs      |  357 ---
 .../core/Index/TestParallelCompositeReader.cs   |  666 ----
 .../core/Index/TestParallelReaderEmptyIndex.cs  |  162 -
 .../core/Index/TestParallelTermEnum.cs          |  127 -
 src/Lucene.Net.Tests/core/Index/TestPayloads.cs |  738 -----
 .../core/Index/TestPayloadsOnVectors.cs         |  165 -
 .../core/Index/TestPerSegmentDeletes.cs         |  318 --
 .../TestPersistentSnapshotDeletionPolicy.cs     |  260 --
 .../core/Index/TestPostingsFormat.cs            |   95 -
 .../core/Index/TestPostingsOffsets.cs           |  580 ----
 .../core/Index/TestPrefixCodedTerms.cs          |  142 -
 .../core/Index/TestReaderClosed.cs              |  118 -
 src/Lucene.Net.Tests/core/Index/TestRollback.cs |   67 -
 .../core/Index/TestRollingUpdates.cs            |  285 --
 .../core/Index/TestSameTokenSamePosition.cs     |  110 -
 .../core/Index/TestSegmentMerger.cs             |  207 --
 .../core/Index/TestSegmentReader.cs             |  277 --
 .../core/Index/TestSegmentTermDocs.cs           |  274 --
 .../core/Index/TestSegmentTermEnum.cs           |  152 -
 .../core/Index/TestSizeBoundedForceMerge.cs     |  403 ---
 .../core/Index/TestSnapshotDeletionPolicy.cs    |  527 ----
 .../core/Index/TestStoredFieldsFormat.cs        |  141 -
 .../core/Index/TestStressAdvance.cs             |  173 -
 .../core/Index/TestStressIndexing.cs            |  237 --
 .../core/Index/TestStressIndexing2.cs           | 1064 -------
 .../core/Index/TestStressNRT.cs                 |  530 ----
 .../core/Index/TestSumDocFreq.cs                |  112 -
 .../Index/TestTaskMergeSchedulerExternal.cs     |  146 -
 src/Lucene.Net.Tests/core/Index/TestTerm.cs     |   42 -
 .../core/Index/TestTermVectorsFormat.cs         |  119 -
 .../core/Index/TestTermVectorsReader.cs         |  477 ---
 .../core/Index/TestTermVectorsWriter.cs         |  601 ----
 .../core/Index/TestTermdocPerf.cs               |  176 --
 .../core/Index/TestTermsEnum.cs                 | 1050 -------
 .../core/Index/TestTermsEnum2.cs                |  204 --
 .../core/Index/TestThreadedForceMerge.cs        |  183 --
 .../core/Index/TestTieredMergePolicy.cs         |  297 --
 .../core/Index/TestTransactionRollback.cs       |  271 --
 .../core/Index/TestTransactions.cs              |  336 --
 .../core/Index/TestTryDelete.cs                 |  196 --
 .../core/Index/TestTwoPhaseCommitTool.cs        |  178 --
 .../core/Index/TestUniqueTermCount.cs           |  122 -
 .../core/Index/bogus24.upgraded.to.36.zip       |  Bin 2400 -> 0 bytes
 .../core/Index/index.30.cfs.zip                 |  Bin 4786 -> 0 bytes
 .../core/Index/index.30.nocfs.zip               |  Bin 8953 -> 0 bytes
 .../core/Index/index.31.cfs.zip                 |  Bin 5158 -> 0 bytes
 .../core/Index/index.31.nocfs.zip               |  Bin 12119 -> 0 bytes
 .../core/Index/index.31.optimized.cfs.zip       |  Bin 2174 -> 0 bytes
 .../core/Index/index.31.optimized.nocfs.zip     |  Bin 3638 -> 0 bytes
 .../core/Index/index.32.cfs.zip                 |  Bin 5184 -> 0 bytes
 .../core/Index/index.32.nocfs.zip               |  Bin 7603 -> 0 bytes
 .../core/Index/index.34.cfs.zip                 |  Bin 5203 -> 0 bytes
 .../core/Index/index.34.nocfs.zip               |  Bin 12145 -> 0 bytes
 .../core/Index/index.36.surrogates.zip          |  Bin 40680 -> 0 bytes
 .../core/Index/index.40.cfs.zip                 |  Bin 15601 -> 0 bytes
 .../core/Index/index.40.nocfs.zip               |  Bin 22143 -> 0 bytes
 .../core/Index/index.40.optimized.cfs.zip       |  Bin 4359 -> 0 bytes
 .../core/Index/index.40.optimized.nocfs.zip     |  Bin 6434 -> 0 bytes
 .../core/Index/index.41.cfs.zip                 |  Bin 12871 -> 0 bytes
 .../core/Index/index.41.nocfs.zip               |  Bin 23246 -> 0 bytes
 .../core/Index/index.42.cfs.zip                 |  Bin 12516 -> 0 bytes
 .../core/Index/index.42.nocfs.zip               |  Bin 22064 -> 0 bytes
 .../core/Index/index.45.cfs.zip                 |  Bin 12369 -> 0 bytes
 .../core/Index/index.45.nocfs.zip               |  Bin 21793 -> 0 bytes
 .../core/Index/index.461.cfs.zip                |  Bin 12329 -> 0 bytes
 .../core/Index/index.461.nocfs.zip              |  Bin 21780 -> 0 bytes
 .../core/Index/moreterms.40.zip                 |  Bin 78661 -> 0 bytes
 .../core/Index/unsupported.19.cfs.zip           |  Bin 2747 -> 0 bytes
 .../core/Index/unsupported.19.nocfs.zip         |  Bin 8528 -> 0 bytes
 .../core/Index/unsupported.20.cfs.zip           |  Bin 2747 -> 0 bytes
 .../core/Index/unsupported.20.nocfs.zip         |  Bin 8528 -> 0 bytes
 .../core/Index/unsupported.21.cfs.zip           |  Bin 2784 -> 0 bytes
 .../core/Index/unsupported.21.nocfs.zip         |  Bin 7705 -> 0 bytes
 .../core/Index/unsupported.22.cfs.zip           |  Bin 1913 -> 0 bytes
 .../core/Index/unsupported.22.nocfs.zip         |  Bin 5226 -> 0 bytes
 .../core/Index/unsupported.23.cfs.zip           |  Bin 2091 -> 0 bytes
 .../core/Index/unsupported.23.nocfs.zip         |  Bin 3375 -> 0 bytes
 .../core/Index/unsupported.24.cfs.zip           |  Bin 3654 -> 0 bytes
 .../core/Index/unsupported.24.nocfs.zip         |  Bin 7254 -> 0 bytes
 .../core/Index/unsupported.29.cfs.zip           |  Bin 4531 -> 0 bytes
 .../core/Index/unsupported.29.nocfs.zip         |  Bin 8733 -> 0 bytes
 .../core/Lucene.Net.Tests.csproj                |  635 ----
 src/Lucene.Net.Tests/core/Lucene.Net.snk        |  Bin 596 -> 0 bytes
 src/Lucene.Net.Tests/core/RectangularArrays.cs  |   52 -
 .../core/Search/BaseTestRangeFilter.cs          |  211 --
 .../core/Search/FuzzyTermOnShortTermsTest.cs    |  118 -
 .../core/Search/JustCompileSearch.cs            |  358 ---
 src/Lucene.Net.Tests/core/Search/MockFilter.cs  |   44 -
 .../core/Search/MultiCollectorTest.cs           |  118 -
 .../core/Search/Payloads/PayloadHelper.cs       |  158 -
 .../Search/Payloads/TestPayloadExplanations.cs  |  117 -
 .../Search/Payloads/TestPayloadNearQuery.cs     |  392 ---
 .../Search/Payloads/TestPayloadTermQuery.cs     |  367 ---
 .../core/Search/Similarities/TestSimilarity2.cs |  275 --
 .../Search/Similarities/TestSimilarityBase.cs   |  651 ----
 .../core/Search/SingleDocTestFilter.cs          |   44 -
 .../core/Search/Spans/JustCompileSearchSpans.cs |  165 -
 .../core/Search/Spans/MultiSpansWrapper.cs      |  215 --
 .../core/Search/Spans/TestBasics.cs             |  626 ----
 .../Search/Spans/TestFieldMaskingSpanQuery.cs   |  326 --
 .../core/Search/Spans/TestNearSpansOrdered.cs   |  203 --
 .../core/Search/Spans/TestPayloadSpans.cs       |  589 ----
 .../core/Search/Spans/TestSpanExplanations.cs   |  260 --
 .../Spans/TestSpanExplanationsOfNonMatches.cs   |  251 --
 .../core/Search/Spans/TestSpanFirstQuery.cs     |   74 -
 .../Spans/TestSpanMultiTermQueryWrapper.cs      |  245 --
 .../Search/Spans/TestSpanSearchEquivalence.cs   |  134 -
 .../core/Search/Spans/TestSpans.cs              |  571 ----
 .../core/Search/Spans/TestSpansAdvanced.cs      |  181 --
 .../core/Search/Spans/TestSpansAdvanced2.cs     |  124 -
 .../core/Search/TestAutomatonQuery.cs           |  278 --
 .../core/Search/TestAutomatonQueryUnicode.cs    |  139 -
 .../core/Search/TestBoolean2.cs                 |  423 ---
 .../core/Search/TestBooleanMinShouldMatch.cs    |  527 ----
 .../core/Search/TestBooleanOr.cs                |  253 --
 .../core/Search/TestBooleanQuery.cs             |  412 ---
 .../Search/TestBooleanQueryVisitSubscorers.cs   |  206 --
 .../core/Search/TestBooleanScorer.cs            |  330 --
 .../core/Search/TestCachingCollector.cs         |  252 --
 .../core/Search/TestCachingWrapperFilter.cs     |  513 ---
 .../core/Search/TestComplexExplanations.cs      |  389 ---
 .../TestComplexExplanationsOfNonMatches.cs      |  197 --
 .../core/Search/TestConjunctions.cs             |  161 -
 .../core/Search/TestConstantScoreQuery.cs       |  241 --
 .../TestControlledRealTimeReopenThread.cs       |  731 -----
 .../core/Search/TestCustomSearcherSort.cs       |  262 --
 .../core/Search/TestDateFilter.cs               |  165 -
 .../core/Search/TestDateSort.cs                 |  125 -
 .../core/Search/TestDisjunctionMaxQuery.cs      |  570 ----
 .../core/Search/TestDocBoost.cs                 |  122 -
 .../core/Search/TestDocIdSet.cs                 |  254 --
 .../core/Search/TestDocTermOrdsRangeFilter.cs   |  149 -
 .../core/Search/TestDocTermOrdsRewriteMethod.cs |  164 -
 .../core/Search/TestDocValuesScoring.cs         |  233 --
 .../core/Search/TestEarlyTermination.cs         |  124 -
 .../core/Search/TestElevationComparator.cs      |  240 --
 .../core/Search/TestExplanations.cs             |  270 --
 .../core/Search/TestFieldCache.cs               | 1058 -------
 .../core/Search/TestFieldCacheRangeFilter.cs    |  613 ----
 .../core/Search/TestFieldCacheRewriteMethod.cs  |   86 -
 .../core/Search/TestFieldCacheTermsFilter.cs    |   80 -
 .../core/Search/TestFieldValueFilter.cs         |  127 -
 .../core/Search/TestFilteredQuery.cs            |  719 -----
 .../core/Search/TestFilteredSearch.cs           |  112 -
 .../core/Search/TestFuzzyQuery.cs               |  389 ---
 .../core/Search/TestIndexSearcher.cs            |  145 -
 .../core/Search/TestLiveFieldValues.cs          |  245 --
 .../core/Search/TestMatchAllDocsQuery.cs        |  115 -
 .../core/Search/TestMinShouldMatch2.cs          |  431 ---
 .../core/Search/TestMultiPhraseQuery.cs         |  631 ----
 .../core/Search/TestMultiTermConstantScore.cs   |  563 ----
 .../core/Search/TestMultiTermQueryRewrites.cs   |  305 --
 .../core/Search/TestMultiThreadTermVectors.cs   |  244 --
 .../Search/TestMultiValuedNumericRangeQuery.cs  |   88 -
 .../core/Search/TestNGramPhraseQuery.cs         |  113 -
 src/Lucene.Net.Tests/core/Search/TestNot.cs     |   65 -
 .../core/Search/TestNumericRangeQuery32.cs      |  705 -----
 .../core/Search/TestNumericRangeQuery64.cs      |  752 -----
 .../core/Search/TestPhrasePrefixQuery.cs        |  108 -
 .../core/Search/TestPhraseQuery.cs              |  758 -----
 .../core/Search/TestPositionIncrement.cs        |  323 --
 .../Search/TestPositiveScoresOnlyCollector.cs   |  118 -
 .../core/Search/TestPrefixFilter.cs             |  114 -
 .../core/Search/TestPrefixInBooleanQuery.cs     |  127 -
 .../core/Search/TestPrefixQuery.cs              |   75 -
 .../core/Search/TestPrefixRandom.cs             |  158 -
 .../core/Search/TestQueryRescorer.cs            |  610 ----
 .../core/Search/TestQueryWrapperFilter.cs       |  166 -
 .../core/Search/TestRegexpQuery.cs              |  161 -
 .../core/Search/TestRegexpRandom.cs             |  156 -
 .../core/Search/TestRegexpRandom2.cs            |  194 --
 .../core/Search/TestSameScoresWithThreads.cs    |  158 -
 .../Search/TestScoreCachingWrappingScorer.cs    |  150 -
 .../core/Search/TestScorerPerf.cs               |  493 ---
 .../core/Search/TestSearchAfter.cs              |  370 ---
 .../core/Search/TestSearchWithThreads.cs        |  163 -
 .../core/Search/TestSearcherManager.cs          |  648 ----
 .../core/Search/TestShardSearching.cs           |  488 ---
 .../core/Search/TestSimilarity.cs               |  258 --
 .../core/Search/TestSimilarityProvider.cs       |  241 --
 .../core/Search/TestSimpleExplanations.cs       |  950 ------
 .../TestSimpleExplanationsOfNonMatches.cs       |  489 ---
 .../core/Search/TestSimpleSearchEquivalence.cs  |  230 --
 .../core/Search/TestSloppyPhraseQuery.cs        |  365 ---
 .../core/Search/TestSloppyPhraseQuery2.cs       |  247 --
 src/Lucene.Net.Tests/core/Search/TestSort.cs    | 1984 ------------
 .../core/Search/TestSortDocValues.cs            | 1036 ------
 .../core/Search/TestSortRandom.cs               |  365 ---
 .../core/Search/TestSortRescorer.cs             |  220 --
 .../core/Search/TestSubScorerFreqs.cs           |  230 --
 .../core/Search/TestTermRangeFilter.cs          |  195 --
 .../core/Search/TestTermRangeQuery.cs           |  375 ---
 .../core/Search/TestTermScorer.cs               |  203 --
 .../core/Search/TestTermVectors.cs              |  274 --
 .../core/Search/TestTimeLimitingCollector.cs    |  442 ---
 .../core/Search/TestTopDocsCollector.cs         |  232 --
 .../core/Search/TestTopDocsMerge.cs             |  361 ---
 .../core/Search/TestTopFieldCollector.cs        |  253 --
 .../core/Search/TestTopScoreDocCollector.cs     |   72 -
 .../core/Search/TestTotalHitCountCollector.cs   |   58 -
 .../core/Search/TestWildcard.cs                 |  375 ---
 .../core/Search/TestWildcardRandom.cs           |  159 -
 .../core/Store/TestBufferedChecksum.cs          |   79 -
 .../core/Store/TestBufferedIndexInput.cs        |  401 ---
 .../core/Store/TestByteArrayDataInput.cs        |   39 -
 .../core/Store/TestCopyBytes.cs                 |  200 --
 .../core/Store/TestDirectory.cs                 |  415 ---
 .../core/Store/TestFileSwitchDirectory.cs       |  186 --
 .../core/Store/TestFilterDirectory.cs           |   52 -
 .../core/Store/TestHugeRamFile.cs               |  128 -
 src/Lucene.Net.Tests/core/Store/TestLock.cs     |   78 -
 .../core/Store/TestLockFactory.cs               |  510 ---
 .../core/Store/TestMockDirectoryWrapper.cs      |  117 -
 .../core/Store/TestMultiMMap.cs                 |  438 ---
 .../core/Store/TestNRTCachingDirectory.cs       |  217 --
 .../core/Store/TestRAMDirectory.cs              |  231 --
 .../core/Store/TestRateLimiter.cs               |   49 -
 .../core/Store/TestWindowsMMap.cs               |  125 -
 .../core/Support/BaseBufferTestCase.cs          |  135 -
 src/Lucene.Net.Tests/core/Support/BigObject.cs  |   35 -
 .../core/Support/C5/ArrayBase.cs                |  485 ---
 .../core/Support/C5/ArrayList.cs                | 2252 -------------
 .../core/Support/C5/DropMultiplicity.cs         |   17 -
 src/Lucene.Net.Tests/core/Support/C5/Events.cs  |  893 ------
 .../core/Support/C5/GenericCollectionTester.cs  |   88 -
 src/Lucene.Net.Tests/core/Support/C5/HashBag.cs |  660 ----
 src/Lucene.Net.Tests/core/Support/C5/Sorting.cs |  239 --
 .../core/Support/C5/SupportClasses.cs           |  506 ---
 .../core/Support/C5/WeakViewList.cs             |  105 -
 .../core/Support/C5/Wrappers.cs                 | 2364 --------------
 .../core/Support/CollisionTester.cs             |   50 -
 src/Lucene.Net.Tests/core/Support/ReadFully.txt |   14 -
 .../core/Support/SmallObject.cs                 |   33 -
 .../core/Support/TestByteBuffer.cs              |  739 -----
 src/Lucene.Net.Tests/core/Support/TestCase.cs   |   54 -
 .../core/Support/TestCloseableThreadLocal.cs    |  108 -
 .../core/Support/TestDataInputStream.cs         |  231 --
 .../core/Support/TestDataOutputStream.cs        |  173 -
 .../core/Support/TestEquatableList.cs           |  167 -
 .../core/Support/TestExceptionSerialization.cs  |  102 -
 .../core/Support/TestHashMap.cs                 |  214 --
 .../core/Support/TestIDisposable.cs             |   67 -
 .../core/Support/TestLRUCache.cs                |   47 -
 .../core/Support/TestLinkedHashMap.cs           |  439 ---
 .../core/Support/TestLongBuffer.cs              |  523 ---
 .../core/Support/TestLurchTable.cs              | 1132 -------
 .../core/Support/TestLurchTableThreading.cs     |  250 --
 .../core/Support/TestOSClass.cs                 |   48 -
 .../core/Support/TestOldPatches.cs              |  292 --
 .../core/Support/TestSafeTextWriterWrapper.cs   |   47 -
 .../core/Support/TestSerialization.cs           |  102 -
 .../core/Support/TestStringBuilderExtensions.cs |   72 -
 .../core/Support/TestSupportClass.cs            |   86 -
 .../core/Support/TestThreadClass.cs             |   59 -
 .../core/Support/TestToStringUtils.cs           |   71 -
 .../core/Support/TestTreeDictionary.cs          |  522 ---
 .../core/Support/TestTreeSet.cs                 | 2970 ------------------
 .../core/Support/TestWeakDictionary.cs          |  148 -
 .../core/Support/TestWeakDictionaryBehavior.cs  |  291 --
 .../Support/TestWeakDictionaryPerformance.cs    |  134 -
 .../core/SupportClassException.cs               |   47 -
 src/Lucene.Net.Tests/core/Test.nunit            |   22 -
 src/Lucene.Net.Tests/core/TestApiConsistency.cs |  127 -
 src/Lucene.Net.Tests/core/TestAssertions.cs     |   78 -
 src/Lucene.Net.Tests/core/TestDemo.cs           |   89 -
 src/Lucene.Net.Tests/core/TestExternalCodecs.cs |  154 -
 .../core/TestMergeSchedulerExternal.cs          |  177 --
 src/Lucene.Net.Tests/core/TestSearch.cs         |  206 --
 .../core/TestSearchForDuplicates.cs             |  157 -
 .../core/TestWorstCaseTestBehavior.cs           |  150 -
 .../core/Util/Automaton/TestBasicOperations.cs  |  180 --
 .../Util/Automaton/TestCompiledAutomaton.cs     |  147 -
 .../core/Util/Automaton/TestDeterminism.cs      |   92 -
 .../Util/Automaton/TestDeterminizeLexicon.cs    |   72 -
 .../Util/Automaton/TestLevenshteinAutomata.cs   |  435 ---
 .../core/Util/Automaton/TestMinimize.cs         |   72 -
 .../Util/Automaton/TestSpecialOperations.cs     |   61 -
 .../core/Util/Automaton/TestUTF32ToUTF8.cs      |  287 --
 .../core/Util/BaseSortTestCase.cs               |  210 --
 .../core/Util/Cache/TestSimpleLRUCache.cs       |   77 -
 src/Lucene.Net.Tests/core/Util/Fst/Test2BFST.cs |  349 --
 .../core/Util/Fst/TestBytesStore.cs             |  433 ---
 src/Lucene.Net.Tests/core/Util/Fst/TestFSTs.cs  | 1904 -----------
 .../core/Util/Junitcompat/SorePoint.cs          |   36 -
 .../core/Util/Junitcompat/SoreType.cs           |   28 -
 .../Junitcompat/TestBeforeAfterOverrides.cs     |  103 -
 .../core/Util/Junitcompat/TestCodecReported.cs  |   56 -
 .../TestExceptionInBeforeClassHooks.cs          |  215 --
 .../Junitcompat/TestFailIfDirectoryNotClosed.cs |   64 -
 .../Junitcompat/TestFailIfUnreferencedFiles.cs  |   80 -
 .../Junitcompat/TestFailOnFieldCacheInsanity.cs |   88 -
 .../core/Util/Junitcompat/TestGroupFiltering.cs |   85 -
 .../core/Util/Junitcompat/TestJUnitRuleOrder.cs |  132 -
 .../Junitcompat/TestLeaveFilesIfTestFails.cs    |   87 -
 .../Util/Junitcompat/TestReproduceMessage.cs    |  389 ---
 .../TestReproduceMessageWithRepeated.cs         |   64 -
 .../TestSameRandomnessLocalePassedOrNot.cs      |   91 -
 .../Util/Junitcompat/TestSeedFromUncaught.cs    |   85 -
 .../Junitcompat/TestSetupTeardownChaining.cs    |   96 -
 .../TestSystemPropertiesInvariantRule.cs        |  196 --
 .../core/Util/Junitcompat/WithNestedTests.cs    |  198 --
 .../core/Util/Packed/TestEliasFanoDocIdSet.cs   |  124 -
 .../core/Util/Packed/TestEliasFanoSequence.cs   |  504 ---
 .../core/Util/Packed/TestPackedInts.cs          | 1547 ---------
 .../core/Util/StressRamUsageEstimator.cs        |  174 -
 .../core/Util/Test2BPagedBytes.cs               |   87 -
 src/Lucene.Net.Tests/core/Util/TestArrayUtil.cs |  357 ---
 .../core/Util/TestAttributeSource.cs            |  194 --
 src/Lucene.Net.Tests/core/Util/TestBroadWord.cs |  172 -
 .../core/Util/TestByteBlockPool.cs              |   70 -
 src/Lucene.Net.Tests/core/Util/TestBytesRef.cs  |   99 -
 .../core/Util/TestBytesRefArray.cs              |  115 -
 .../core/Util/TestBytesRefHash.cs               |  440 ---
 src/Lucene.Net.Tests/core/Util/TestCharsRef.cs  |  257 --
 .../core/Util/TestCloseableThreadLocal.cs       |   69 -
 .../core/Util/TestCollectionUtil.cs             |  114 -
 src/Lucene.Net.Tests/core/Util/TestConstants.cs |   60 -
 .../core/Util/TestDocIdBitSet.cs                |   79 -
 .../core/Util/TestDoubleBarrelLRUCache.cs       |  226 --
 .../core/Util/TestFieldCacheSanityChecker.cs    |  193 --
 .../core/Util/TestFilterIterator.cs             |  252 --
 .../core/Util/TestFixedBitSet.cs                |  563 ----
 src/Lucene.Net.Tests/core/Util/TestIOUtils.cs   |   91 -
 .../core/Util/TestIdentityHashSet.cs            |   63 -
 .../core/Util/TestInPlaceMergeSorter.cs         |  100 -
 .../core/Util/TestIndexableBinaryStringTools.cs |  212 --
 .../core/Util/TestIntroSorter.cs                |   96 -
 src/Lucene.Net.Tests/core/Util/TestIntsRef.cs   |   49 -
 .../core/Util/TestLongBitSet.cs                 |  450 ---
 src/Lucene.Net.Tests/core/Util/TestMathUtil.cs  |  215 --
 .../core/Util/TestMaxFailuresRule.cs            |  262 --
 .../core/Util/TestMergedIterator.cs             |  175 --
 .../core/Util/TestNamedSPILoader.cs             |   60 -
 .../core/Util/TestNumericUtils.cs               |  545 ----
 .../core/Util/TestOfflineSorter.cs              |  257 --
 .../core/Util/TestOpenBitSet.cs                 |  566 ----
 .../core/Util/TestPForDeltaDocIdSet.cs          |   86 -
 .../core/Util/TestPagedBytes.cs                 |  233 --
 .../core/Util/TestPriorityQueue.cs              |  633 ----
 .../core/Util/TestQueryBuilder.cs               |  437 ---
 .../core/Util/TestRamUsageEstimator.cs          |  148 -
 .../Util/TestRamUsageEstimatorOnWildAnimals.cs  |   70 -
 .../Util/TestRecyclingByteBlockAllocator.cs     |  151 -
 .../core/Util/TestRecyclingIntBlockAllocator.cs |  151 -
 .../core/Util/TestRollingBuffer.cs              |  114 -
 .../core/Util/TestSentinelIntSet.cs             |   76 -
 src/Lucene.Net.Tests/core/Util/TestSetOnce.cs   |  119 -
 .../core/Util/TestSloppyMath.cs                 |  133 -
 .../core/Util/TestSmallFloat.cs                 |  160 -
 .../core/Util/TestStringHelper.cs               |   35 -
 src/Lucene.Net.Tests/core/Util/TestTimSorter.cs |   96 -
 .../core/Util/TestUnicodeUtil.cs                |  254 --
 src/Lucene.Net.Tests/core/Util/TestVersion.cs   |   83 -
 .../core/Util/TestVersionComparator.cs          |   59 -
 .../core/Util/TestVirtualMethod.cs              |  161 -
 .../core/Util/TestWAH8DocIdSet.cs               |  154 -
 .../core/Util/TestWeakIdentityMap.cs            |  354 ---
 src/Lucene.Net.Tests/project.json               |  141 +-
 1009 files changed, 144592 insertions(+), 145228 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Analysis/TestCachingTokenFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Analysis/TestCachingTokenFilter.cs b/src/Lucene.Net.Tests/Analysis/TestCachingTokenFilter.cs
new file mode 100644
index 0000000..74220bf
--- /dev/null
+++ b/src/Lucene.Net.Tests/Analysis/TestCachingTokenFilter.cs
@@ -0,0 +1,138 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Analysis
+{
+    using Lucene.Net.Store;
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Document = Documents.Document;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using MultiFields = Lucene.Net.Index.MultiFields;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestCachingTokenFilter : BaseTokenStreamTestCase
+    {
+        private string[] Tokens = new string[] { "term1", "term2", "term3", "term2" };
+
+        [Test]
+        public virtual void TestCaching()
+        {
+            Directory dir = new RAMDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            TokenStream stream = new TokenStreamAnonymousInnerClassHelper(this);
+
+            stream = new CachingTokenFilter(stream);
+
+            doc.Add(new TextField("preanalyzed", stream));
+
+            // 1) we consume all tokens twice before we add the doc to the index
+            CheckTokens(stream);
+            stream.Reset();
+            CheckTokens(stream);
+
+            // 2) now add the document to the index and verify that all tokens are indexed
+            //    don't reset the stream here; the DocumentWriter should do that implicitly
+            writer.AddDocument(doc);
+
+            IndexReader reader = writer.Reader;
+            DocsAndPositionsEnum termPositions = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), "preanalyzed", new BytesRef("term1"));
+            Assert.IsTrue(termPositions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(1, termPositions.Freq);
+            Assert.AreEqual(0, termPositions.NextPosition());
+
+            termPositions = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), "preanalyzed", new BytesRef("term2"));
+            Assert.IsTrue(termPositions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(2, termPositions.Freq);
+            Assert.AreEqual(1, termPositions.NextPosition());
+            Assert.AreEqual(3, termPositions.NextPosition());
+
+            termPositions = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), "preanalyzed", new BytesRef("term3"));
+            Assert.IsTrue(termPositions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(1, termPositions.Freq);
+            Assert.AreEqual(2, termPositions.NextPosition());
+            reader.Dispose();
+            writer.Dispose();
+            // 3) reset stream and consume tokens again
+            stream.Reset();
+            CheckTokens(stream);
+            dir.Dispose();
+        }
+
+        private class TokenStreamAnonymousInnerClassHelper : TokenStream
+        {
+            private TestCachingTokenFilter OuterInstance;
+
+            public TokenStreamAnonymousInnerClassHelper(TestCachingTokenFilter outerInstance)
+            {
+                InitMembers(outerInstance);
+            }
+
+            public void InitMembers(TestCachingTokenFilter outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+                index = 0;
+                termAtt = AddAttribute<ICharTermAttribute>();
+                offsetAtt = AddAttribute<IOffsetAttribute>();
+            }
+
+            private int index;
+            private ICharTermAttribute termAtt;
+            private IOffsetAttribute offsetAtt;
+
+            public sealed override bool IncrementToken()
+            {
+                if (index == OuterInstance.Tokens.Length)
+                {
+                    return false;
+                }
+                else
+                {
+                    ClearAttributes();
+                    termAtt.Append(OuterInstance.Tokens[index++]);
+                    offsetAtt.SetOffset(0, 0);
+                    return true;
+                }
+            }
+        }
+
+        private void CheckTokens(TokenStream stream)
+        {
+            int count = 0;
+
+            ICharTermAttribute termAtt = stream.GetAttribute<ICharTermAttribute>();
+            while (stream.IncrementToken())
+            {
+                Assert.IsTrue(count < Tokens.Length);
+                Assert.AreEqual(Tokens[count], termAtt.ToString());
+                count++;
+            }
+
+            Assert.AreEqual(Tokens.Length, count);
+        }
+    }
+}
\ No newline at end of file

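For context, CachingTokenFilter records the wrapped stream's tokens on the first pass and replays them from its cache on every later Reset()/IncrementToken() cycle, which is what lets the test above consume the stream both before and after the document is indexed. Below is a minimal standalone sketch of that replay behavior; WhitespaceTokenizer and LuceneVersion.LUCENE_48 are assumptions drawn from the contemporary Lucene.Net 4.8 analysis API, not part of this commit:

    using System;
    using System.IO;
    using Lucene.Net.Analysis;
    using Lucene.Net.Analysis.Core;
    using Lucene.Net.Analysis.TokenAttributes;
    using Lucene.Net.Util;

    public static class CachingDemo
    {
        public static void Main()
        {
            TokenStream source = new WhitespaceTokenizer(
                LuceneVersion.LUCENE_48, new StringReader("term1 term2 term3 term2"));
            // In the 4.x line, CachingTokenFilter.Reset() only rewinds its own cache and
            // is not propagated to the wrapped stream, so prime the tokenizer directly.
            source.Reset();

            TokenStream cached = new CachingTokenFilter(source);
            ICharTermAttribute termAtt = cached.AddAttribute<ICharTermAttribute>();

            while (cached.IncrementToken()) // first pass pulls from the tokenizer and fills the cache
            {
                Console.WriteLine(termAtt.ToString());
            }

            cached.Reset(); // rewinds the cache only
            while (cached.IncrementToken()) // replayed from the cache; the tokenizer is not consulted again
            {
                Console.WriteLine(termAtt.ToString());
            }
        }
    }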
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Analysis/TestCharFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Analysis/TestCharFilter.cs b/src/Lucene.Net.Tests/Analysis/TestCharFilter.cs
new file mode 100644
index 0000000..265baae
--- /dev/null
+++ b/src/Lucene.Net.Tests/Analysis/TestCharFilter.cs
@@ -0,0 +1,94 @@
+namespace Lucene.Net.Analysis
+{
+    using NUnit.Framework;
+    using System.IO;
+
+    /*
+             * Licensed to the Apache Software Foundation (ASF) under one or more
+             * contributor license agreements.  See the NOTICE file distributed with
+             * this work for additional information regarding copyright ownership.
+             * The ASF licenses this file to You under the Apache License, Version 2.0
+             * (the "License"); you may not use this file except in compliance with
+             * the License.  You may obtain a copy of the License at
+             *
+             *     http://www.apache.org/licenses/LICENSE-2.0
+             *
+             * Unless required by applicable law or agreed to in writing, software
+             * distributed under the License is distributed on an "AS IS" BASIS,
+             * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+             * See the License for the specific language governing permissions and
+             * limitations under the License.
+             */
+
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    [TestFixture]
+    public class TestCharFilter : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestCharFilter1()
+        {
+            CharFilter cs = new CharFilter1(new StringReader(""));
+            Assert.AreEqual(1, cs.CorrectOffset(0), "corrected offset is invalid");
+        }
+
+        [Test]
+        public virtual void TestCharFilter2()
+        {
+            CharFilter cs = new CharFilter2(new StringReader(""));
+            Assert.AreEqual(2, cs.CorrectOffset(0), "corrected offset is invalid");
+        }
+
+        [Test]
+        public virtual void TestCharFilter12()
+        {
+            CharFilter cs = new CharFilter2(new CharFilter1(new StringReader("")));
+            Assert.AreEqual(3, cs.CorrectOffset(0), "corrected offset is invalid");
+        }
+
+        [Test]
+        public virtual void TestCharFilter11()
+        {
+            CharFilter cs = new CharFilter1(new CharFilter1(new StringReader("")));
+            Assert.AreEqual(2, cs.CorrectOffset(0), "corrected offset is invalid");
+        }
+
+        internal class CharFilter1 : CharFilter
+        {
+            protected internal CharFilter1(TextReader @in)
+                : base(@in)
+            {
+            }
+
+            public override int Read(char[] cbuf, int off, int len)
+            {
+                int numRead = m_input.Read(cbuf, off, len);
+                return numRead == 0 ? -1 : numRead;
+            }
+
+            protected override int Correct(int currentOff)
+            {
+                return currentOff + 1;
+            }
+        }
+
+        internal class CharFilter2 : CharFilter
+        {
+            protected internal CharFilter2(TextReader @in)
+                : base(@in)
+            {
+            }
+
+            public override int Read(char[] cbuf, int off, int len)
+            {
+                int numRead = m_input.Read(cbuf, off, len);
+                return numRead == 0 ? -1 : numRead;
+            }
+
+            protected override int Correct(int currentOff)
+            {
+                return currentOff + 2;
+            }
+        }
+    }
+}
\ No newline at end of file

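The four cases above pin down how offset correction composes: each filter's Correct() adds its own shift, and CharFilter.CorrectOffset() walks the chain, so stacking a +2 filter over a +1 filter yields +3 (TestCharFilter12). The same idea can be written once with a parameterized filter; PlusNCharFilter is a hypothetical name used here for illustration, not part of this commit:

    using System.IO;
    using Lucene.Net.Analysis;

    // Shifts corrected offsets by a fixed amount, generalizing CharFilter1/CharFilter2.
    internal sealed class PlusNCharFilter : CharFilter
    {
        private readonly int n;

        public PlusNCharFilter(TextReader input, int n)
            : base(input)
        {
            this.n = n;
        }

        public override int Read(char[] cbuf, int off, int len)
        {
            int numRead = m_input.Read(cbuf, off, len);
            return numRead == 0 ? -1 : numRead;
        }

        protected override int Correct(int currentOff)
        {
            return currentOff + n;
        }
    }

    // Usage: the outer filter applies its own Correct() first, then delegates to the
    // wrapped filter, so the shifts accumulate.
    //   CharFilter chained = new PlusNCharFilter(new PlusNCharFilter(new StringReader(""), 1), 2);
    //   chained.CorrectOffset(0);  // == 3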

[52/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Renamed Codecs\compressing\ to Codecs\Compressing\

Posted by ni...@apache.org.
Lucene.Net.TestFramework: Renamed Codecs\compressing\ to Codecs\Compressing\


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/362f0d30
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/362f0d30
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/362f0d30

Branch: refs/heads/api-work
Commit: 362f0d30d0cc0c1e9689df23d38c7a5f22b46b89
Parents: 77e95cc
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 26 03:08:22 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:17:55 2017 +0700

----------------------------------------------------------------------
 .../Codecs/Compressing/CompressingCodec.cs      | 112 ++++++++++++++++++
 .../Compressing/Dummy/DummyCompressingCodec.cs  | 113 +++++++++++++++++++
 .../Codecs/Compressing/FastCompressingCodec.cs  |  47 ++++++++
 .../FastDecompressionCompressingCodec.cs        |  47 ++++++++
 .../HighCompressionCompressingCodec.cs          |  47 ++++++++
 .../Codecs/compressing/CompressingCodec.cs      | 112 ------------------
 .../Codecs/compressing/FastCompressingCodec.cs  |  47 --------
 .../FastDecompressionCompressingCodec.cs        |  47 --------
 .../HighCompressionCompressingCodec.cs          |  47 --------
 .../compressing/dummy/DummyCompressingCodec.cs  | 113 -------------------
 .../Lucene.Net.TestFramework.csproj             |  10 +-
 11 files changed, 371 insertions(+), 371 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/362f0d30/src/Lucene.Net.TestFramework/Codecs/Compressing/CompressingCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Compressing/CompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Compressing/CompressingCodec.cs
new file mode 100644
index 0000000..74bf859
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Compressing/CompressingCodec.cs
@@ -0,0 +1,112 @@
+using System;
+
+namespace Lucene.Net.Codecs.Compressing
+{
+    using Lucene.Net.Randomized.Generators;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using DummyCompressingCodec = Lucene.Net.Codecs.Compressing.Dummy.DummyCompressingCodec;
+    using Lucene46Codec = Lucene.Net.Codecs.Lucene46.Lucene46Codec;
+
+    //using RandomInts = com.carrotsearch.randomizedtesting.generators.RandomInts;
+
+    /// <summary>
+    /// A codec that uses <seealso cref="CompressingStoredFieldsFormat"/> for its stored
+    /// fields and delegates to <seealso cref="Lucene46Codec"/> for everything else.
+    /// </summary>
+    public abstract class CompressingCodec : FilterCodec
+    {
+        /// <summary>
+        /// Create a random instance.
+        /// </summary>
+        public static CompressingCodec RandomInstance(Random random, int chunkSize, bool withSegmentSuffix)
+        {
+            switch (random.Next(4))
+            {
+                case 0:
+                    return new FastCompressingCodec(chunkSize, withSegmentSuffix);
+
+                case 1:
+                    return new FastDecompressionCompressingCodec(chunkSize, withSegmentSuffix);
+
+                case 2:
+                    return new HighCompressionCompressingCodec(chunkSize, withSegmentSuffix);
+
+                case 3:
+                    return new DummyCompressingCodec(chunkSize, withSegmentSuffix);
+
+                default:
+                    throw new InvalidOperationException();
+            }
+        }
+
+        /// <summary>
+        /// Creates a random <seealso cref="CompressingCodec"/> that uses an empty segment
+        /// suffix
+        /// </summary>
+        public static CompressingCodec RandomInstance(Random random)
+        {
+            return RandomInstance(random, RandomInts.NextIntBetween(random, 1, 500), false);
+        }
+
+        /// <summary>
+        /// Creates a random <seealso cref="CompressingCodec"/> that uses a segment suffix
+        /// </summary>
+        public static CompressingCodec RandomInstance(Random random, bool withSegmentSuffix)
+        {
+            return RandomInstance(random, RandomInts.NextIntBetween(random, 1, 500), withSegmentSuffix);
+        }
+
+        private readonly CompressingStoredFieldsFormat StoredFieldsFormat_Renamed;
+        private readonly CompressingTermVectorsFormat TermVectorsFormat_Renamed;
+
+        /// <summary>
+        /// Creates a compressing codec with a given segment suffix
+        /// </summary>
+        protected CompressingCodec(string segmentSuffix, CompressionMode compressionMode, int chunkSize)
+            : base(new Lucene46Codec())
+        {
+            this.StoredFieldsFormat_Renamed = new CompressingStoredFieldsFormat(this.Name, segmentSuffix, compressionMode, chunkSize);
+            this.TermVectorsFormat_Renamed = new CompressingTermVectorsFormat(this.Name, segmentSuffix, compressionMode, chunkSize);
+        }
+
+        /// <summary>
+        /// Creates a compressing codec with an empty segment suffix
+        /// </summary>
+        protected CompressingCodec(CompressionMode compressionMode, int chunkSize)
+            : this("", compressionMode, chunkSize)
+        {
+        }
+
+        public override StoredFieldsFormat StoredFieldsFormat
+        {
+            get { return StoredFieldsFormat_Renamed; }
+        }
+
+        public override TermVectorsFormat TermVectorsFormat
+        {
+            get { return TermVectorsFormat_Renamed; }
+        }
+
+        public override string ToString()
+        {
+            return Name + "(storedFieldsFormat=" + StoredFieldsFormat_Renamed + ", termVectorsFormat=" + TermVectorsFormat_Renamed + ")";
+        }
+    }
+}
\ No newline at end of file

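RandomInstance above is the entry point tests use to draw one of the four stored-fields configurations at random. A minimal sketch of calling it directly follows; the fixed seed and the explicit chunk size are illustrative choices, and wiring the resulting codec into an IndexWriterConfig is left out:

    using System;
    using Lucene.Net.Codecs.Compressing;

    public static class RandomCodecDemo
    {
        public static void Main()
        {
            Random random = new Random(42);

            // One of Fast / FastDecompression / HighCompression / Dummy, with a
            // random chunk size between 1 and 500 and an empty segment suffix:
            CompressingCodec codec = CompressingCodec.RandomInstance(random);

            // Or pin the chunk size and request a segment suffix explicitly:
            CompressingCodec suffixed = CompressingCodec.RandomInstance(random, 128, true);

            Console.WriteLine(codec);    // e.g. FastCompressingStoredFields(storedFieldsFormat=..., termVectorsFormat=...)
            Console.WriteLine(suffixed);
        }
    }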
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/362f0d30/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs
new file mode 100644
index 0000000..01ecf30
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Compressing/Dummy/DummyCompressingCodec.cs
@@ -0,0 +1,113 @@
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Compressing.Dummy
+{
+    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using DataInput = Lucene.Net.Store.DataInput;
+    using DataOutput = Lucene.Net.Store.DataOutput;
+
+    /// <summary>
+    /// CompressionCodec that does not compress data, useful for testing. </summary>
+    // In its own package to make sure the oal.codecs.compressing classes are
+    // visible enough to let people write their own CompressionMode
+    [CodecName("DummyCompressingStoredFields")]
+    public class DummyCompressingCodec : CompressingCodec
+    {
+        public static readonly CompressionMode DUMMY = new CompressionModeAnonymousInnerClassHelper();
+
+        private class CompressionModeAnonymousInnerClassHelper : CompressionMode
+        {
+            public CompressionModeAnonymousInnerClassHelper()
+            {
+            }
+
+            public override Compressor NewCompressor()
+            {
+                return DUMMY_COMPRESSOR;
+            }
+
+            public override Decompressor NewDecompressor()
+            {
+                return DUMMY_DECOMPRESSOR;
+            }
+
+            public override string ToString()
+            {
+                return "DUMMY";
+            }
+        }
+
+        private static readonly Decompressor DUMMY_DECOMPRESSOR = new DecompressorAnonymousInnerClassHelper();
+
+        private class DecompressorAnonymousInnerClassHelper : Decompressor
+        {
+            public DecompressorAnonymousInnerClassHelper()
+            {
+            }
+
+            public override void Decompress(DataInput @in, int originalLength, int offset, int length, BytesRef bytes)
+            {
+                Debug.Assert(offset + length <= originalLength);
+                if (bytes.Bytes.Length < originalLength)
+                {
+                    bytes.Bytes = new byte[ArrayUtil.Oversize(originalLength, 1)];
+                }
+                @in.ReadBytes(bytes.Bytes, 0, offset + length);
+                bytes.Offset = offset;
+                bytes.Length = length;
+            }
+
+            public override object Clone()
+            {
+                return this;
+            }
+        }
+
+        private static readonly Compressor DUMMY_COMPRESSOR = new CompressorAnonymousInnerClassHelper();
+
+        private class CompressorAnonymousInnerClassHelper : Compressor
+        {
+            public CompressorAnonymousInnerClassHelper()
+            {
+            }
+
+            public override void Compress(byte[] bytes, int off, int len, DataOutput @out)
+            {
+                @out.WriteBytes(bytes, off, len);
+            }
+        }
+
+        /// <summary>
+        /// Constructor that allows configuring the chunk size. </summary>
+        public DummyCompressingCodec(int chunkSize, bool withSegmentSuffix)
+            : base(withSegmentSuffix ? "DummyCompressingStoredFields" : "", DUMMY, chunkSize)
+        {
+        }
+
+        /// <summary>
+        /// Default constructor. </summary>
+        public DummyCompressingCodec()
+            : this(1 << 14, false)
+        {
+        }
+    }
+}
\ No newline at end of file

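Since the DUMMY mode stores bytes verbatim, its decompressor is just a positioned read: it pulls offset + length bytes from the input and points the BytesRef at the requested window. Below is a sketch of that round trip outside the codec, assuming a RAMDirectory for the DataOutput/DataInput plumbing (the codec normally drives these calls internally):

    using System;
    using Lucene.Net.Codecs.Compressing.Dummy;
    using Lucene.Net.Store;
    using Lucene.Net.Util;

    public static class DummyRoundTrip
    {
        public static void Main()
        {
            var dir = new RAMDirectory();
            byte[] payload = { 1, 2, 3, 4, 5 };

            using (IndexOutput @out = dir.CreateOutput("blob", IOContext.DEFAULT))
            {
                // "Compression" here is a verbatim copy of the input bytes.
                DummyCompressingCodec.DUMMY.NewCompressor().Compress(payload, 0, payload.Length, @out);
            }

            var bytes = new BytesRef();
            using (IndexInput @in = dir.OpenInput("blob", IOContext.DEFAULT))
            {
                // Ask for 3 bytes starting at offset 1 of the 5 original bytes.
                DummyCompressingCodec.DUMMY.NewDecompressor().Decompress(@in, 5, 1, 3, bytes);
            }

            Console.WriteLine(bytes.Length); // 3 -- the BytesRef now spans {2, 3, 4}
            dir.Dispose();
        }
    }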
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/362f0d30/src/Lucene.Net.TestFramework/Codecs/Compressing/FastCompressingCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Compressing/FastCompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Compressing/FastCompressingCodec.cs
new file mode 100644
index 0000000..b574080
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Compressing/FastCompressingCodec.cs
@@ -0,0 +1,47 @@
+namespace Lucene.Net.Codecs.Compressing
+{
+    using Lucene42NormsFormat = Lucene.Net.Codecs.Lucene42.Lucene42NormsFormat;
+    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// CompressionCodec that uses <seealso cref="CompressionMode#FAST"/> </summary>
+    [CodecName("FastCompressingStoredFields")]
+    public class FastCompressingCodec : CompressingCodec
+    {
+        /// <summary>
+        /// Constructor that allows configuring the chunk size. </summary>
+        public FastCompressingCodec(int chunkSize, bool withSegmentSuffix)
+            : base(withSegmentSuffix ? "FastCompressingStoredFields" : "", CompressionMode.FAST, chunkSize)
+        {
+        }
+
+        /// <summary>
+        /// Default constructor. </summary>
+        public FastCompressingCodec()
+            : this(1 << 14, false)
+        {
+        }
+
+        public override NormsFormat NormsFormat
+        {
+            get { return new Lucene42NormsFormat(PackedInt32s.FAST); }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/362f0d30/src/Lucene.Net.TestFramework/Codecs/Compressing/FastDecompressionCompressingCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Compressing/FastDecompressionCompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Compressing/FastDecompressionCompressingCodec.cs
new file mode 100644
index 0000000..b5e6df3
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Compressing/FastDecompressionCompressingCodec.cs
@@ -0,0 +1,47 @@
+namespace Lucene.Net.Codecs.Compressing
+{
+    using Lucene42NormsFormat = Lucene.Net.Codecs.Lucene42.Lucene42NormsFormat;
+    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// CompressionCodec that uses <seealso cref="CompressionMode#FAST_DECOMPRESSION"/> </summary>
+    [CodecName("FastDecompressionCompressingStoredFields")]
+    public class FastDecompressionCompressingCodec : CompressingCodec
+    {
+        /// <summary>
+        /// Constructor that allows configuring the chunk size. </summary>
+        public FastDecompressionCompressingCodec(int chunkSize, bool withSegmentSuffix)
+            : base(withSegmentSuffix ? "FastDecompressionCompressingStoredFields" : "", CompressionMode.FAST_DECOMPRESSION, chunkSize)
+        {
+        }
+
+        /// <summary>
+        /// Default constructor. </summary>
+        public FastDecompressionCompressingCodec()
+            : this(1 << 14, false)
+        {
+        }
+
+        public override NormsFormat NormsFormat
+        {
+            get { return new Lucene42NormsFormat(PackedInt32s.DEFAULT); }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/362f0d30/src/Lucene.Net.TestFramework/Codecs/Compressing/HighCompressionCompressingCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Compressing/HighCompressionCompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Compressing/HighCompressionCompressingCodec.cs
new file mode 100644
index 0000000..95f78b4
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Compressing/HighCompressionCompressingCodec.cs
@@ -0,0 +1,47 @@
+namespace Lucene.Net.Codecs.Compressing
+{
+    using Lucene42NormsFormat = Lucene.Net.Codecs.Lucene42.Lucene42NormsFormat;
+    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// CompressionCodec that uses <seealso cref="CompressionMode#HIGH_COMPRESSION"/> </summary>
+    [CodecName("HighCompressionCompressingStoredFields")]
+    public class HighCompressionCompressingCodec : CompressingCodec
+    {
+        /// <summary>
+        /// Constructor that allows configuring the chunk size. </summary>
+        public HighCompressionCompressingCodec(int chunkSize, bool withSegmentSuffix)
+            : base(withSegmentSuffix ? "HighCompressionCompressingStoredFields" : "", CompressionMode.HIGH_COMPRESSION, chunkSize)
+        {
+        }
+
+        /// <summary>
+        /// Default constructor. </summary>
+        public HighCompressionCompressingCodec()
+            : this(1 << 14, false)
+        {
+        }
+
+        public override NormsFormat NormsFormat
+        {
+            get { return new Lucene42NormsFormat(PackedInt32s.COMPACT); }
+        }
+    }
+}
\ No newline at end of file

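Taken together, the three named variants differ only in the CompressionMode they pass to the base constructor and in how tightly their Lucene42NormsFormat packs norms (FAST, DEFAULT, COMPACT). A test that needs deterministic behavior can construct one directly instead of going through RandomInstance; the chunk size below is the same 1 << 14 default the parameterless constructors use:

    using Lucene.Net.Codecs.Compressing;

    public static class PickCodec
    {
        public static void Main()
        {
            // Speed-leaning, balanced, and size-leaning configurations:
            var fast = new FastCompressingCodec(chunkSize: 1 << 14, withSegmentSuffix: false);
            var balanced = new FastDecompressionCompressingCodec(1 << 14, false);
            var small = new HighCompressionCompressingCodec(1 << 14, false);

            System.Console.WriteLine(fast);
            System.Console.WriteLine(balanced);
            System.Console.WriteLine(small);
        }
    }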
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/362f0d30/src/Lucene.Net.TestFramework/Codecs/compressing/CompressingCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/compressing/CompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/compressing/CompressingCodec.cs
deleted file mode 100644
index f5f75bd..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/compressing/CompressingCodec.cs
+++ /dev/null
@@ -1,112 +0,0 @@
-using System;
-
-namespace Lucene.Net.Codecs.Compressing
-{
-    using Lucene.Net.Randomized.Generators;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using DummyCompressingCodec = Lucene.Net.Codecs.Compressing.dummy.DummyCompressingCodec;
-    using Lucene46Codec = Lucene.Net.Codecs.Lucene46.Lucene46Codec;
-
-    //using RandomInts = com.carrotsearch.randomizedtesting.generators.RandomInts;
-
-    /// <summary>
-    /// A codec that uses <seealso cref="CompressingStoredFieldsFormat"/> for its stored
-    /// fields and delegates to <seealso cref="Lucene46Codec"/> for everything else.
-    /// </summary>
-    public abstract class CompressingCodec : FilterCodec
-    {
-        /// <summary>
-        /// Create a random instance.
-        /// </summary>
-        public static CompressingCodec RandomInstance(Random random, int chunkSize, bool withSegmentSuffix)
-        {
-            switch (random.Next(4))
-            {
-                case 0:
-                    return new FastCompressingCodec(chunkSize, withSegmentSuffix);
-
-                case 1:
-                    return new FastDecompressionCompressingCodec(chunkSize, withSegmentSuffix);
-
-                case 2:
-                    return new HighCompressionCompressingCodec(chunkSize, withSegmentSuffix);
-
-                case 3:
-                    return new DummyCompressingCodec(chunkSize, withSegmentSuffix);
-
-                default:
-                    throw new InvalidOperationException();
-            }
-        }
-
-        /// <summary>
-        /// Creates a random <seealso cref="CompressingCodec"/> that uses an empty segment
-        /// suffix
-        /// </summary>
-        public static CompressingCodec RandomInstance(Random random)
-        {
-            return RandomInstance(random, RandomInts.NextIntBetween(random, 1, 500), false);
-        }
-
-        /// <summary>
-        /// Creates a random <seealso cref="CompressingCodec"/> that uses a segment suffix
-        /// </summary>
-        public static CompressingCodec RandomInstance(Random random, bool withSegmentSuffix)
-        {
-            return RandomInstance(random, RandomInts.NextIntBetween(random, 1, 500), withSegmentSuffix);
-        }
-
-        private readonly CompressingStoredFieldsFormat StoredFieldsFormat_Renamed;
-        private readonly CompressingTermVectorsFormat TermVectorsFormat_Renamed;
-
-        /// <summary>
-        /// Creates a compressing codec with a given segment suffix
-        /// </summary>
-        protected CompressingCodec(string segmentSuffix, CompressionMode compressionMode, int chunkSize)
-            : base(new Lucene46Codec())
-        {
-            this.StoredFieldsFormat_Renamed = new CompressingStoredFieldsFormat(this.Name, segmentSuffix, compressionMode, chunkSize);
-            this.TermVectorsFormat_Renamed = new CompressingTermVectorsFormat(this.Name, segmentSuffix, compressionMode, chunkSize);
-        }
-
-        /// <summary>
-        /// Creates a compressing codec with an empty segment suffix
-        /// </summary>
-        protected CompressingCodec(CompressionMode compressionMode, int chunkSize)
-            : this("", compressionMode, chunkSize)
-        {
-        }
-
-        public override StoredFieldsFormat StoredFieldsFormat
-        {
-            get { return StoredFieldsFormat_Renamed; }
-        }
-
-        public override TermVectorsFormat TermVectorsFormat
-        {
-            get { return TermVectorsFormat_Renamed; }
-        }
-
-        public override string ToString()
-        {
-            return Name + "(storedFieldsFormat=" + StoredFieldsFormat_Renamed + ", termVectorsFormat=" + TermVectorsFormat_Renamed + ")";
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/362f0d30/src/Lucene.Net.TestFramework/Codecs/compressing/FastCompressingCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/compressing/FastCompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/compressing/FastCompressingCodec.cs
deleted file mode 100644
index b574080..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/compressing/FastCompressingCodec.cs
+++ /dev/null
@@ -1,47 +0,0 @@
-namespace Lucene.Net.Codecs.Compressing
-{
-    using Lucene42NormsFormat = Lucene.Net.Codecs.Lucene42.Lucene42NormsFormat;
-    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
-
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// CompressionCodec that uses <seealso cref="CompressionMode#FAST"/> </summary>
-    [CodecName("FastCompressingStoredFields")]
-    public class FastCompressingCodec : CompressingCodec
-    {
-        /// <summary>
-        /// Constructor that allows configuring the chunk size. </summary>
-        public FastCompressingCodec(int chunkSize, bool withSegmentSuffix)
-            : base(withSegmentSuffix ? "FastCompressingStoredFields" : "", CompressionMode.FAST, chunkSize)
-        {
-        }
-
-        /// <summary>
-        /// Default constructor. </summary>
-        public FastCompressingCodec()
-            : this(1 << 14, false)
-        {
-        }
-
-        public override NormsFormat NormsFormat
-        {
-            get { return new Lucene42NormsFormat(PackedInt32s.FAST); }
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/362f0d30/src/Lucene.Net.TestFramework/Codecs/compressing/FastDecompressionCompressingCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/compressing/FastDecompressionCompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/compressing/FastDecompressionCompressingCodec.cs
deleted file mode 100644
index b5e6df3..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/compressing/FastDecompressionCompressingCodec.cs
+++ /dev/null
@@ -1,47 +0,0 @@
-namespace Lucene.Net.Codecs.Compressing
-{
-    using Lucene42NormsFormat = Lucene.Net.Codecs.Lucene42.Lucene42NormsFormat;
-    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
-
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// CompressionCodec that uses <seealso cref="CompressionMode#FAST_DECOMPRESSION"/> </summary>
-    [CodecName("FastDecompressionCompressingStoredFields")]
-    public class FastDecompressionCompressingCodec : CompressingCodec
-    {
-        /// <summary>
-        /// Constructor that allows configuring the chunk size. </summary>
-        public FastDecompressionCompressingCodec(int chunkSize, bool withSegmentSuffix)
-            : base(withSegmentSuffix ? "FastDecompressionCompressingStoredFields" : "", CompressionMode.FAST_DECOMPRESSION, chunkSize)
-        {
-        }
-
-        /// <summary>
-        /// Default constructor. </summary>
-        public FastDecompressionCompressingCodec()
-            : this(1 << 14, false)
-        {
-        }
-
-        public override NormsFormat NormsFormat
-        {
-            get { return new Lucene42NormsFormat(PackedInt32s.DEFAULT); }
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/362f0d30/src/Lucene.Net.TestFramework/Codecs/compressing/HighCompressionCompressingCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/compressing/HighCompressionCompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/compressing/HighCompressionCompressingCodec.cs
deleted file mode 100644
index 95f78b4..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/compressing/HighCompressionCompressingCodec.cs
+++ /dev/null
@@ -1,47 +0,0 @@
-namespace Lucene.Net.Codecs.Compressing
-{
-    using Lucene42NormsFormat = Lucene.Net.Codecs.Lucene42.Lucene42NormsFormat;
-    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
-
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// CompressionCodec that uses <seealso cref="CompressionMode#HIGH_COMPRESSION"/> </summary>
-    [CodecName("HighCompressionCompressingStoredFields")]
-    public class HighCompressionCompressingCodec : CompressingCodec
-    {
-        /// <summary>
-        /// Constructor that allows configuring the chunk size. </summary>
-        public HighCompressionCompressingCodec(int chunkSize, bool withSegmentSuffix)
-            : base(withSegmentSuffix ? "HighCompressionCompressingStoredFields" : "", CompressionMode.HIGH_COMPRESSION, chunkSize)
-        {
-        }
-
-        /// <summary>
-        /// Default constructor. </summary>
-        public HighCompressionCompressingCodec()
-            : this(1 << 14, false)
-        {
-        }
-
-        public override NormsFormat NormsFormat
-        {
-            get { return new Lucene42NormsFormat(PackedInt32s.COMPACT); }
-        }
-    }
-}
\ No newline at end of file

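The compressing test codecs deleted above differ only in the CompressionMode they pass to the CompressingCodec base constructor and in the PackedInt32s ratio they hand to Lucene42NormsFormat (FAST, DEFAULT, and COMPACT, respectively); the 1 << 14 in each default constructor is a 16 KB chunk size. As a rough sketch of how a test might pick one of these codecs by the name declared in its [CodecName] attribute (illustrative only: it assumes the codec is registered with the codec loader and that IndexWriterConfig exposes a settable Codec property mirroring Java's setCodec):

    // hypothetical usage, not part of this commit
    Codec codec = Codec.ForName("HighCompressionCompressingStoredFields");
    IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    iwc.Codec = codec; // trades indexing speed for smaller stored fields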
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/362f0d30/src/Lucene.Net.TestFramework/Codecs/compressing/dummy/DummyCompressingCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/compressing/dummy/DummyCompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/compressing/dummy/DummyCompressingCodec.cs
deleted file mode 100644
index 4fd416e..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/compressing/dummy/DummyCompressingCodec.cs
+++ /dev/null
@@ -1,113 +0,0 @@
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Compressing.dummy
-{
-    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using DataInput = Lucene.Net.Store.DataInput;
-    using DataOutput = Lucene.Net.Store.DataOutput;
-
-    /// <summary>
-    /// CompressionCodec that does not compress data, useful for testing. </summary>
-    // In its own package to make sure the oal.codecs.compressing classes are
-    // visible enough to let people write their own CompressionMode
-    [CodecName("DummyCompressingStoredFields")]
-    public class DummyCompressingCodec : CompressingCodec
-    {
-        public static readonly CompressionMode DUMMY = new CompressionModeAnonymousInnerClassHelper();
-
-        private class CompressionModeAnonymousInnerClassHelper : CompressionMode
-        {
-            public CompressionModeAnonymousInnerClassHelper()
-            {
-            }
-
-            public override Compressor NewCompressor()
-            {
-                return DUMMY_COMPRESSOR;
-            }
-
-            public override Decompressor NewDecompressor()
-            {
-                return DUMMY_DECOMPRESSOR;
-            }
-
-            public override string ToString()
-            {
-                return "DUMMY";
-            }
-        }
-
-        private static readonly Decompressor DUMMY_DECOMPRESSOR = new DecompressorAnonymousInnerClassHelper();
-
-        private class DecompressorAnonymousInnerClassHelper : Decompressor
-        {
-            public DecompressorAnonymousInnerClassHelper()
-            {
-            }
-
-            public override void Decompress(DataInput @in, int originalLength, int offset, int length, BytesRef bytes)
-            {
-                Debug.Assert(offset + length <= originalLength);
-                if (bytes.Bytes.Length < originalLength)
-                {
-                    bytes.Bytes = new byte[ArrayUtil.Oversize(originalLength, 1)];
-                }
-                @in.ReadBytes(bytes.Bytes, 0, offset + length);
-                bytes.Offset = offset;
-                bytes.Length = length;
-            }
-
-            public override object Clone()
-            {
-                return this;
-            }
-        }
-
-        private static readonly Compressor DUMMY_COMPRESSOR = new CompressorAnonymousInnerClassHelper();
-
-        private class CompressorAnonymousInnerClassHelper : Compressor
-        {
-            public CompressorAnonymousInnerClassHelper()
-            {
-            }
-
-            public override void Compress(byte[] bytes, int off, int len, DataOutput @out)
-            {
-                @out.WriteBytes(bytes, off, len);
-            }
-        }
-
-        /// <summary>
-        /// Constructor that allows configuring the chunk size. </summary>
-        public DummyCompressingCodec(int chunkSize, bool withSegmentSuffix)
-            : base(withSegmentSuffix ? "DummyCompressingStoredFields" : "", DUMMY, chunkSize)
-        {
-        }
-
-        /// <summary>
-        /// Default constructor. </summary>
-        public DummyCompressingCodec()
-            : this(1 << 14, false)
-        {
-        }
-    }
-}
\ No newline at end of file

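DummyCompressingCodec's DUMMY mode is the simplest possible CompressionMode: the compressor writes the input bytes straight through and the decompressor reads them back, which keeps compression out of the way when debugging stored-fields failures. A minimal round-trip sketch (an illustration under assumptions, not code from this commit; it presumes ByteArrayDataOutput/ByteArrayDataInput from Lucene.Net.Store and a buffer large enough for the payload):

    byte[] data = System.Text.Encoding.UTF8.GetBytes("stored field bytes");
    byte[] buffer = new byte[128];
    ByteArrayDataOutput output = new ByteArrayDataOutput(buffer);
    DummyCompressingCodec.DUMMY.NewCompressor().Compress(data, 0, data.Length, output);

    BytesRef bytes = new BytesRef();
    ByteArrayDataInput input = new ByteArrayDataInput(buffer, 0, output.Position);
    DummyCompressingCodec.DUMMY.NewDecompressor().Decompress(input, data.Length, 0, data.Length, bytes);
    // bytes now spans an exact copy of data (offset 0, length data.Length)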
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/362f0d30/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
index f789033..3d3fd87 100644
--- a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
+++ b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
@@ -141,19 +141,19 @@
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Codecs\Bloom\TestBloomFilteredLucene41Postings.cs" />
-    <Compile Include="Codecs\compressing\CompressingCodec.cs">
+    <Compile Include="Codecs\Compressing\CompressingCodec.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\compressing\dummy\DummyCompressingCodec.cs">
+    <Compile Include="Codecs\Compressing\Dummy\DummyCompressingCodec.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\compressing\FastCompressingCodec.cs">
+    <Compile Include="Codecs\Compressing\FastCompressingCodec.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\compressing\FastDecompressionCompressingCodec.cs">
+    <Compile Include="Codecs\Compressing\FastDecompressionCompressingCodec.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\compressing\HighCompressionCompressingCodec.cs">
+    <Compile Include="Codecs\Compressing\HighCompressionCompressingCodec.cs">
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Codecs\lucene3x\PreFlexRWCodec.cs">


[27/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexWriterOnJRECrash.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterOnJRECrash.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterOnJRECrash.cs
new file mode 100644
index 0000000..4254df5
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterOnJRECrash.cs
@@ -0,0 +1,281 @@
+// LUCENENET NOTE: Clearly this test is not applicable to .NET, but just 
+// adding the file to the project for completeness.
+
+//using System;
+//using System.Collections.Generic;
+//using System.Threading;
+//using Lucene.Net.Randomized;
+//using Lucene.Net.Randomized.Generators;
+
+//namespace Lucene.Net.Index
+//{
+
+//    /*
+//    ///  Licensed to the Apache Software Foundation (ASF) under one or more
+//    ///  contributor license agreements.  See the NOTICE file distributed with
+//    ///  this work for additional information regarding copyright ownership.
+//    ///  The ASF licenses this file to You under the Apache License, Version 2.0
+//    ///  (the "License"); you may not use this file except in compliance with
+//    ///  the License.  You may obtain a copy of the License at
+//    /// 
+//    ///      http://www.apache.org/licenses/LICENSE-2.0
+//    /// 
+//    ///  Unless required by applicable law or agreed to in writing, software
+//    ///  distributed under the License is distributed on an "AS IS" BASIS,
+//    ///  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//    ///  See the License for the specific language governing permissions and
+//    ///  limitations under the License.
+//    /// 
+//    */
+
+
+//    using Codec = Lucene.Net.Codecs.Codec;
+//    using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper;
+//    using Constants = Lucene.Net.Util.Constants;
+//    using TestUtil = Lucene.Net.Util.TestUtil;
+
+//    using NUnit.Framework;
+//    using Lucene.Net.Support;
+//    using System.IO;
+//    /// <summary>
+//    /// Runs TestNRTThreads in a separate process, crashes the JRE in the middle
+//    /// of execution, then runs checkindex to make sure it's not corrupt.
+//    /// </summary>
+//    [TestFixture]
+//    public class TestIndexWriterOnJRECrash : TestNRTThreads
+//    {
+//        private DirectoryInfo TempDir;
+
+//        [SetUp]
+//        public override void SetUp()
+//        {
+//            base.SetUp();
+//            TempDir = CreateTempDir("jrecrash");
+//            TempDir.Delete();
+//            TempDir.mkdir();
+//        }
+
+//        [Test]
+//        public override void TestNRTThreads_Mem()
+//        {
+//            // if we are not the fork
+//            if (System.getProperty("tests.crashmode") == null)
+//            {
+//                // try up to 10 times to create an index
+//                for (int i = 0; i < 10; i++)
+//                {
+//                    ForkTest();
+//                    // if we succeeded in finding an index, we are done.
+//                    if (CheckIndexes(TempDir))
+//                    {
+//                        return;
+//                    }
+//                }
+//            }
+//            else
+//            {
+//                // TODO: the non-fork code could simply enable impersonation?
+//                AssumeFalse("does not support PreFlex, see LUCENE-3992", Codec.Default.Name.Equals("Lucene3x"));
+//                // we are the fork, setup a crashing thread
+//                int crashTime = TestUtil.NextInt(Random(), 3000, 4000);
+//                ThreadClass t = new ThreadAnonymousInnerClassHelper(this, crashTime);
+//                t.Priority = ThreadPriority.Highest;
+//                t.Start();
+//                // run the test until we crash.
+//                for (int i = 0; i < 1000; i++)
+//                {
+//                    base.TestNRTThreads_Mem();
+//                }
+//            }
+//        }
+
+//        private class ThreadAnonymousInnerClassHelper : ThreadClass
+//        {
+//            private readonly TestIndexWriterOnJRECrash OuterInstance;
+
+//            private int CrashTime;
+
+//            public ThreadAnonymousInnerClassHelper(TestIndexWriterOnJRECrash outerInstance, int crashTime)
+//            {
+//                this.OuterInstance = outerInstance;
+//                this.CrashTime = crashTime;
+//            }
+
+//            public override void Run()
+//            {
+//                try
+//                {
+//                    Thread.Sleep(CrashTime);
+//                }
+//                catch (ThreadInterruptedException e)
+//                {
+//                }
+//                OuterInstance.CrashJRE();
+//            }
+//        }
+
+//        /// <summary>
+//        /// Fork ourselves in a new JVM. Sets -Dtests.crashmode=true </summary>
+//        public virtual void ForkTest()
+//        {
+//            IList<string> cmd = new List<string>();
+//            cmd.Add(System.getProperty("java.home") + System.getProperty("file.separator") + "bin" + System.getProperty("file.separator") + "java");
+//            cmd.Add("-Xmx512m");
+//            cmd.Add("-Dtests.crashmode=true");
+//            // passing NIGHTLY to this test makes it run for much longer, easier to catch it in the act...
+//            cmd.Add("-Dtests.nightly=true");
+//            cmd.Add("-DtempDir=" + TempDir.Path);
+//            cmd.Add("-Dtests.seed=" + SeedUtils.formatSeed(Random().NextLong()));
+//            cmd.Add("-ea");
+//            cmd.Add("-cp");
+//            cmd.Add(System.getProperty("java.class.path"));
+//            cmd.Add("org.junit.runner.JUnitCore");
+//            cmd.Add(this.GetType().Name);
+//            ProcessBuilder pb = new ProcessBuilder(cmd);
+//            pb.directory(TempDir);
+//            pb.redirectErrorStream(true);
+//            Process p = pb.Start();
+
+//            // We pump everything to stderr.
+//            PrintStream childOut = System.err;
+//            Thread stdoutPumper = ThreadPumper.Start(p.InputStream, childOut);
+//            Thread stderrPumper = ThreadPumper.Start(p.ErrorStream, childOut);
+//            if (VERBOSE)
+//            {
+//                childOut.println(">>> Begin subprocess output");
+//            }
+//            p.waitFor();
+//            stdoutPumper.Join();
+//            stderrPumper.Join();
+//            if (VERBOSE)
+//            {
+//                childOut.println("<<< End subprocess output");
+//            }
+//        }
+
+//        /// <summary>
+//        /// A pipe thread. It'd be nice to reuse guava's implementation for this... </summary>
+//        internal class ThreadPumper
+//        {
+//            public static Thread Start(InputStream from, OutputStream to)
+//            {
+//                ThreadClass t = new ThreadAnonymousInnerClassHelper2(from, to);
+//                t.Start();
+//                return t;
+//            }
+
+//            private class ThreadAnonymousInnerClassHelper2 : ThreadClass
+//            {
+//                private InputStream From;
+//                private OutputStream To;
+
+//                public ThreadAnonymousInnerClassHelper2(InputStream from, OutputStream to)
+//                {
+//                    this.From = from;
+//                    this.To = to;
+//                }
+
+//                public override void Run()
+//                {
+//                    try
+//                    {
+//                        sbyte[] buffer = new sbyte[1024];
+//                        int len;
+//                        while ((len = From.Read(buffer)) != -1)
+//                        {
+//                            if (VERBOSE)
+//                            {
+//                                To.Write(buffer, 0, len);
+//                            }
+//                        }
+//                    }
+//                    catch (IOException e)
+//                    {
+//                        Console.Error.WriteLine("Couldn't pipe from the forked process: " + e.ToString());
+//                    }
+//                }
+//            }
+//        }
+
+//        /// <summary>
+//        /// Recursively looks for indexes underneath <code>file</code>,
+//        /// and runs checkindex on them. Returns true if it found any indexes.
+//        /// </summary>
+//        public virtual bool CheckIndexes(DirectoryInfo file)
+//        {
+//            if (file.IsDirectory)
+//            {
+//                BaseDirectoryWrapper dir = NewFSDirectory(file);
+//                dir.CheckIndexOnClose = false; // don't double-checkindex
+//                if (DirectoryReader.IndexExists(dir))
+//                {
+//                    if (VERBOSE)
+//                    {
+//                        Console.Error.WriteLine("Checking index: " + file);
+//                    }
+//                    // LUCENE-4738: if we crashed while writing the first
+//                    // commit, it's possible the index will be corrupt (by
+//                    // design we don't try to be smart about this case,
+//                    // since that is too risky):
+//                    if (SegmentInfos.GetLastCommitGeneration(dir) > 1)
+//                    {
+//                        TestUtil.CheckIndex(dir);
+//                    }
+//                    dir.Dispose();
+//                    return true;
+//                }
+//                dir.Dispose();
+//                foreach (FileInfo f in file.ListAll())
+//                {
+//                    if (CheckIndexes(f))
+//                    {
+//                        return true;
+//                    }
+//                }
+//            }
+//            return false;
+//        }
+
+//        /// <summary>
+//        /// Currently, this only works on (and is tested with) Sun and IBM JVMs.
+//        /// </summary>
+//        public virtual void CrashJRE()
+//        {
+//            string vendor = Constants.JAVA_VENDOR;
+//            bool supportsUnsafeNpeDereference = vendor.StartsWith("Oracle") || vendor.StartsWith("Sun") || vendor.StartsWith("Apple");
+
+//            try
+//            {
+//                if (supportsUnsafeNpeDereference)
+//                {
+//                    try
+//                    {
+//                        Type clazz = Type.GetType("sun.misc.Unsafe");
+//                        Field field = clazz.GetDeclaredField("theUnsafe");
+//                        field.Accessible = true;
+//                        object o = field.Get(null);
+//                        Method m = clazz.GetMethod("putAddress", typeof(long), typeof(long));
+//                        m.invoke(o, 0L, 0L);
+//                    }
+//                    catch (Exception e)
+//                    {
+//                        Console.WriteLine("Couldn't kill the JVM via Unsafe.");
+//                        Console.WriteLine(e.StackTrace);
+//                    }
+//                }
+
+//                // Fallback attempt to Runtime.halt();
+//                Runtime.Runtime.halt(-1);
+//            }
+//            catch (Exception e)
+//            {
+//                Console.WriteLine("Couldn't kill the JVM.");
+//                Console.WriteLine(e.StackTrace);
+//            }
+
+//            // We couldn't get the JVM to crash for some reason.
+//            Assert.Fail();
+//        }
+//    }
+
+//}
\ No newline at end of file

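Everything in that file is commented out, so nothing runs under .NET; the Java original forks a child JVM, crashes it mid-write, and then runs CheckIndex over whatever the crash left behind. A loose .NET analogue of the fork-and-crash half (purely hypothetical; the filter string and sleep are placeholders mirroring the 3-4 second crashTime above) could lean on System.Diagnostics.Process:

    using System.Diagnostics;

    ProcessStartInfo psi = new ProcessStartInfo("dotnet", "test --filter TestNRTThreads_Mem")
    {
        UseShellExecute = false,
        RedirectStandardOutput = true,
        RedirectStandardError = true
    };
    using (Process child = Process.Start(psi))
    {
        System.Threading.Thread.Sleep(3500); // land inside the crash window
        if (!child.HasExited)
        {
            child.Kill(); // abrupt termination stands in for crashing the JRE
        }
    }
    // afterwards, walk the temp directory and run TestUtil.CheckIndex, as CheckIndexes() does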
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexWriterOutOfFileDescriptors.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterOutOfFileDescriptors.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterOutOfFileDescriptors.cs
new file mode 100644
index 0000000..24a3726
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterOutOfFileDescriptors.cs
@@ -0,0 +1,196 @@
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using System.IO;
+    using Directory = Lucene.Net.Store.Directory;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using LineFileDocs = Lucene.Net.Util.LineFileDocs;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using PrintStreamInfoStream = Lucene.Net.Util.PrintStreamInfoStream;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestIndexWriterOutOfFileDescriptors : LuceneTestCase
+    {
+        [Test]
+        public virtual void Test()
+        {
+            MockDirectoryWrapper dir = NewMockFSDirectory(CreateTempDir("TestIndexWriterOutOfFileDescriptors"));
+            dir.PreventDoubleWrite = false;
+            double rate = Random().NextDouble() * 0.01;
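+            // rate is below 0.01: up to a 1% chance of a simulated IOException on each file open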
+            //System.out.println("rate=" + rate);
+            dir.RandomIOExceptionRateOnOpen = rate;
+            int iters = AtLeast(20);
+            LineFileDocs docs = new LineFileDocs(Random(), DefaultCodecSupportsDocValues());
+            IndexReader r = null;
+            DirectoryReader r2 = null;
+            bool any = false;
+            MockDirectoryWrapper dirCopy = null;
+            int lastNumDocs = 0;
+            for (int iter = 0; iter < iters; iter++)
+            {
+                IndexWriter w = null;
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: iter=" + iter);
+                }
+                try
+                {
+                    MockAnalyzer analyzer = new MockAnalyzer(Random());
+                    analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
+                    IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+
+                    if (VERBOSE)
+                    {
+                        // Do this ourselves instead of relying on LTC so
+                        // we see incrementing messageID:
+                        iwc.InfoStream = new PrintStreamInfoStream(Console.Out);
+                    }
+                    var ms = iwc.MergeScheduler;
+                    if (ms is IConcurrentMergeScheduler)
+                    {
+                        ((IConcurrentMergeScheduler)ms).SetSuppressExceptions();
+                    }
+                    w = new IndexWriter(dir, iwc);
+                    if (r != null && Random().Next(5) == 3)
+                    {
+                        if (Random().NextBoolean())
+                        {
+                            if (VERBOSE)
+                            {
+                                Console.WriteLine("TEST: addIndexes IR[]");
+                            }
+                            w.AddIndexes(new IndexReader[] { r });
+                        }
+                        else
+                        {
+                            if (VERBOSE)
+                            {
+                                Console.WriteLine("TEST: addIndexes Directory[]");
+                            }
+                            w.AddIndexes(new Directory[] { dirCopy });
+                        }
+                    }
+                    else
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: addDocument");
+                        }
+                        w.AddDocument(docs.NextDoc());
+                    }
+                    dir.RandomIOExceptionRateOnOpen = 0.0;
+                    w.Dispose();
+                    w = null;
+
+                    // NOTE: this is O(N^2)!  Only enable for temporary debugging:
+                    //dir.setRandomIOExceptionRateOnOpen(0.0);
+                    //TestUtil.CheckIndex(dir);
+                    //dir.setRandomIOExceptionRateOnOpen(rate);
+
+                    // Verify numDocs only increases, to catch IndexWriter
+                    // accidentally deleting the index:
+                    dir.RandomIOExceptionRateOnOpen = 0.0;
+                    Assert.IsTrue(DirectoryReader.IndexExists(dir));
+                    if (r2 == null)
+                    {
+                        r2 = DirectoryReader.Open(dir);
+                    }
+                    else
+                    {
+                        DirectoryReader r3 = DirectoryReader.OpenIfChanged(r2);
+                        if (r3 != null)
+                        {
+                            r2.Dispose();
+                            r2 = r3;
+                        }
+                    }
+                    Assert.IsTrue(r2.NumDocs >= lastNumDocs, "before=" + lastNumDocs + " after=" + r2.NumDocs);
+                    lastNumDocs = r2.NumDocs;
+                    //System.out.println("numDocs=" + lastNumDocs);
+                    dir.RandomIOExceptionRateOnOpen = rate;
+
+                    any = true;
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: iter=" + iter + ": success");
+                    }
+                }
+                catch (IOException ioe)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: iter=" + iter + ": exception");
+                        Console.WriteLine(ioe.ToString());
+                        Console.Write(ioe.StackTrace);
+                    }
+                    if (w != null)
+                    {
+                        // NOTE: leave random IO exceptions enabled here,
+                        // to verify that rollback does not try to write
+                        // anything:
+                        w.Rollback();
+                    }
+                }
+
+                if (any && r == null && Random().NextBoolean())
+                {
+                    // Make a copy of a non-empty index so we can use
+                    // it to addIndexes later:
+                    dir.RandomIOExceptionRateOnOpen = 0.0;
+                    r = DirectoryReader.Open(dir);
+                    dirCopy = NewMockFSDirectory(CreateTempDir("TestIndexWriterOutOfFileDescriptors.copy"));
+                    HashSet<string> files = new HashSet<string>();
+                    foreach (string file in dir.ListAll())
+                    {
+                        dir.Copy(dirCopy, file, file, IOContext.DEFAULT);
+                        files.Add(file);
+                    }
+                    dirCopy.Sync(files);
+                    // Have IW kiss the dir so we remove any leftover
+                    // files ... we can easily have leftover files at
+                    // the time we take a copy because we are holding
+                    // open a reader:
+                    (new IndexWriter(dirCopy, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())))).Dispose();
+                    dirCopy.RandomIOExceptionRate = rate;
+                    dir.RandomIOExceptionRateOnOpen = rate;
+                }
+            }
+
+            if (r2 != null)
+            {
+                r2.Dispose();
+            }
+            if (r != null)
+            {
+                r.Dispose();
+                dirCopy.Dispose();
+            }
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

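The invariant this test protects is that NumDocs never goes backwards across reopens, even while file opens randomly throw IOExceptions. Distilled to its core (a sketch for illustration, not code from the commit), the reopen-and-check pattern is:

    DirectoryReader reader = DirectoryReader.Open(dir);
    int lastNumDocs = reader.NumDocs;
    // ... index more documents, possibly hitting simulated IOExceptions ...
    DirectoryReader newer = DirectoryReader.OpenIfChanged(reader);
    if (newer != null) // null means nothing changed since the last open
    {
        reader.Dispose();
        reader = newer;
    }
    Assert.IsTrue(reader.NumDocs >= lastNumDocs, "index lost documents");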
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs
new file mode 100644
index 0000000..9207b98
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs
@@ -0,0 +1,1419 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Threading;
+using Lucene.Net.Attributes;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.Collections.Concurrent;
+    using Util;
+    using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using FakeIOException = Lucene.Net.Store.MockDirectoryWrapper.FakeIOException;
+    using Field = Field;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using InfoStream = Lucene.Net.Util.InfoStream;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+        /// Copyright 2004 The Apache Software Foundation
+        ///
+        /// Licensed under the Apache License, Version 2.0 (the "License");
+        /// you may not use this file except in compliance with the License.
+        /// You may obtain a copy of the License at
+        ///
+        ///     http://www.apache.org/licenses/LICENSE-2.0
+        ///
+        /// Unless required by applicable law or agreed to in writing, software
+        /// distributed under the License is distributed on an "AS IS" BASIS,
+        /// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+        /// See the License for the specific language governing permissions and
+        /// limitations under the License.
+        */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using Query = Lucene.Net.Search.Query;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+    using TopDocs = Lucene.Net.Search.TopDocs;
+
+    [TestFixture]
+    public class TestIndexWriterReader : LuceneTestCase
+    {
+        private readonly int NumThreads = TEST_NIGHTLY ? 5 : 3;
+
+        public static int Count(Term t, IndexReader r)
+        {
+            int count = 0;
+            DocsEnum td = TestUtil.Docs(Random(), r, t.Field, new BytesRef(t.Text()), MultiFields.GetLiveDocs(r), null, 0);
+
+            if (td != null)
+            {
+                while (td.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+                {
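+                    // read and discard the doc ID; only the count of matching docs matters here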
+                    var _ = td.DocID;
+                    count++;
+                }
+            }
+            return count;
+        }
+
+        [Test]
+        public virtual void TestAddCloseOpen()
+        {
+            // Can't use assertNoDeletes: this test pulls a non-NRT
+            // reader in the end:
+            Directory dir1 = NewDirectory();
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+
+            IndexWriter writer = new IndexWriter(dir1, iwc);
+            for (int i = 0; i < 97; i++)
+            {
+                DirectoryReader reader = writer.Reader;
+                if (i == 0)
+                {
+                    writer.AddDocument(DocHelper.CreateDocument(i, "x", 1 + Random().Next(5)));
+                }
+                else
+                {
+                    int previous = Random().Next(i);
+                    // a check whether the reader is current here could fail, since there
+                    // might be merges going on.
+                    switch (Random().Next(5))
+                    {
+                        case 0:
+                        case 1:
+                        case 2:
+                            writer.AddDocument(DocHelper.CreateDocument(i, "x", 1 + Random().Next(5)));
+                            break;
+
+                        case 3:
+                            writer.UpdateDocument(new Term("id", "" + previous), DocHelper.CreateDocument(previous, "x", 1 + Random().Next(5)));
+                            break;
+
+                        case 4:
+                            writer.DeleteDocuments(new Term("id", "" + previous));
+                            break;
+                    }
+                }
+                Assert.IsFalse(reader.IsCurrent);
+                reader.Dispose();
+            }
+            writer.ForceMerge(1); // make sure all merging is done etc.
+            DirectoryReader dirReader = writer.Reader;
+            writer.Commit(); // no changes that are not visible to the reader
+            Assert.IsTrue(dirReader.IsCurrent);
+            writer.Dispose();
+            Assert.IsTrue(dirReader.IsCurrent); // all changes are visible to the reader
+            iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            writer = new IndexWriter(dir1, iwc);
+            Assert.IsTrue(dirReader.IsCurrent);
+            writer.AddDocument(DocHelper.CreateDocument(1, "x", 1 + Random().Next(5)));
+            Assert.IsTrue(dirReader.IsCurrent); // segments are in RAM, but this IW is different from the reader's one
+            writer.Dispose();
+            Assert.IsFalse(dirReader.IsCurrent); // segments written
+            dirReader.Dispose();
+            dir1.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateDocument()
+        {
+            bool doFullMerge = true;
+
+            Directory dir1 = NewDirectory();
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            if (iwc.MaxBufferedDocs < 20)
+            {
+                iwc.SetMaxBufferedDocs(20);
+            }
+            // no merging
+            if (Random().NextBoolean())
+            {
+                iwc.SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES);
+            }
+            else
+            {
+                iwc.SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+            }
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: make index");
+            }
+            IndexWriter writer = new IndexWriter(dir1, iwc);
+
+            // create the index
+            CreateIndexNoClose(!doFullMerge, "index1", writer);
+
+            // writer.Flush(false, true, true);
+
+            // get a reader
+            DirectoryReader r1 = writer.Reader;
+            Assert.IsTrue(r1.IsCurrent);
+
+            string id10 = r1.Document(10).GetField("id").GetStringValue();
+
+            Document newDoc = r1.Document(10);
+            newDoc.RemoveField("id");
+            newDoc.Add(NewStringField("id", Convert.ToString(8000), Field.Store.YES));
+            writer.UpdateDocument(new Term("id", id10), newDoc);
+            Assert.IsFalse(r1.IsCurrent);
+
+            DirectoryReader r2 = writer.Reader;
+            Assert.IsTrue(r2.IsCurrent);
+            Assert.AreEqual(0, Count(new Term("id", id10), r2));
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: verify id");
+            }
+            Assert.AreEqual(1, Count(new Term("id", Convert.ToString(8000)), r2));
+
+            r1.Dispose();
+            Assert.IsTrue(r2.IsCurrent);
+            writer.Dispose();
+            Assert.IsTrue(r2.IsCurrent);
+
+            DirectoryReader r3 = DirectoryReader.Open(dir1);
+            Assert.IsTrue(r3.IsCurrent);
+            Assert.IsTrue(r2.IsCurrent);
+            Assert.AreEqual(0, Count(new Term("id", id10), r3));
+            Assert.AreEqual(1, Count(new Term("id", Convert.ToString(8000)), r3));
+
+            writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "a b c", Field.Store.NO));
+            writer.AddDocument(doc);
+            Assert.IsTrue(r2.IsCurrent);
+            Assert.IsTrue(r3.IsCurrent);
+
+            writer.Dispose();
+
+            Assert.IsFalse(r2.IsCurrent);
+            Assert.IsTrue(!r3.IsCurrent);
+
+            r2.Dispose();
+            r3.Dispose();
+
+            dir1.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIsCurrent()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+
+            IndexWriter writer = new IndexWriter(dir, iwc);
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "a b c", Field.Store.NO));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            writer = new IndexWriter(dir, iwc);
+            doc = new Document();
+            doc.Add(NewTextField("field", "a b c", Field.Store.NO));
+            DirectoryReader nrtReader = writer.Reader;
+            Assert.IsTrue(nrtReader.IsCurrent);
+            writer.AddDocument(doc);
+            Assert.IsFalse(nrtReader.IsCurrent); // should see the changes
+            writer.ForceMerge(1); // make sure we don't have a merge going on
+            Assert.IsFalse(nrtReader.IsCurrent);
+            nrtReader.Dispose();
+
+            DirectoryReader dirReader = DirectoryReader.Open(dir);
+            nrtReader = writer.Reader;
+
+            Assert.IsTrue(dirReader.IsCurrent);
+            Assert.IsTrue(nrtReader.IsCurrent); // nothing was committed yet so we are still current
+            Assert.AreEqual(2, nrtReader.MaxDoc); // sees the actual document added
+            Assert.AreEqual(1, dirReader.MaxDoc);
+            writer.Dispose(); // close is actually a commit; both should see the changes
+            Assert.IsTrue(nrtReader.IsCurrent);
+            Assert.IsFalse(dirReader.IsCurrent); // this reader was opened before the writer was closed / committed
+
+            dirReader.Dispose();
+            nrtReader.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// Test using IW.addIndexes
+        /// </summary>
+        [Test]
+        public virtual void TestAddIndexes()
+        {
+            bool doFullMerge = false;
+
+            Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            if (iwc.MaxBufferedDocs < 20)
+            {
+                iwc.SetMaxBufferedDocs(20);
+            }
+            // no merging
+            if (Random().NextBoolean())
+            {
+                iwc.SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES);
+            }
+            else
+            {
+                iwc.SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+            }
+            IndexWriter writer = new IndexWriter(dir1, iwc);
+
+            // create the index
+            CreateIndexNoClose(!doFullMerge, "index1", writer);
+            writer.Flush(false, true);
+
+            // create a 2nd index
+            Directory dir2 = NewDirectory();
+            IndexWriter writer2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            CreateIndexNoClose(!doFullMerge, "index2", writer2);
+            writer2.Dispose();
+
+            DirectoryReader r0 = writer.Reader;
+            Assert.IsTrue(r0.IsCurrent);
+            writer.AddIndexes(dir2);
+            Assert.IsFalse(r0.IsCurrent);
+            r0.Dispose();
+
+            DirectoryReader r1 = writer.Reader;
+            Assert.IsTrue(r1.IsCurrent);
+
+            writer.Commit();
+            Assert.IsTrue(r1.IsCurrent); // we have seen all changes - no change after opening the NRT reader
+
+            Assert.AreEqual(200, r1.MaxDoc);
+
+            int index2df = r1.DocFreq(new Term("indexname", "index2"));
+
+            Assert.AreEqual(100, index2df);
+
+            // verify the docs are from different indexes
+            Document doc5 = r1.Document(5);
+            Assert.AreEqual("index1", doc5.Get("indexname"));
+            Document doc150 = r1.Document(150);
+            Assert.AreEqual("index2", doc150.Get("indexname"));
+            r1.Dispose();
+            writer.Dispose();
+            dir1.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void ExposeCompTermVR()
+        {
+            bool doFullMerge = false;
+            Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            if (iwc.MaxBufferedDocs < 20)
+            {
+                iwc.SetMaxBufferedDocs(20);
+            }
+            // no merging
+            if (Random().NextBoolean())
+            {
+                iwc.SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES);
+            }
+            else
+            {
+                iwc.SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+            }
+            IndexWriter writer = new IndexWriter(dir1, iwc);
+            CreateIndexNoClose(!doFullMerge, "index1", writer);
+            writer.Dispose();
+            dir1.Dispose();
+        }
+
+        [Test]
+        public virtual void TestAddIndexes2()
+        {
+            bool doFullMerge = false;
+
+            Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
+            IndexWriter writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            // create a 2nd index
+            Directory dir2 = NewDirectory();
+            IndexWriter writer2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            CreateIndexNoClose(!doFullMerge, "index2", writer2);
+            writer2.Dispose();
+
+            writer.AddIndexes(dir2);
+            writer.AddIndexes(dir2);
+            writer.AddIndexes(dir2);
+            writer.AddIndexes(dir2);
+            writer.AddIndexes(dir2);
+
+            IndexReader r1 = writer.Reader;
+            Assert.AreEqual(500, r1.MaxDoc);
+
+            r1.Dispose();
+            writer.Dispose();
+            dir1.Dispose();
+            dir2.Dispose();
+        }
+
+        /// <summary>
+        /// Deletes using IW.deleteDocuments
+        /// </summary>
+        [Test]
+        public virtual void TestDeleteFromIndexWriter()
+        {
+            bool doFullMerge = true;
+
+            Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
+            IndexWriter writer = new IndexWriter(dir1, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetReaderTermsIndexDivisor(2));
+            // create the index
+            CreateIndexNoClose(!doFullMerge, "index1", writer);
+            writer.Flush(false, true);
+            // get a reader
+            IndexReader r1 = writer.Reader;
+
+            string id10 = r1.Document(10).GetField("id").GetStringValue();
+
+            // deleted IW docs should not show up in the next getReader
+            writer.DeleteDocuments(new Term("id", id10));
+            IndexReader r2 = writer.Reader;
+            Assert.AreEqual(1, Count(new Term("id", id10), r1));
+            Assert.AreEqual(0, Count(new Term("id", id10), r2));
+
+            string id50 = r1.Document(50).GetField("id").GetStringValue();
+            Assert.AreEqual(1, Count(new Term("id", id50), r1));
+
+            writer.DeleteDocuments(new Term("id", id50));
+
+            IndexReader r3 = writer.Reader;
+            Assert.AreEqual(0, Count(new Term("id", id10), r3));
+            Assert.AreEqual(0, Count(new Term("id", id50), r3));
+
+            string id75 = r1.Document(75).GetField("id").GetStringValue();
+            writer.DeleteDocuments(new TermQuery(new Term("id", id75)));
+            IndexReader r4 = writer.Reader;
+            Assert.AreEqual(1, Count(new Term("id", id75), r3));
+            Assert.AreEqual(0, Count(new Term("id", id75), r4));
+
+            r1.Dispose();
+            r2.Dispose();
+            r3.Dispose();
+            r4.Dispose();
+            writer.Dispose();
+
+            // reopen the writer to verify the delete made it to the directory
+            writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            IndexReader w2r1 = writer.Reader;
+            Assert.AreEqual(0, Count(new Term("id", id10), w2r1));
+            w2r1.Dispose();
+            writer.Dispose();
+            dir1.Dispose();
+        }
+
+        [Test]
+        public virtual void TestAddIndexesAndDoDeletesThreads()
+        {
+            const int numIter = 2;
+            int numDirs = 3;
+
+            Directory mainDir = GetAssertNoDeletesDirectory(NewDirectory());
+
+            IndexWriter mainWriter = new IndexWriter(mainDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            TestUtil.ReduceOpenFiles(mainWriter);
+
+            AddDirectoriesThreads addDirThreads = new AddDirectoriesThreads(this, numIter, mainWriter);
+            addDirThreads.LaunchThreads(numDirs);
+            addDirThreads.JoinThreads();
+
+            //Assert.AreEqual(100 + numDirs * (3 * numIter / 4) * addDirThreads.numThreads
+            //    * addDirThreads.NUM_INIT_DOCS, addDirThreads.mainWriter.NumDocs);
+            Assert.AreEqual(addDirThreads.Count.Get(), addDirThreads.MainWriter.NumDocs);
+
+            addDirThreads.Close(true);
+
+            Assert.IsTrue(addDirThreads.Failures.Count == 0);
+
+            TestUtil.CheckIndex(mainDir);
+
+            IndexReader reader = DirectoryReader.Open(mainDir);
+            Assert.AreEqual(addDirThreads.Count.Get(), reader.NumDocs);
+            //Assert.AreEqual(100 + numDirs * (3 * numIter / 4) * addDirThreads.numThreads
+            //    * addDirThreads.NUM_INIT_DOCS, reader.NumDocs);
+            reader.Dispose();
+
+            addDirThreads.CloseDir();
+            mainDir.Dispose();
+        }
+
+        private class AddDirectoriesThreads
+        {
+            internal virtual void InitializeInstanceFields()
+            {
+                Threads = new ThreadClass[OuterInstance.NumThreads];
+            }
+
+            private readonly TestIndexWriterReader OuterInstance;
+
+            internal Directory AddDir;
+            internal const int NUM_INIT_DOCS = 100;
+            internal int NumDirs;
+            internal ThreadClass[] Threads;
+            internal IndexWriter MainWriter;
+            internal readonly IList<Exception> Failures = new List<Exception>();
+            internal IndexReader[] Readers;
+            internal bool DidClose = false;
+            internal AtomicInt32 Count = new AtomicInt32(0);
+            internal AtomicInt32 NumaddIndexes = new AtomicInt32(0);
+
+            public AddDirectoriesThreads(TestIndexWriterReader outerInstance, int numDirs, IndexWriter mainWriter)
+            {
+                this.OuterInstance = outerInstance;
+
+                InitializeInstanceFields();
+                this.NumDirs = numDirs;
+                this.MainWriter = mainWriter;
+                AddDir = NewDirectory();
+                IndexWriter writer = new IndexWriter(AddDir, OuterInstance.NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
+                TestUtil.ReduceOpenFiles(writer);
+                for (int i = 0; i < NUM_INIT_DOCS; i++)
+                {
+                    Document doc = DocHelper.CreateDocument(i, "addindex", 4);
+                    writer.AddDocument(doc);
+                }
+
+                writer.Dispose();
+
+                Readers = new IndexReader[numDirs];
+                for (int i = 0; i < numDirs; i++)
+                {
+                    Readers[i] = DirectoryReader.Open(AddDir);
+                }
+            }
+
+            internal virtual void JoinThreads()
+            {
+                for (int i = 0; i < OuterInstance.NumThreads; i++)
+                {
+#if !NETSTANDARD
+                    try
+                    {
+#endif
+                    Threads[i].Join();
+#if !NETSTANDARD
+                    }
+                    catch (ThreadInterruptedException ie)
+                    {
+                        throw new ThreadInterruptedException("Thread Interrupted Exception", ie);
+                    }
+#endif
+                }
+            }
+
+            internal virtual void Close(bool doWait)
+            {
+                DidClose = true;
+                if (doWait)
+                {
+                    MainWriter.WaitForMerges();
+                }
+                MainWriter.Dispose(doWait);
+            }
+
+            internal virtual void CloseDir()
+            {
+                for (int i = 0; i < NumDirs; i++)
+                {
+                    Readers[i].Dispose();
+                }
+                AddDir.Dispose();
+            }
+
+            internal virtual void Handle(Exception t)
+            {
+                Console.WriteLine(t.StackTrace);
+                lock (Failures)
+                {
+                    Failures.Add(t);
+                }
+            }
+
+            internal virtual void LaunchThreads(int numIter)
+            {
+                for (int i = 0; i < OuterInstance.NumThreads; i++)
+                {
+                    Threads[i] = new ThreadAnonymousInnerClassHelper(this, numIter);
+                }
+                for (int i = 0; i < OuterInstance.NumThreads; i++)
+                {
+                    Threads[i].Start();
+                }
+            }
+
+            private class ThreadAnonymousInnerClassHelper : ThreadClass
+            {
+                private readonly AddDirectoriesThreads OuterInstance;
+
+                private int NumIter;
+
+                public ThreadAnonymousInnerClassHelper(AddDirectoriesThreads outerInstance, int numIter)
+                {
+                    this.OuterInstance = outerInstance;
+                    this.NumIter = numIter;
+                }
+
+                public override void Run()
+                {
+                    try
+                    {
+                        Directory[] dirs = new Directory[OuterInstance.NumDirs];
+                        for (int k = 0; k < OuterInstance.NumDirs; k++)
+                        {
+                            dirs[k] = new MockDirectoryWrapper(Random(), new RAMDirectory(OuterInstance.AddDir, NewIOContext(Random())));
+                        }
+                        //int j = 0;
+                        //while (true) {
+                        // System.out.println(Thread.currentThread().getName() + ": iter
+                        // j=" + j);
+                        for (int x = 0; x < NumIter; x++)
+                        {
+                            // only do addIndexes
+                            OuterInstance.DoBody(x, dirs);
+                        }
+                        //if (numIter > 0 && j == numIter)
+                        //  break;
+                        //doBody(j++, dirs);
+                        //doBody(5, dirs);
+                        //}
+                    }
+                    catch (Exception t)
+                    {
+                        OuterInstance.Handle(t);
+                    }
+                }
+            }
+
+            internal virtual void DoBody(int j, Directory[] dirs)
+            {
+                switch (j % 4)
+                {
+                    case 0:
+                        MainWriter.AddIndexes(dirs);
+                        MainWriter.ForceMerge(1);
+                        break;
+
+                    case 1:
+                        MainWriter.AddIndexes(dirs);
+                        NumaddIndexes.IncrementAndGet();
+                        break;
+
+                    case 2:
+                        MainWriter.AddIndexes(Readers);
+                        break;
+
+                    case 3:
+                        MainWriter.Commit();
+                        break;
+                }
+                Count.AddAndGet(dirs.Length * NUM_INIT_DOCS);
+            }
+        }
+
+        [Test]
+        public virtual void TestIndexWriterReopenSegmentFullMerge()
+        {
+            DoTestIndexWriterReopenSegment(true);
+        }
+
+        [Test]
+        public virtual void TestIndexWriterReopenSegment()
+        {
+            DoTestIndexWriterReopenSegment(false);
+        }
+
+        /// <summary>
+        /// Tests creating a segment, then checks to ensure the segment can be seen via
+        /// IW.getReader
+        /// </summary>
+        public virtual void DoTestIndexWriterReopenSegment(bool doFullMerge)
+        {
+            Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
+            IndexWriter writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            IndexReader r1 = writer.Reader;
+            Assert.AreEqual(0, r1.MaxDoc);
+            CreateIndexNoClose(false, "index1", writer);
+            writer.Flush(!doFullMerge, true);
+
+            IndexReader iwr1 = writer.Reader;
+            Assert.AreEqual(100, iwr1.MaxDoc);
+
+            IndexReader r2 = writer.Reader;
+            Assert.AreEqual(r2.MaxDoc, 100);
+            // add 100 documents
+            for (int x = 10000; x < 10000 + 100; x++)
+            {
+                Document d = DocHelper.CreateDocument(x, "index1", 5);
+                writer.AddDocument(d);
+            }
+            writer.Flush(false, true);
+            // verify the reader was reopened internally
+            IndexReader iwr2 = writer.Reader;
+            Assert.IsTrue(iwr2 != r1);
+            Assert.AreEqual(200, iwr2.MaxDoc);
+            // should have flushed out a segment
+            IndexReader r3 = writer.Reader;
+            Assert.IsTrue(r2 != r3);
+            Assert.AreEqual(200, r3.MaxDoc);
+
+            // dec ref the readers rather than close them because
+            // closing flushes changes to the writer
+            r1.Dispose();
+            iwr1.Dispose();
+            r2.Dispose();
+            r3.Dispose();
+            iwr2.Dispose();
+            writer.Dispose();
+
+            // test whether the changes made it to the directory
+            writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            IndexReader w2r1 = writer.Reader;
+            // ensure the deletes were actually flushed to the directory
+            Assert.AreEqual(200, w2r1.MaxDoc);
+            w2r1.Dispose();
+            writer.Dispose();
+
+            dir1.Dispose();
+        }
+
+        /*
+         * Delete a document by term and return the doc id
+         *
+         * public static int deleteDocument(Term term, IndexWriter writer) throws IOException {
+         *     IndexReader reader = writer.GetReader();
+         *     TermDocs td = reader.termDocs(term);
+         *     int doc = -1;
+         *     //if (td.Next()) {
+         *     //  doc = td.Doc();
+         *     //}
+         *     //writer.DeleteDocuments(term);
+         *     td.Dispose();
+         *     return doc;
+         * }
+         */
+
+        public void CreateIndex(Random random, Directory dir1, string indexName, bool multiSegment)
+        {
+            IndexWriter w = new IndexWriter(dir1, NewIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetMergePolicy(new LogDocMergePolicy()));
+            for (int i = 0; i < 100; i++)
+            {
+                w.AddDocument(DocHelper.CreateDocument(i, indexName, 4));
+            }
+            if (!multiSegment)
+            {
+                w.ForceMerge(1);
+            }
+            w.Dispose();
+        }
+
+        public static void CreateIndexNoClose(bool multiSegment, string indexName, IndexWriter w)
+        {
+            for (int i = 0; i < 100; i++)
+            {
+                w.AddDocument(DocHelper.CreateDocument(i, indexName, 4));
+            }
+            if (!multiSegment)
+            {
+                w.ForceMerge(1);
+            }
+        }
+
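+        // Warmer that simply counts how many merged-segment readers it is asked to warm.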
+        private class MyWarmer : IndexWriter.IndexReaderWarmer
+        {
+            internal int WarmCount;
+
+            public override void Warm(AtomicReader reader)
+            {
+                WarmCount++;
+            }
+        }
+
+        [Test]
+        public virtual void TestMergeWarmer([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
+            // Enroll warmer
+            MyWarmer warmer = new MyWarmer();
+            var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                            .SetMaxBufferedDocs(2)
+                            .SetMergedSegmentWarmer(warmer)
+                            .SetMergeScheduler(scheduler)
+                            .SetMergePolicy(NewLogMergePolicy());
+            IndexWriter writer = new IndexWriter(dir1, config);
+
+            // create the index
+            CreateIndexNoClose(false, "test", writer);
+
+            // get a reader to put writer into near real-time mode
+            IndexReader r1 = writer.Reader;
+
+            ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 2;
+
+            //int num = AtLeast(100);
+            int num = 101;
+            for (int i = 0; i < num; i++)
+            {
+                writer.AddDocument(DocHelper.CreateDocument(i, "test", 4));
+            }
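+            // Block until all in-flight merges finish so the warmer has had a chance to run.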
+            ((IConcurrentMergeScheduler)writer.Config.MergeScheduler).Sync();
+
+            Assert.IsTrue(warmer.WarmCount > 0);
+            Console.WriteLine("Count {0}", warmer.WarmCount);
+            int count = warmer.WarmCount;
+
+            var newDocument = DocHelper.CreateDocument(17, "test", 4);
+            writer.AddDocument(newDocument);
+            writer.ForceMerge(1);
+            Assert.IsTrue(warmer.WarmCount > count);
+
+            writer.Dispose();
+            r1.Dispose();
+            dir1.Dispose();
+        }
+
+        [Test]
+        public virtual void TestAfterCommit([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
+            var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(scheduler);
+            IndexWriter writer = new IndexWriter(dir1, config);
+            writer.Commit();
+
+            // create the index
+            CreateIndexNoClose(false, "test", writer);
+
+            // get a reader to put writer into near real-time mode
+            DirectoryReader r1 = writer.Reader;
+            TestUtil.CheckIndex(dir1);
+            writer.Commit();
+            TestUtil.CheckIndex(dir1);
+            Assert.AreEqual(100, r1.NumDocs);
+
+            for (int i = 0; i < 10; i++)
+            {
+                writer.AddDocument(DocHelper.CreateDocument(i, "test", 4));
+            }
+            ((IConcurrentMergeScheduler)writer.Config.MergeScheduler).Sync();
+
+            DirectoryReader r2 = DirectoryReader.OpenIfChanged(r1);
+            if (r2 != null)
+            {
+                r1.Dispose();
+                r1 = r2;
+            }
+            Assert.AreEqual(110, r1.NumDocs);
+            writer.Dispose();
+            r1.Dispose();
+            dir1.Dispose();
+        }
+
+        // Make sure reader remains usable even if IndexWriter closes
+        [Test]
+        public virtual void TestAfterClose()
+        {
+            Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
+            IndexWriter writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            // create the index
+            CreateIndexNoClose(false, "test", writer);
+
+            DirectoryReader r = writer.Reader;
+            writer.Dispose();
+
+            TestUtil.CheckIndex(dir1);
+
+            // reader should remain usable even after IndexWriter is closed:
+            Assert.AreEqual(100, r.NumDocs);
+            Query q = new TermQuery(new Term("indexname", "test"));
+            IndexSearcher searcher = NewSearcher(r);
+            Assert.AreEqual(100, searcher.Search(q, 10).TotalHits);
+            try
+            {
+                DirectoryReader.OpenIfChanged(r);
+                Assert.Fail("failed to hit AlreadyClosedException");
+            }
+#pragma warning disable 168
+            catch (AlreadyClosedException ace)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            r.Dispose();
+            dir1.Dispose();
+        }
+
+        // Stress test reopen during addIndexes
+        [Test]
+        public virtual void TestDuringAddIndexes()
+        {
+            Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
+            IndexWriter writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy(2)));
+
+            // create the index
+            CreateIndexNoClose(false, "test", writer);
+            writer.Commit();
+
+            Directory[] dirs = new Directory[10];
+            for (int i = 0; i < 10; i++)
+            {
+                dirs[i] = new MockDirectoryWrapper(Random(), new RAMDirectory(dir1, NewIOContext(Random())));
+            }
+
+            DirectoryReader r = writer.Reader;
+
+            const float SECONDS = 0.5f;
+
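+            // Environment.TickCount is in milliseconds, so the stress loop below runs for SECONDS * 1000 ms.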
+            long endTime = (long)(Environment.TickCount + 1000.0 * SECONDS);
+            IList<Exception> excs = new SynchronizedList<Exception>();
+
+            // Only one thread can addIndexes at a time, because
+            // IndexWriter acquires a write lock in each directory:
+            var threads = new ThreadClass[1];
+            for (int i = 0; i < threads.Length; i++)
+            {
+                threads[i] = new ThreadAnonymousInnerClassHelper(writer, dirs, endTime, excs);
+                threads[i].SetDaemon(true);
+                threads[i].Start();
+            }
+
+            int lastCount = 0;
+            while (Environment.TickCount < endTime)
+            {
+                DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
+                if (r2 != null)
+                {
+                    r.Dispose();
+                    r = r2;
+                }
+                Query q = new TermQuery(new Term("indexname", "test"));
+                IndexSearcher searcher = NewSearcher(r);
+                int count = searcher.Search(q, 10).TotalHits;
+                Assert.IsTrue(count >= lastCount);
+                lastCount = count;
+            }
+
+            for (int i = 0; i < threads.Length; i++)
+            {
+                threads[i].Join();
+            }
+            // final check
+            DirectoryReader dr2 = DirectoryReader.OpenIfChanged(r);
+            if (dr2 != null)
+            {
+                r.Dispose();
+                r = dr2;
+            }
+            Query q2 = new TermQuery(new Term("indexname", "test"));
+            IndexSearcher searcher_ = NewSearcher(r);
+            int count_ = searcher_.Search(q2, 10).TotalHits;
+            Assert.IsTrue(count_ >= lastCount);
+
+            Assert.AreEqual(0, excs.Count);
+            r.Dispose();
+            if (dir1 is MockDirectoryWrapper)
+            {
+                ICollection<string> openDeletedFiles = ((MockDirectoryWrapper)dir1).OpenDeletedFiles;
+                Assert.AreEqual(0, openDeletedFiles.Count, "openDeleted=" + openDeletedFiles);
+            }
+
+            writer.Dispose();
+
+            dir1.Dispose();
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private IndexWriter Writer;
+            private Directory[] Dirs;
+            private long EndTime;
+            private IList<Exception> Excs;
+
+            public ThreadAnonymousInnerClassHelper(IndexWriter writer, Directory[] dirs, long endTime, IList<Exception> excs)
+            {
+                this.Writer = writer;
+                this.Dirs = dirs;
+                this.EndTime = endTime;
+                this.Excs = excs;
+            }
+
+            public override void Run()
+            {
+                do
+                {
+                    try
+                    {
+                        Writer.AddIndexes(Dirs);
+                        Writer.MaybeMerge();
+                    }
+                    catch (Exception t)
+                    {
+                        Excs.Add(t);
+                        throw new Exception(t.Message, t);
+                    }
+                } while (Environment.TickCount < EndTime);
+            }
+        }
+
+        private Directory GetAssertNoDeletesDirectory(Directory directory)
+        {
+            if (directory is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)directory).AssertNoDeleteOpenFile = true;
+            }
+            return directory;
+        }
+
+        // Stress test reopen during add/delete
+        [Test]
+        public virtual void TestDuringAddDelete()
+        {
+            Directory dir1 = NewDirectory();
+            var writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy(2)));
+
+            // create the index
+            CreateIndexNoClose(false, "test", writer);
+            writer.Commit();
+
+            DirectoryReader r = writer.Reader;
+
+            const float SECONDS = 0.5f;
+
+            long endTime = (long)(Environment.TickCount + 1000.0 * SECONDS);
+            ConcurrentQueue<Exception> excs = new ConcurrentQueue<Exception>();
+
+            var threads = new ThreadClass[NumThreads];
+            for (int i = 0; i < NumThreads; i++)
+            {
+                threads[i] = new ThreadAnonymousInnerClassHelper2(writer, r, endTime, excs);
+                threads[i].SetDaemon(true);
+                threads[i].Start();
+            }
+
+            int sum = 0;
+            while (Environment.TickCount < endTime)
+            {
+                DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
+                if (r2 != null)
+                {
+                    r.Dispose();
+                    r = r2;
+                }
+                Query q = new TermQuery(new Term("indexname", "test"));
+                IndexSearcher searcher = NewSearcher(r);
+                sum += searcher.Search(q, 10).TotalHits;
+            }
+
+            for (int i = 0; i < NumThreads; i++)
+            {
+                threads[i].Join();
+            }
+            // at least search once
+            DirectoryReader dr2 = DirectoryReader.OpenIfChanged(r);
+            if (dr2 != null)
+            {
+                r.Dispose();
+                r = dr2;
+            }
+            Query q2 = new TermQuery(new Term("indexname", "test"));
+            IndexSearcher indSearcher = NewSearcher(r);
+            sum += indSearcher.Search(q2, 10).TotalHits;
+            Assert.IsTrue(sum > 0, "no documents found at all");
+
+            Assert.AreEqual(0, excs.Count);
+            writer.Dispose();
+
+            r.Dispose();
+            dir1.Dispose();
+        }
+
+        private class ThreadAnonymousInnerClassHelper2 : ThreadClass
+        {
+            private IndexWriter Writer;
+            private DirectoryReader r;
+            private long EndTime;
+            private ConcurrentQueue<Exception> Excs;
+
+            public ThreadAnonymousInnerClassHelper2(IndexWriter writer, DirectoryReader r, long endTime, ConcurrentQueue<Exception> excs)
+            {
+                this.Writer = writer;
+                this.r = r;
+                this.EndTime = endTime;
+                this.Excs = excs;
+                rand = new Random(Random().Next());
+            }
+
+            internal readonly Random rand;
+
+            public override void Run()
+            {
+                int count = 0;
+                do
+                {
+                    try
+                    {
+                        for (int docUpto = 0; docUpto < 10; docUpto++)
+                        {
+                            Writer.AddDocument(DocHelper.CreateDocument(10 * count + docUpto, "test", 4));
+                        }
+                        count++;
+                        int limit = count * 10;
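+                        // Randomly delete a few of the docs added so far; DocHelper presumably
+                        // indexes a matching "b<n>" term in field3.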
+                        for (int delUpto = 0; delUpto < 5; delUpto++)
+                        {
+                            int x = rand.Next(limit);
+                            Writer.DeleteDocuments(new Term("field3", "b" + x));
+                        }
+                    }
+                    catch (Exception t)
+                    {
+                        Excs.Enqueue(t);
+                        throw new Exception(t.Message, t);
+                    }
+                } while (Environment.TickCount < EndTime);
+            }
+        }
+
+        [Test]
+        public virtual void TestForceMergeDeletes()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "a b c", Field.Store.NO));
+            Field id = NewStringField("id", "", Field.Store.NO);
+            doc.Add(id);
+            id.SetStringValue("0");
+            w.AddDocument(doc);
+            id.SetStringValue("1");
+            w.AddDocument(doc);
+            w.DeleteDocuments(new Term("id", "0"));
+
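+            // Opening an NRT reader causes the writer to pool readers; ForceMergeDeletes can then
+            // rewrite the segment without the deleted doc.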
+            IndexReader r = w.Reader;
+            w.ForceMergeDeletes();
+            w.Dispose();
+            r.Dispose();
+            r = DirectoryReader.Open(dir);
+            Assert.AreEqual(1, r.NumDocs);
+            Assert.IsFalse(r.HasDeletions);
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDeletesNumDocs()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "a b c", Field.Store.NO));
+            Field id = NewStringField("id", "", Field.Store.NO);
+            doc.Add(id);
+            id.SetStringValue("0");
+            w.AddDocument(doc);
+            id.SetStringValue("1");
+            w.AddDocument(doc);
+            IndexReader r = w.Reader;
+            Assert.AreEqual(2, r.NumDocs);
+            r.Dispose();
+
+            w.DeleteDocuments(new Term("id", "0"));
+            r = w.Reader;
+            Assert.AreEqual(1, r.NumDocs);
+            r.Dispose();
+
+            w.DeleteDocuments(new Term("id", "1"));
+            r = w.Reader;
+            Assert.AreEqual(0, r.NumDocs);
+            r.Dispose();
+
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestEmptyIndex()
+        {
+            // Ensures that getReader works on an empty index, which hasn't been committed yet.
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            IndexReader r = w.Reader;
+            Assert.AreEqual(0, r.NumDocs);
+            r.Dispose();
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSegmentWarmer()
+        {
+            Directory dir = NewDirectory();
+            AtomicBoolean didWarm = new AtomicBoolean();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetReaderPooling(true).SetMergedSegmentWarmer(new IndexReaderWarmerAnonymousInnerClassHelper(this, didWarm)).
+                    SetMergePolicy(NewLogMergePolicy(10)));
+
+            Document doc = new Document();
+            doc.Add(NewStringField("foo", "bar", Field.Store.NO));
+            for (int i = 0; i < 20; i++)
+            {
+                w.AddDocument(doc);
+            }
+            w.WaitForMerges();
+            w.Dispose();
+            dir.Dispose();
+            Assert.IsTrue(didWarm.Get());
+        }
+
+        private class IndexReaderWarmerAnonymousInnerClassHelper : IndexWriter.IndexReaderWarmer
+        {
+            private readonly TestIndexWriterReader OuterInstance;
+
+            private AtomicBoolean DidWarm;
+
+            public IndexReaderWarmerAnonymousInnerClassHelper(TestIndexWriterReader outerInstance, AtomicBoolean didWarm)
+            {
+                this.OuterInstance = outerInstance;
+                this.DidWarm = didWarm;
+            }
+
+            public override void Warm(AtomicReader r)
+            {
+                IndexSearcher s = OuterInstance.NewSearcher(r);
+                TopDocs hits = s.Search(new TermQuery(new Term("foo", "bar")), 10);
+                Assert.AreEqual(20, hits.TotalHits);
+                DidWarm.Set(true);
+            }
+        }
+
+        [Test]
+        public virtual void TestSimpleMergedSegmentWarmer()
+        {
+            Directory dir = NewDirectory();
+            AtomicBoolean didWarm = new AtomicBoolean();
+            InfoStream infoStream = new InfoStreamAnonymousInnerClassHelper(this, didWarm);
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetReaderPooling(true).SetInfoStream(infoStream).SetMergedSegmentWarmer(new SimpleMergedSegmentWarmer(infoStream)).SetMergePolicy(NewLogMergePolicy(10)));
+
+            Document doc = new Document();
+            doc.Add(NewStringField("foo", "bar", Field.Store.NO));
+            for (int i = 0; i < 20; i++)
+            {
+                w.AddDocument(doc);
+            }
+            w.WaitForMerges();
+            w.Dispose();
+            dir.Dispose();
+            Assert.IsTrue(didWarm.Get());
+        }
+
+        private class InfoStreamAnonymousInnerClassHelper : InfoStream
+        {
+            private readonly TestIndexWriterReader OuterInstance;
+
+            private AtomicBoolean DidWarm;
+
+            public InfoStreamAnonymousInnerClassHelper(TestIndexWriterReader outerInstance, AtomicBoolean didWarm)
+            {
+                this.OuterInstance = outerInstance;
+                this.DidWarm = didWarm;
+            }
+
+            public override void Dispose()
+            {
+            }
+
+            public override void Message(string component, string message)
+            {
+                if ("SMSW".Equals(component))
+                {
+                    DidWarm.Set(true);
+                }
+            }
+
+            public override bool IsEnabled(string component)
+            {
+                return true;
+            }
+        }
+
+        [Test]
+        public virtual void TestNoTermsIndex()
+        {
+            // Some Codecs don't honor the ReaderTermsIndexDivisor, so skip the test if
+            // they're picked.
+            AssumeFalse("PreFlex codec does not support ReaderTermsIndexDivisor!", "Lucene3x".Equals(Codec.Default.Name));
+
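+            // A ReaderTermsIndexDivisor of -1 disables loading the terms index entirely.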
+            IndexWriterConfig conf = (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetReaderTermsIndexDivisor(-1);
+
+            // Don't proceed if picked Codec is in the list of illegal ones.
+            string format = TestUtil.GetPostingsFormat("f");
+            AssumeFalse("Format: " + format + " does not support ReaderTermsIndexDivisor!", (format.Equals("FSTPulsing41") || format.Equals("FSTOrdPulsing41") || format.Equals("FST41") || format.Equals("FSTOrd41") || format.Equals("SimpleText") || format.Equals("Memory") || format.Equals("MockRandom") || format.Equals("Direct")));
+
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, conf);
+            Document doc = new Document();
+            doc.Add(new TextField("f", "val", Field.Store.NO));
+            w.AddDocument(doc);
+            SegmentReader r = GetOnlySegmentReader(DirectoryReader.Open(w, true));
+            try
+            {
+                TestUtil.Docs(Random(), r, "f", new BytesRef("val"), null, null, DocsEnum.FLAG_NONE);
+                Assert.Fail("should have failed to seek since terms index was not loaded.");
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException e)
+#pragma warning restore 168
+            {
+                // expected - we didn't load the term index
+            }
+            finally
+            {
+                r.Dispose();
+                w.Dispose();
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestReopenAfterNoRealChange()
+        {
+            Directory d = GetAssertNoDeletesDirectory(NewDirectory());
+            IndexWriter w = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            DirectoryReader r = w.Reader; // start pooling readers
+
+            DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
+            Assert.IsNull(r2);
+
+            w.AddDocument(new Document());
+            DirectoryReader r3 = DirectoryReader.OpenIfChanged(r);
+            Assert.IsNotNull(r3);
+            Assert.IsTrue(r3.Version != r.Version);
+            Assert.IsTrue(r3.IsCurrent);
+
+            // Deletes nothing in reality...:
+            w.DeleteDocuments(new Term("foo", "bar"));
+
+            // ... but IW marks this as not current:
+            Assert.IsFalse(r3.IsCurrent);
+            DirectoryReader r4 = DirectoryReader.OpenIfChanged(r3);
+            Assert.IsNull(r4);
+
+            // Deletes nothing in reality...:
+            w.DeleteDocuments(new Term("foo", "bar"));
+            DirectoryReader r5 = DirectoryReader.OpenIfChanged(r3, w, true);
+            Assert.IsNull(r5);
+
+            r3.Dispose();
+
+            w.Dispose();
+            d.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNRTOpenExceptions()
+        {
+            // LUCENE-5262: test that several failed attempts to obtain an NRT reader
+            // don't leak file handles.
+            MockDirectoryWrapper dir = (MockDirectoryWrapper)GetAssertNoDeletesDirectory(NewMockDirectory());
+            AtomicBoolean shouldFail = new AtomicBoolean();
+            dir.FailOn(new FailureAnonymousInnerClassHelper(shouldFail));
+
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges from getting in the way
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            // create a segment and open an NRT reader
+            writer.AddDocument(new Document());
+            writer.Reader.Dispose();
+
+            // add a new document so a new NRT reader is required
+            writer.AddDocument(new Document());
+
+            // Try to obtain an NRT reader twice: the first time it fails and closes all the
+            // other NRT readers; the second time it fails, but also fails to close the
+            // other NRT reader, since it is already marked closed!
+            for (int i = 0; i < 2; i++)
+            {
+                shouldFail.Set(true);
+                try
+                {
+                    writer.Reader.Dispose();
+                }
+#pragma warning disable 168
+                catch (FakeIOException e)
+#pragma warning restore 168
+                {
+                    // expected
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("hit expected fake IOE");
+                    }
+                }
+            }
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        private class FailureAnonymousInnerClassHelper : MockDirectoryWrapper.Failure
+        {
+            private readonly AtomicBoolean ShouldFail;
+
+            public FailureAnonymousInnerClassHelper(AtomicBoolean shouldFail)
+            {
+                this.ShouldFail = shouldFail;
+            }
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
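+                // Fail exactly once while an NRT reader is being opened (GetReadOnlyClone on the
+                // stack), then clear the flag.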
+                if (ShouldFail.Get() && StackTraceHelper.DoesStackTraceContainMethod("GetReadOnlyClone"))
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: now fail; exc:");
+                        Console.WriteLine((new Exception()).StackTrace);
+                    }
+                    ShouldFail.Set(false);
+                    throw new FakeIOException();
+                }
+            }
+        }
+
+        /// <summary>
+        /// Make sure if all we do is open NRT reader against
+        ///  writer, we don't see merge starvation.
+        /// </summary>
+        [Test]
+        public virtual void TestTooManySegments()
+        {
+            Directory dir = GetAssertNoDeletesDirectory(NewDirectory());
+            // Don't use newIndexWriterConfig, because we need a
+            // "sane" mergePolicy:
+            IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter w = new IndexWriter(dir, iwc);
+            // Create 500 segments:
+            for (int i = 0; i < 500; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("id", "" + i, Field.Store.NO));
+                w.AddDocument(doc);
+                IndexReader r = DirectoryReader.Open(w, true);
+                // Make sure segment count never exceeds 100:
+                Assert.IsTrue(r.Leaves.Count < 100);
+                r.Dispose();
+            }
+            w.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file


[57/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Renamed Codecs\lucene3x\ to Codecs\Lucene3x\

Posted by ni...@apache.org.
Lucene.Net.TestFramework: Renamed Codecs\lucene3x\ to Codecs\Lucene3x\


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/8304ca82
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/8304ca82
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/8304ca82

Branch: refs/heads/api-work
Commit: 8304ca827465fcbfdd6993bfd19864c48bedf0d5
Parents: 362f0d3
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 26 03:10:42 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:17:56 2017 +0700

----------------------------------------------------------------------
 .../Codecs/Lucene3x/PreFlexRWCodec.cs           | 151 +++++++++
 .../Lucene3x/PreFlexRWFieldInfosFormat.cs       |  45 +++
 .../Lucene3x/PreFlexRWFieldInfosReader.cs       | 133 ++++++++
 .../Lucene3x/PreFlexRWFieldInfosWriter.cs       | 130 ++++++++
 .../Codecs/Lucene3x/PreFlexRWFieldsWriter.cs    | 269 +++++++++++++++
 .../Codecs/Lucene3x/PreFlexRWNormsConsumer.cs   | 116 +++++++
 .../Codecs/Lucene3x/PreFlexRWNormsFormat.cs     |  35 ++
 .../Codecs/Lucene3x/PreFlexRWPostingsFormat.cs  |  87 +++++
 .../Lucene3x/PreFlexRWSegmentInfoFormat.cs      |  37 ++
 .../Lucene3x/PreFlexRWSegmentInfoWriter.cs      |  47 +++
 .../Codecs/Lucene3x/PreFlexRWSkipListWriter.cs  | 138 ++++++++
 .../Lucene3x/PreFlexRWStoredFieldsFormat.cs     |  34 ++
 .../Lucene3x/PreFlexRWStoredFieldsWriter.cs     | 214 ++++++++++++
 .../Lucene3x/PreFlexRWTermVectorsFormat.cs      |  74 ++++
 .../Lucene3x/PreFlexRWTermVectorsWriter.cs      | 243 ++++++++++++++
 .../Codecs/Lucene3x/TermInfosWriter.cs          | 334 +++++++++++++++++++
 .../Codecs/lucene3x/PreFlexRWCodec.cs           | 151 ---------
 .../lucene3x/PreFlexRWFieldInfosFormat.cs       |  45 ---
 .../lucene3x/PreFlexRWFieldInfosReader.cs       | 133 --------
 .../lucene3x/PreFlexRWFieldInfosWriter.cs       | 130 --------
 .../Codecs/lucene3x/PreFlexRWFieldsWriter.cs    | 269 ---------------
 .../Codecs/lucene3x/PreFlexRWNormsConsumer.cs   | 116 -------
 .../Codecs/lucene3x/PreFlexRWNormsFormat.cs     |  35 --
 .../Codecs/lucene3x/PreFlexRWPostingsFormat.cs  |  87 -----
 .../lucene3x/PreFlexRWSegmentInfoFormat.cs      |  37 --
 .../lucene3x/PreFlexRWSegmentInfoWriter.cs      |  47 ---
 .../Codecs/lucene3x/PreFlexRWSkipListWriter.cs  | 138 --------
 .../lucene3x/PreFlexRWStoredFieldsFormat.cs     |  34 --
 .../lucene3x/PreFlexRWStoredFieldsWriter.cs     | 214 ------------
 .../lucene3x/PreFlexRWTermVectorsFormat.cs      |  74 ----
 .../lucene3x/PreFlexRWTermVectorsWriter.cs      | 243 --------------
 .../Codecs/lucene3x/TermInfosWriter.cs          | 334 -------------------
 .../Lucene.Net.TestFramework.csproj             |  32 +-
 33 files changed, 2103 insertions(+), 2103 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWCodec.cs
new file mode 100644
index 0000000..4d265d9
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWCodec.cs
@@ -0,0 +1,151 @@
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /// <summary>
+    /// Writes 3.x-like indexes (not perfect emulation yet) for testing only!
+    /// @lucene.experimental
+    /// </summary>
+#pragma warning disable 612, 618
+    public class PreFlexRWCodec : Lucene3xCodec
+    {
+        private readonly PostingsFormat Postings = new PreFlexRWPostingsFormat();
+        private readonly Lucene3xNormsFormat Norms = new PreFlexRWNormsFormat();
+        private readonly FieldInfosFormat FieldInfos = new PreFlexRWFieldInfosFormat();
+        private readonly TermVectorsFormat TermVectors = new PreFlexRWTermVectorsFormat();
+        private readonly SegmentInfoFormat SegmentInfos = new PreFlexRWSegmentInfoFormat();
+        private readonly StoredFieldsFormat StoredFields = new PreFlexRWStoredFieldsFormat();
+        private readonly bool _oldFormatImpersonationIsActive;
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Creates the codec with OldFormatImpersonationIsActive = true.
+        /// </summary>
+        /// <remarks>
+        /// Added so that SPIClassIterator can locate this Codec.  The iterator
+        /// only recognizes classes that have parameterless constructors.
+        /// </remarks>
+        public PreFlexRWCodec()
+            : this(true)
+        { }
+
+        /// <summary>
+        /// </summary>
+        /// <param name="oldFormatImpersonationIsActive">
+        /// LUCENENET specific
+        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/>
+        /// </param>
+        public PreFlexRWCodec(bool oldFormatImpersonationIsActive) : base()
+        {
+            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
+        }
+
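+        // Each format property below returns the writable PreFlexRW implementation while
+        // old-format impersonation is active, and otherwise falls back to the read-only
+        // Lucene3x base format.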
+        public override PostingsFormat PostingsFormat
+        {
+            get
+            {
+                if (_oldFormatImpersonationIsActive)
+                {
+                    return Postings;
+                }
+                else
+                {
+                    return base.PostingsFormat;
+                }
+            }
+        }
+
+        public override NormsFormat NormsFormat
+        {
+            get
+            {
+                if (_oldFormatImpersonationIsActive)
+                {
+                    return Norms;
+                }
+                else
+                {
+                    return base.NormsFormat;
+                }
+            }
+        }
+
+        public override SegmentInfoFormat SegmentInfoFormat
+        {
+            get
+            {
+                if (_oldFormatImpersonationIsActive)
+                {
+                    return SegmentInfos;
+                }
+                else
+                {
+                    return base.SegmentInfoFormat;
+                }
+            }
+        }
+
+        public override FieldInfosFormat FieldInfosFormat
+        {
+            get
+            {
+                if (_oldFormatImpersonationIsActive)
+                {
+                    return FieldInfos;
+                }
+                else
+                {
+                    return base.FieldInfosFormat;
+                }
+            }
+        }
+
+        public override TermVectorsFormat TermVectorsFormat
+        {
+            get
+            {
+                if (_oldFormatImpersonationIsActive)
+                {
+                    return TermVectors;
+                }
+                else
+                {
+                    return base.TermVectorsFormat;
+                }
+            }
+        }
+
+        public override StoredFieldsFormat StoredFieldsFormat
+        {
+            get
+            {
+                if (_oldFormatImpersonationIsActive)
+                {
+                    return StoredFields;
+                }
+                else
+                {
+                    return base.StoredFieldsFormat;
+                }
+            }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosFormat.cs
new file mode 100644
index 0000000..a02fe7f
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosFormat.cs
@@ -0,0 +1,45 @@
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    ///
+    /// <summary>
+    /// @lucene.internal
+    /// @lucene.experimental
+    /// </summary>
+#pragma warning disable 612, 618
+    internal class PreFlexRWFieldInfosFormat : Lucene3xFieldInfosFormat
+    {
+        public override FieldInfosReader FieldInfosReader
+        {
+            get
+            {
+                return new PreFlexRWFieldInfosReader();
+            }
+        }
+
+        public override FieldInfosWriter FieldInfosWriter
+        {
+            get
+            {
+                return new PreFlexRWFieldInfosWriter();
+            }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosReader.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosReader.cs
new file mode 100644
index 0000000..458951e
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosReader.cs
@@ -0,0 +1,133 @@
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using System.Collections.Generic;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocValuesType = Lucene.Net.Index.DocValuesType;
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using FieldInfos = Lucene.Net.Index.FieldInfos;
+    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
+    using IndexFormatTooNewException = Lucene.Net.Index.IndexFormatTooNewException;
+    using IndexFormatTooOldException = Lucene.Net.Index.IndexFormatTooOldException;
+    using IndexInput = Lucene.Net.Store.IndexInput;
+    using IndexOptions = Lucene.Net.Index.IndexOptions;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
+
+    /// <summary>
+    /// @lucene.internal
+    /// @lucene.experimental
+    /// </summary>
+    internal class PreFlexRWFieldInfosReader : FieldInfosReader
+    {
+        internal const int FORMAT_MINIMUM = PreFlexRWFieldInfosWriter.FORMAT_START;
+
+        public override FieldInfos Read(Directory directory, string segmentName, string segmentSuffix, IOContext iocontext)
+        {
+            string fileName = IndexFileNames.SegmentFileName(segmentName, "", PreFlexRWFieldInfosWriter.FIELD_INFOS_EXTENSION);
+            IndexInput input = directory.OpenInput(fileName, iocontext);
+
+            try
+            {
+                int format = input.ReadVInt32();
+
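+                // Format versions are negative and decrease with newer formats, so a value
+                // above FORMAT_MINIMUM is too old.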
+                if (format > FORMAT_MINIMUM)
+                {
+                    throw new IndexFormatTooOldException(input, format, FORMAT_MINIMUM, PreFlexRWFieldInfosWriter.FORMAT_CURRENT);
+                }
+                if (format < PreFlexRWFieldInfosWriter.FORMAT_CURRENT && format != PreFlexRWFieldInfosWriter.FORMAT_PREFLEX_RW)
+                {
+                    throw new IndexFormatTooNewException(input, format, FORMAT_MINIMUM, PreFlexRWFieldInfosWriter.FORMAT_CURRENT);
+                }
+
+                int size = input.ReadVInt32(); //read in the size
+                FieldInfo[] infos = new FieldInfo[size];
+
+                for (int i = 0; i < size; i++)
+                {
+                    string name = input.ReadString();
+                    int fieldNumber = format == PreFlexRWFieldInfosWriter.FORMAT_PREFLEX_RW ? input.ReadInt32() : i;
+                    byte bits = input.ReadByte();
+                    bool isIndexed = (bits & PreFlexRWFieldInfosWriter.IS_INDEXED) != 0;
+                    bool storeTermVector = (bits & PreFlexRWFieldInfosWriter.STORE_TERMVECTOR) != 0;
+                    bool omitNorms = (bits & PreFlexRWFieldInfosWriter.OMIT_NORMS) != 0;
+                    bool storePayloads = (bits & PreFlexRWFieldInfosWriter.STORE_PAYLOADS) != 0;
+                    IndexOptions? indexOptions;
+                    if (!isIndexed)
+                    {
+                        indexOptions = null;
+                    }
+                    else if ((bits & PreFlexRWFieldInfosWriter.OMIT_TERM_FREQ_AND_POSITIONS) != 0)
+                    {
+                        indexOptions = IndexOptions.DOCS_ONLY;
+                    }
+                    else if ((bits & PreFlexRWFieldInfosWriter.OMIT_POSITIONS) != 0)
+                    {
+                        if (format <= PreFlexRWFieldInfosWriter.FORMAT_OMIT_POSITIONS)
+                        {
+                            indexOptions = IndexOptions.DOCS_AND_FREQS;
+                        }
+                        else
+                        {
+                            throw new CorruptIndexException("Corrupt fieldinfos, OMIT_POSITIONS set but format=" + format + " (resource: " + input + ")");
+                        }
+                    }
+                    else
+                    {
+                        indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
+                    }
+
+                    // LUCENE-3027: past indices were able to write
+                    // storePayloads=true when omitTFAP is also true,
+                    // which is invalid.  We correct that, here:
+                    if (indexOptions != IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)
+                    {
+                        storePayloads = false;
+                    }
+
+                    DocValuesType? normType = isIndexed && !omitNorms ? (DocValuesType?)DocValuesType.NUMERIC : null;
+                    if (format == PreFlexRWFieldInfosWriter.FORMAT_PREFLEX_RW && normType != null)
+                    {
+                        // RW can have norms but doesn't write them
+                        normType = input.ReadByte() != 0 ? (DocValuesType?)DocValuesType.NUMERIC : null;
+                    }
+
+                    infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, null, normType, null);
+                }
+
+                if (input.FilePointer != input.Length)
+                {
+                    throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read " + input.FilePointer + " vs size " + input.Length + " (resource: " + input + ")");
+                }
+                return new FieldInfos(infos);
+            }
+            finally
+            {
+                input.Dispose();
+            }
+        }
+
+        public static void Files(Directory dir, SegmentInfo info, ISet<string> files)
+        {
+            files.Add(IndexFileNames.SegmentFileName(info.Name, "", PreFlexRWFieldInfosWriter.FIELD_INFOS_EXTENSION));
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs
new file mode 100644
index 0000000..e0fef49
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldInfosWriter.cs
@@ -0,0 +1,130 @@
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using FieldInfos = Lucene.Net.Index.FieldInfos;
+    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
+    using IndexOptions = Lucene.Net.Index.IndexOptions;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+
+    /// <summary>
+    /// @lucene.internal
+    /// @lucene.experimental
+    /// </summary>
+    internal class PreFlexRWFieldInfosWriter : FieldInfosWriter
+    {
+        // TODO move to test-framework preflex RW?
+
+        /// <summary>
+        /// Extension of field infos </summary>
+        internal const string FIELD_INFOS_EXTENSION = "fnm";
+
+        // First used in 2.9; prior to 2.9 there was no format header
+        internal const int FORMAT_START = -2;
+
+        // First used in 3.4: omit only positional information
+        internal const int FORMAT_OMIT_POSITIONS = -3;
+
+        internal static readonly int FORMAT_PREFLEX_RW = int.MinValue;
+
+        // whenever you add a new format, make it 1 smaller (negative version logic)!
+        internal const int FORMAT_CURRENT = FORMAT_OMIT_POSITIONS;
+
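+        // Per-field flag bits packed into the single byte written for each field.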
+        internal const sbyte IS_INDEXED = 0x1;
+        internal const sbyte STORE_TERMVECTOR = 0x2;
+        internal const sbyte OMIT_NORMS = 0x10;
+        internal const sbyte STORE_PAYLOADS = 0x20;
+        internal const sbyte OMIT_TERM_FREQ_AND_POSITIONS = 0x40;
+        internal const sbyte OMIT_POSITIONS = -128;
+
+        public override void Write(Directory directory, string segmentName, string segmentSuffix, FieldInfos infos, IOContext context)
+        {
+            string fileName = IndexFileNames.SegmentFileName(segmentName, "", FIELD_INFOS_EXTENSION);
+            IndexOutput output = directory.CreateOutput(fileName, context);
+            bool success = false;
+            try
+            {
+                output.WriteVInt32(FORMAT_PREFLEX_RW);
+                output.WriteVInt32(infos.Count);
+                foreach (FieldInfo fi in infos)
+                {
+                    sbyte bits = 0x0;
+                    if (fi.HasVectors)
+                    {
+                        bits |= STORE_TERMVECTOR;
+                    }
+                    if (fi.OmitsNorms)
+                    {
+                        bits |= OMIT_NORMS;
+                    }
+                    if (fi.HasPayloads)
+                    {
+                        bits |= STORE_PAYLOADS;
+                    }
+                    if (fi.IsIndexed)
+                    {
+                        bits |= IS_INDEXED;
+                        Debug.Assert(fi.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !fi.HasPayloads);
+                        if (fi.IndexOptions == IndexOptions.DOCS_ONLY)
+                        {
+                            bits |= OMIT_TERM_FREQ_AND_POSITIONS;
+                        }
+                        else if (fi.IndexOptions == IndexOptions.DOCS_AND_FREQS)
+                        {
+                            bits |= OMIT_POSITIONS;
+                        }
+                    }
+                    output.WriteString(fi.Name);
+                    /*
+                     * We need to write the field number since IW tries
+                     * to stabilize the field numbers across segments, so the
+                     * FI ordinal is not necessarily equivalent to the field number.
+                     */
+                    output.WriteInt32(fi.Number);
+                    output.WriteByte((byte)bits);
+                    if (fi.IsIndexed && !fi.OmitsNorms)
+                    {
+                        // to allow null norm types we need to indicate if norms are written
+                        // only in RW case
+                        output.WriteByte((byte)(sbyte)(fi.NormType == null ? 0 : 1));
+                    }
+                    Debug.Assert(fi.Attributes == null); // not used or supported
+                }
+                success = true;
+            }
+            finally
+            {
+                if (success)
+                {
+                    output.Dispose();
+                }
+                else
+                {
+                    IOUtils.CloseWhileHandlingException(output);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs
new file mode 100644
index 0000000..b0c8174
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWFieldsWriter.cs
@@ -0,0 +1,269 @@
+using System.Collections.Generic;
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using BytesRef = Lucene.Net.Util.BytesRef;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
+    using IndexOptions = Lucene.Net.Index.IndexOptions;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
+
+#pragma warning disable 612, 618
+    internal class PreFlexRWFieldsWriter : FieldsConsumer
+    {
+        private readonly TermInfosWriter TermsOut;
+        private readonly IndexOutput FreqOut;
+        private readonly IndexOutput ProxOut;
+        private readonly PreFlexRWSkipListWriter SkipListWriter;
+        private readonly int TotalNumDocs;
+
+        public PreFlexRWFieldsWriter(SegmentWriteState state)
+        {
+            TermsOut = new TermInfosWriter(state.Directory, state.SegmentInfo.Name, state.FieldInfos, state.TermIndexInterval);
+
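+            // On failure part-way through construction, close whatever was already opened
+            // rather than leaking it.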
+            bool success = false;
+            try
+            {
+                string freqFile = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, "", Lucene3xPostingsFormat.FREQ_EXTENSION);
+                FreqOut = state.Directory.CreateOutput(freqFile, state.Context);
+                TotalNumDocs = state.SegmentInfo.DocCount;
+                success = true;
+            }
+            finally
+            {
+                if (!success)
+                {
+                    IOUtils.CloseWhileHandlingException(TermsOut);
+                }
+            }
+
+            success = false;
+            try
+            {
+                if (state.FieldInfos.HasProx)
+                {
+                    string proxFile = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, "", Lucene3xPostingsFormat.PROX_EXTENSION);
+                    ProxOut = state.Directory.CreateOutput(proxFile, state.Context);
+                }
+                else
+                {
+                    ProxOut = null;
+                }
+                success = true;
+            }
+            finally
+            {
+                if (!success)
+                {
+                    IOUtils.CloseWhileHandlingException(TermsOut, FreqOut);
+                }
+            }
+
+            SkipListWriter = new PreFlexRWSkipListWriter(TermsOut.SkipInterval, TermsOut.MaxSkipLevels, TotalNumDocs, FreqOut, ProxOut);
+            //System.out.println("\nw start seg=" + segment);
+        }
+
+        public override TermsConsumer AddField(FieldInfo field)
+        {
+            Debug.Assert(field.Number != -1);
+            if (field.IndexOptions >= IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
+            {
+                throw new System.NotSupportedException("this codec cannot index offsets");
+            }
+            //System.out.println("w field=" + field.Name + " storePayload=" + field.storePayloads + " number=" + field.number);
+            return new PreFlexTermsWriter(this, field);
+        }
+
+        public override void Dispose()
+        {
+            IOUtils.Close(TermsOut, FreqOut, ProxOut);
+        }
+
+        private class PreFlexTermsWriter : TermsConsumer
+        {
+            internal virtual void InitializeInstanceFields()
+            {
+                postingsWriter = new PostingsWriter(this);
+            }
+
+            private readonly PreFlexRWFieldsWriter OuterInstance;
+
+            internal readonly FieldInfo FieldInfo;
+            internal readonly bool OmitTF;
+            internal readonly bool StorePayloads;
+
+            internal readonly TermInfo TermInfo = new TermInfo();
+            internal PostingsWriter postingsWriter;
+
+            public PreFlexTermsWriter(PreFlexRWFieldsWriter outerInstance, FieldInfo fieldInfo)
+            {
+                this.OuterInstance = outerInstance;
+
+                InitializeInstanceFields();
+                this.FieldInfo = fieldInfo;
+                OmitTF = fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY;
+                StorePayloads = fieldInfo.HasPayloads;
+            }
+
+            internal class PostingsWriter : PostingsConsumer
+            {
+                private readonly PreFlexRWFieldsWriter.PreFlexTermsWriter OuterInstance;
+
+                public PostingsWriter(PreFlexRWFieldsWriter.PreFlexTermsWriter outerInstance)
+                {
+                    this.OuterInstance = outerInstance;
+                }
+
+                internal int LastDocID;
+                internal int LastPayloadLength = -1;
+                internal int LastPosition;
+                internal int Df;
+
+                public PostingsWriter Reset()
+                {
+                    Df = 0;
+                    LastDocID = 0;
+                    LastPayloadLength = -1;
+                    return this;
+                }
+
+                public override void StartDoc(int docID, int termDocFreq)
+                {
+                    //System.out.println("    w doc=" + docID);
+
+                    int delta = docID - LastDocID;
+                    if (docID < 0 || (Df > 0 && delta <= 0))
+                    {
+                        throw new CorruptIndexException("docs out of order (" + docID + " <= " + LastDocID + " )");
+                    }
+
+                    if ((++Df % OuterInstance.OuterInstance.TermsOut.SkipInterval) == 0)
+                    {
+                        OuterInstance.OuterInstance.SkipListWriter.SetSkipData(LastDocID, OuterInstance.StorePayloads, LastPayloadLength);
+                        OuterInstance.OuterInstance.SkipListWriter.BufferSkip(Df);
+                    }
+
+                    LastDocID = docID;
+
+                    Debug.Assert(docID < OuterInstance.OuterInstance.TotalNumDocs, "docID=" + docID + " totalNumDocs=" + OuterInstance.OuterInstance.TotalNumDocs);
+
+                    if (OuterInstance.OmitTF)
+                    {
+                        OuterInstance.OuterInstance.FreqOut.WriteVInt32(delta);
+                    }
+                    else
+                    {
+                        int code = delta << 1;
+                        if (termDocFreq == 1)
+                        {
+                            OuterInstance.OuterInstance.FreqOut.WriteVInt32(code | 1);
+                        }
+                        else
+                        {
+                            OuterInstance.OuterInstance.FreqOut.WriteVInt32(code);
+                            OuterInstance.OuterInstance.FreqOut.WriteVInt32(termDocFreq);
+                        }
+                    }
+                    LastPosition = 0;
+                }
+
+                public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset)
+                {
+                    Debug.Assert(OuterInstance.OuterInstance.ProxOut != null);
+                    Debug.Assert(startOffset == -1);
+                    Debug.Assert(endOffset == -1);
+                    //System.out.println("      w pos=" + position + " payl=" + payload);
+                    int delta = position - LastPosition;
+                    LastPosition = position;
+
+                    if (OuterInstance.StorePayloads)
+                    {
+                        int payloadLength = payload == null ? 0 : payload.Length;
+                        if (payloadLength != LastPayloadLength)
+                        {
+                            //System.out.println("        write payload len=" + payloadLength);
+                            LastPayloadLength = payloadLength;
+                            OuterInstance.OuterInstance.ProxOut.WriteVInt32((delta << 1) | 1);
+                            OuterInstance.OuterInstance.ProxOut.WriteVInt32(payloadLength);
+                        }
+                        else
+                        {
+                            OuterInstance.OuterInstance.ProxOut.WriteVInt32(delta << 1);
+                        }
+                        if (payloadLength > 0)
+                        {
+                            OuterInstance.OuterInstance.ProxOut.WriteBytes(payload.Bytes, payload.Offset, payload.Length);
+                        }
+                    }
+                    else
+                    {
+                        OuterInstance.OuterInstance.ProxOut.WriteVInt32(delta);
+                    }
+                }
+
+                public override void FinishDoc()
+                {
+                }
+            }
+
+            public override PostingsConsumer StartTerm(BytesRef text)
+            {
+                //System.out.println("  w term=" + text.utf8ToString());
+                OuterInstance.SkipListWriter.ResetSkip();
+                TermInfo.FreqPointer = OuterInstance.FreqOut.FilePointer;
+                if (OuterInstance.ProxOut != null)
+                {
+                    TermInfo.ProxPointer = OuterInstance.ProxOut.FilePointer;
+                }
+                return postingsWriter.Reset();
+            }
+
+            public override void FinishTerm(BytesRef text, TermStats stats)
+            {
+                if (stats.DocFreq > 0)
+                {
+                    long skipPointer = OuterInstance.SkipListWriter.WriteSkip(OuterInstance.FreqOut);
+                    TermInfo.DocFreq = stats.DocFreq;
+                    TermInfo.SkipOffset = (int)(skipPointer - TermInfo.FreqPointer);
+                    //System.out.println("  w finish term=" + text.utf8ToString() + " fnum=" + fieldInfo.number);
+                    OuterInstance.TermsOut.Add(FieldInfo.Number, text, TermInfo);
+                }
+            }
+
+            public override void Finish(long sumTotalTermCount, long sumDocFreq, int docCount)
+            {
+            }
+
+            public override IComparer<BytesRef> Comparer
+            {
+                get
+                {
+                    return BytesRef.UTF8SortedAsUTF16Comparer;
+                }
+            }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file
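
A note on the freq-stream encoding in PostingsWriter.StartDoc above: the doc
delta is left-shifted one bit, and the low bit flags the common freq == 1 case,
so most postings cost a single VInt. A minimal standalone C# sketch of the same
packing; the toy WriteVInt below is a stand-in for IndexOutput.WriteVInt32 but
produces the same wire format:

    using System;
    using System.Collections.Generic;

    // Sketch of the 3.x freq-stream posting encoding from
    // PreFlexRWFieldsWriter.PostingsWriter.StartDoc.
    static class FreqStreamSketch
    {
        static void WriteVInt(List<byte> buf, int value)
        {
            uint v = (uint)value;
            while (v > 0x7F)
            {
                buf.Add((byte)((v & 0x7F) | 0x80));
                v >>= 7;
            }
            buf.Add((byte)v);
        }

        // Doc delta is shifted left one bit; a set low bit means
        // "freq == 1", so the common case needs no second VInt.
        static void WritePosting(List<byte> buf, int docDelta, int freq)
        {
            int code = docDelta << 1;
            if (freq == 1)
            {
                WriteVInt(buf, code | 1);
            }
            else
            {
                WriteVInt(buf, code);
                WriteVInt(buf, freq);
            }
        }

        static void Main()
        {
            var buf = new List<byte>();
            WritePosting(buf, 3, 1); // one VInt: (3 << 1) | 1 = 7
            WritePosting(buf, 5, 4); // two VInts: 10, then 4
            Console.WriteLine(string.Join(",", buf)); // 7,10,4
        }
    }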

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs
new file mode 100644
index 0000000..2a91121
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsConsumer.cs
@@ -0,0 +1,116 @@
+using System;
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using System.Collections.Generic;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+
+    /// <summary>
+    /// Writes and Merges Lucene 3.x norms format
+    /// @lucene.experimental
+    /// </summary>
+    internal class PreFlexRWNormsConsumer : DocValuesConsumer
+    {
+        /// <summary>
+        /// norms header placeholder </summary>
+        private static readonly sbyte[] NORMS_HEADER = new sbyte[] { (sbyte)'N', (sbyte)'R', (sbyte)'M', -1 };
+
+        /// <summary>
+        /// Extension of norms file </summary>
+        private const string NORMS_EXTENSION = "nrm";
+
+        /// <summary>
+        /// Extension of separate norms file </summary>
+        /// @deprecated Only for reading existing 3.x indexes
+        [Obsolete("Only for reading existing 3.x indexes")]
+        private const string SEPARATE_NORMS_EXTENSION = "s";
+
+        private readonly IndexOutput @out;
+        private int LastFieldNumber = -1; // only for assert
+
+        public PreFlexRWNormsConsumer(Directory directory, string segment, IOContext context)
+        {
+            string normsFileName = IndexFileNames.SegmentFileName(segment, "", NORMS_EXTENSION);
+            bool success = false;
+            IndexOutput output = null;
+            try
+            {
+                output = directory.CreateOutput(normsFileName, context);
+                // output.WriteBytes(NORMS_HEADER, 0, NORMS_HEADER.Length);
+                foreach (var @sbyte in NORMS_HEADER)
+                {
+                    output.WriteByte((byte)@sbyte);
+                }
+                @out = output;
+                success = true;
+            }
+            finally
+            {
+                if (!success)
+                {
+                    IOUtils.CloseWhileHandlingException(output);
+                }
+            }
+        }
+
+        public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
+        {
+            Debug.Assert(field.Number > LastFieldNumber, "writing norms fields out of order: " + LastFieldNumber + " -> " + field.Number);
+            foreach (var n in values)
+            {
+                // Compare on the long value before narrowing: checking the
+                // already-narrowed sbyte against sbyte.MinValue/MaxValue can
+                // never fail, so out-of-range norms would silently wrap.
+                long v = (long)n;
+                if (v < sbyte.MinValue || v > sbyte.MaxValue)
+                {
+                    throw new System.NotSupportedException("3.x cannot index norms that won't fit in a byte, got: " + v);
+                }
+                @out.WriteByte((byte)(sbyte)v);
+            }
+            LastFieldNumber = field.Number;
+        }
+
+        protected override void Dispose(bool disposing)
+        {
+            if (disposing)
+                IOUtils.Close(@out);
+        }
+
+        public override void AddBinaryField(FieldInfo field, IEnumerable<BytesRef> values)
+        {
+            throw new InvalidOperationException();
+        }
+
+        public override void AddSortedField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd)
+        {
+            throw new InvalidOperationException();
+        }
+
+        public override void AddSortedSetField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
+        {
+            throw new InvalidOperationException();
+        }
+    }
+}
\ No newline at end of file
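
Because the 3.x norms format stores exactly one byte per document and field,
AddNumericField above has to range-check the incoming long before narrowing
it; narrowing first wraps out-of-range values back into range, so the check
could never fire. A small self-contained sketch of the check (EncodeNorm is an
illustrative name, not part of the codec):

    using System;

    // Sketch of the per-document norm range check: each 3.x norm must
    // fit in a single signed byte on disk.
    static class NormRangeSketch
    {
        static byte EncodeNorm(long norm)
        {
            // Compare the long value itself; casting to sbyte first
            // would wrap 300 to 44 and the check would always pass.
            if (norm < sbyte.MinValue || norm > sbyte.MaxValue)
            {
                throw new NotSupportedException(
                    "3.x cannot index norms that won't fit in a byte, got: " + norm);
            }
            return (byte)(sbyte)norm;
        }

        static void Main()
        {
            Console.WriteLine(EncodeNorm(127)); // 127
            Console.WriteLine(EncodeNorm(-1));  // 255 (two's-complement byte)
            try { EncodeNorm(300); }
            catch (NotSupportedException e) { Console.WriteLine(e.Message); }
        }
    }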

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsFormat.cs
new file mode 100644
index 0000000..d85d5d3
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWNormsFormat.cs
@@ -0,0 +1,35 @@
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
+
+    /// <summary>
+    /// @lucene.internal
+    /// @lucene.experimental
+    /// </summary>
+#pragma warning disable 612, 618
+    internal class PreFlexRWNormsFormat : Lucene3xNormsFormat
+    {
+        public override DocValuesConsumer NormsConsumer(SegmentWriteState state)
+        {
+            return new PreFlexRWNormsConsumer(state.Directory, state.SegmentInfo.Name, state.Context);
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWPostingsFormat.cs
new file mode 100644
index 0000000..962d95c
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWPostingsFormat.cs
@@ -0,0 +1,87 @@
+using System;
+using System.Reflection;
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using SegmentReadState = Lucene.Net.Index.SegmentReadState;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
+
+    /// <summary>
+    /// Codec, only for testing, that can write and read the
+    ///  pre-flex index format.
+    ///
+    /// @lucene.experimental
+    /// </summary>
+#pragma warning disable 612, 618
+    internal class PreFlexRWPostingsFormat : Lucene3xPostingsFormat
+    {
+        public PreFlexRWPostingsFormat()
+        {
+            // NOTE: we impersonate the PreFlex codec so that it can
+            // read the segments we write!
+        }
+
+        public override FieldsConsumer FieldsConsumer(SegmentWriteState state)
+        {
+            return new PreFlexRWFieldsWriter(state);
+        }
+
+        public override FieldsProducer FieldsProducer(SegmentReadState state)
+        {
+            // Whenever IW opens readers, eg for merging, we have to
+            // keep terms order in UTF16:
+
+            return new Lucene3xFieldsAnonymousInnerClassHelper(this, state.Directory, state.FieldInfos, state.SegmentInfo, state.Context, state.TermsIndexDivisor);
+        }
+
+        private class Lucene3xFieldsAnonymousInnerClassHelper : Lucene3xFields
+        {
+            private readonly PreFlexRWPostingsFormat OuterInstance;
+
+            public Lucene3xFieldsAnonymousInnerClassHelper(PreFlexRWPostingsFormat outerInstance, Store.Directory directory, Index.FieldInfos fieldInfos, Index.SegmentInfo segmentInfo, Store.IOContext context, int termsIndexDivisor)
+                : base(directory, fieldInfos, segmentInfo, context, termsIndexDivisor)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override bool SortTermsByUnicode()
+            {
+                // We carefully peek into the stack trace above us: if
+                // we are part of a "merge", we must sort by UTF16:
+                bool unicodeSortOrder = true;
+
+                if (Util.StackTraceHelper.DoesStackTraceContainMethod("Merge"))
+                {
+                    unicodeSortOrder = false;
+                    if (LuceneTestCase.VERBOSE)
+                    {
+                        Console.WriteLine("NOTE: PreFlexRW codec: forcing legacy UTF16 term sort order");
+                    }
+                }
+
+                return unicodeSortOrder;
+            }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file
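
The reason SortTermsByUnicode must fall back to UTF16 order during merges is
that UTF-8 byte order and UTF-16 code-unit order disagree once supplementary
characters are involved: in UTF-16, surrogates (0xD800-0xDFFF) sort below the
later BMP code units, while the four-byte UTF-8 form of a supplementary
character sorts above every BMP character. A small illustration using plain
.NET comparisons:

    using System;
    using System.Text;

    // Shows the UTF-8 vs UTF-16 order disagreement that forces the
    // PreFlexRW codec to use legacy UTF16 term order while merging.
    static class SortOrderSketch
    {
        static int CompareUtf8(string a, string b)
        {
            byte[] ba = Encoding.UTF8.GetBytes(a), bb = Encoding.UTF8.GetBytes(b);
            int len = Math.Min(ba.Length, bb.Length);
            for (int i = 0; i < len; i++)
            {
                int diff = ba[i] - bb[i];
                if (diff != 0) return diff;
            }
            return ba.Length - bb.Length;
        }

        static void Main()
        {
            string supp = char.ConvertFromUtf32(0x10400); // surrogate pair D801 DC00
            string bmp = "\uFB44";                        // BMP char above the surrogates

            // UTF-8: EF AD 84 sorts before F0 90 90 80, so bmp < supp...
            Console.WriteLine(CompareUtf8(bmp, supp) < 0);           // True
            // ...but by UTF-16 code units, 0xFB44 > 0xD801, so supp < bmp.
            Console.WriteLine(string.CompareOrdinal(bmp, supp) > 0); // True
        }
    }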

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWSegmentInfoFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWSegmentInfoFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWSegmentInfoFormat.cs
new file mode 100644
index 0000000..86d7e4d
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWSegmentInfoFormat.cs
@@ -0,0 +1,37 @@
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// @lucene.experimental
+    /// </summary>
+#pragma warning disable 612, 618
+    internal class PreFlexRWSegmentInfoFormat : Lucene3xSegmentInfoFormat
+    {
+        private readonly SegmentInfoWriter Writer = new PreFlexRWSegmentInfoWriter();
+
+        public override SegmentInfoWriter SegmentInfoWriter
+        {
+            get
+            {
+                return Writer;
+            }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWSegmentInfoWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWSegmentInfoWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWSegmentInfoWriter.cs
new file mode 100644
index 0000000..3019c51
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWSegmentInfoWriter.cs
@@ -0,0 +1,47 @@
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using FieldInfos = Lucene.Net.Index.FieldInfos;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
+    using SegmentInfos = Lucene.Net.Index.SegmentInfos;
+
+    /// <summary>
+    /// PreFlex implementation of <seealso cref="SegmentInfoWriter"/>.
+    /// @lucene.experimental
+    /// </summary>
+#pragma warning disable 612, 618
+    internal class PreFlexRWSegmentInfoWriter : SegmentInfoWriter
+    {
+        // NOTE: this is not "really" 3.x format, because we are
+        // writing each SI to its own file, vs 3.x where the list
+        // of segments and SI for each segment is written into a
+        // single segments_N file
+
+        /// <summary>
+        /// Save a single segment's info. </summary>
+        public override void Write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext)
+        {
+            SegmentInfos.Write3xInfo(dir, si, ioContext);
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWSkipListWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWSkipListWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWSkipListWriter.cs
new file mode 100644
index 0000000..0ce2d24
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWSkipListWriter.cs
@@ -0,0 +1,138 @@
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using Lucene.Net.Support;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+
+    /// <summary>
+    /// PreFlexRW skiplist implementation.
+    /// @lucene.experimental
+    /// </summary>
+    public class PreFlexRWSkipListWriter : MultiLevelSkipListWriter
+    {
+        private int[] LastSkipDoc;
+        private int[] LastSkipPayloadLength;
+        private long[] LastSkipFreqPointer;
+        private long[] LastSkipProxPointer;
+
+        private IndexOutput FreqOutput;
+        private IndexOutput ProxOutput;
+
+        private int CurDoc;
+        private bool CurStorePayloads;
+        private int CurPayloadLength;
+        private long CurFreqPointer;
+        private long CurProxPointer;
+
+        public PreFlexRWSkipListWriter(int skipInterval, int numberOfSkipLevels, int docCount, IndexOutput freqOutput, IndexOutput proxOutput)
+            : base(skipInterval, numberOfSkipLevels, docCount)
+        {
+            this.FreqOutput = freqOutput;
+            this.ProxOutput = proxOutput;
+
+            LastSkipDoc = new int[numberOfSkipLevels];
+            LastSkipPayloadLength = new int[numberOfSkipLevels];
+            LastSkipFreqPointer = new long[numberOfSkipLevels];
+            LastSkipProxPointer = new long[numberOfSkipLevels];
+        }
+
+        /// <summary>
+        /// Sets the values for the current skip data.
+        /// </summary>
+        public virtual void SetSkipData(int doc, bool storePayloads, int payloadLength)
+        {
+            this.CurDoc = doc;
+            this.CurStorePayloads = storePayloads;
+            this.CurPayloadLength = payloadLength;
+            this.CurFreqPointer = FreqOutput.FilePointer;
+            if (ProxOutput != null)
+            {
+                this.CurProxPointer = ProxOutput.FilePointer;
+            }
+        }
+
+        public override void ResetSkip()
+        {
+            base.ResetSkip();
+            Arrays.Fill(LastSkipDoc, 0);
+            Arrays.Fill(LastSkipPayloadLength, -1); // we don't have to write the first length in the skip list
+            Arrays.Fill(LastSkipFreqPointer, FreqOutput.FilePointer);
+            if (ProxOutput != null)
+            {
+                Arrays.Fill(LastSkipProxPointer, ProxOutput.FilePointer);
+            }
+        }
+
+        protected override void WriteSkipData(int level, IndexOutput skipBuffer)
+        {
+            // To efficiently store payloads in the posting lists we do not store the length of
+            // every payload. Instead we omit the length for a payload if the previous payload had
+            // the same length.
+            // However, in order to support skipping the payload length at every skip point must be known.
+            // So we use the same length encoding that we use for the posting lists for the skip data as well:
+            // Case 1: current field does not store payloads
+            //           SkipDatum                 --> DocSkip, FreqSkip, ProxSkip
+            //           DocSkip,FreqSkip,ProxSkip --> VInt
+            //           DocSkip records the document number before every SkipInterval th  document in TermFreqs.
+            //           Document numbers are represented as differences from the previous value in the sequence.
+            // Case 2: current field stores payloads
+            //           SkipDatum                 --> DocSkip, PayloadLength?, FreqSkip,ProxSkip
+            //           DocSkip,FreqSkip,ProxSkip --> VInt
+            //           PayloadLength             --> VInt
+            //         In this case DocSkip/2 is the difference between
+            //         the current and the previous value. If DocSkip
+            //         is odd, then a PayloadLength encoded as VInt follows,
+            //         if DocSkip is even, then it is assumed that the
+            //         current payload length equals the length at the previous
+            //         skip point
+            if (CurStorePayloads)
+            {
+                int delta = CurDoc - LastSkipDoc[level];
+                if (CurPayloadLength == LastSkipPayloadLength[level])
+                {
+                    // the current payload length equals the length at the previous skip point,
+                    // so we don't store the length again
+                    skipBuffer.WriteVInt32(delta * 2);
+                }
+                else
+                {
+                    // the payload length is different from the previous one. We shift the DocSkip,
+                    // set the lowest bit and store the current payload length as VInt.
+                    skipBuffer.WriteVInt32(delta * 2 + 1);
+                    skipBuffer.WriteVInt32(CurPayloadLength);
+                    LastSkipPayloadLength[level] = CurPayloadLength;
+                }
+            }
+            else
+            {
+                // current field does not store payloads
+                skipBuffer.WriteVInt32(CurDoc - LastSkipDoc[level]);
+            }
+
+            skipBuffer.WriteVInt32((int)(CurFreqPointer - LastSkipFreqPointer[level]));
+            skipBuffer.WriteVInt32((int)(CurProxPointer - LastSkipProxPointer[level]));
+
+            LastSkipDoc[level] = CurDoc;
+
+            LastSkipFreqPointer[level] = CurFreqPointer;
+            LastSkipProxPointer[level] = CurProxPointer;
+        }
+    }
+}
\ No newline at end of file
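
The long comment in WriteSkipData above uses the same odd/even trick as the
posting lists: DocSkip is doubled, and a set low bit means "a new payload
length follows". A compact sketch of just that branch; the int list stands in
for real VInt output:

    using System;
    using System.Collections.Generic;

    // Sketch of the payload-aware skip-datum encoding from
    // PreFlexRWSkipListWriter.WriteSkipData.
    static class SkipDatumSketch
    {
        // Stand-in for writing a VInt to the skip buffer.
        static void WriteVInt(List<int> output, int v) => output.Add(v);

        static int WriteSkipDatum(List<int> output, int docDelta,
                                  int payloadLength, int lastPayloadLength)
        {
            if (payloadLength == lastPayloadLength)
            {
                // Same length as the previous skip point: even DocSkip,
                // the length itself is omitted.
                WriteVInt(output, docDelta * 2);
            }
            else
            {
                // Length changed: odd DocSkip, then the new length.
                WriteVInt(output, docDelta * 2 + 1);
                WriteVInt(output, payloadLength);
            }
            return payloadLength;
        }

        static void Main()
        {
            var output = new List<int>();
            int last = -1;
            last = WriteSkipDatum(output, 16, 4, last); // 33, 4
            last = WriteSkipDatum(output, 16, 4, last); // 32
            Console.WriteLine(string.Join(",", output)); // 33,4,32
        }
    }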

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsFormat.cs
new file mode 100644
index 0000000..63ffc4a
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsFormat.cs
@@ -0,0 +1,34 @@
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using Directory = Lucene.Net.Store.Directory;
+    using IOContext = Lucene.Net.Store.IOContext;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
+
+#pragma warning disable 612, 618
+    internal class PreFlexRWStoredFieldsFormat : Lucene3xStoredFieldsFormat
+    {
+        public override StoredFieldsWriter FieldsWriter(Directory directory, SegmentInfo segmentInfo, IOContext context)
+        {
+            return new PreFlexRWStoredFieldsWriter(directory, segmentInfo.Name, context);
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs
new file mode 100644
index 0000000..628564a
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWStoredFieldsWriter.cs
@@ -0,0 +1,214 @@
+using System;
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using Lucene.Net.Support;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /// <summary>
+    /// Copyright 2004 The Apache Software Foundation
+    ///
+    /// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+    /// use this file except in compliance with the License. You may obtain a copy of
+    /// the License at
+    ///
+    /// http://www.apache.org/licenses/LICENSE-2.0
+    ///
+    /// Unless required by applicable law or agreed to in writing, software
+    /// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+    /// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+    /// License for the specific language governing permissions and limitations under
+    /// the License.
+    /// </summary>
+
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using FieldInfos = Lucene.Net.Index.FieldInfos;
+    using IIndexableField = Lucene.Net.Index.IIndexableField;
+    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+
+    /// <summary>
+    /// @lucene.experimental </summary>
+#pragma warning disable 612, 618
+    internal sealed class PreFlexRWStoredFieldsWriter : StoredFieldsWriter
+    {
+        private readonly Directory Directory;
+        private readonly string Segment;
+        private IndexOutput FieldsStream;
+        private IndexOutput IndexStream;
+
+        public PreFlexRWStoredFieldsWriter(Directory directory, string segment, IOContext context)
+        {
+            Debug.Assert(directory != null);
+            this.Directory = directory;
+            this.Segment = segment;
+
+            bool success = false;
+            try
+            {
+                FieldsStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xStoredFieldsReader.FIELDS_EXTENSION), context);
+                IndexStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xStoredFieldsReader.FIELDS_INDEX_EXTENSION), context);
+
+                FieldsStream.WriteInt32(Lucene3xStoredFieldsReader.FORMAT_CURRENT);
+                IndexStream.WriteInt32(Lucene3xStoredFieldsReader.FORMAT_CURRENT);
+
+                success = true;
+            }
+            finally
+            {
+                if (!success)
+                {
+                    Abort();
+                }
+            }
+        }
+
+        // Writes the contents of buffer into the fields stream
+        // and adds a new entry for this document into the index
+        // stream.  this assumes the buffer was already written
+        // in the correct fields format.
+        public override void StartDocument(int numStoredFields)
+        {
+            IndexStream.WriteInt64(FieldsStream.FilePointer);
+            FieldsStream.WriteVInt32(numStoredFields);
+        }
+
+        protected override void Dispose(bool disposing)
+        {
+            if (disposing)
+            {
+                try
+                {
+                    IOUtils.Close(FieldsStream, IndexStream);
+                }
+                finally
+                {
+                    FieldsStream = IndexStream = null;
+                }
+            }
+        }
+
+        public override void Abort()
+        {
+            try
+            {
+                Dispose();
+            }
+#pragma warning disable 168
+            catch (Exception ignored)
+#pragma warning restore 168
+            {
+            }
+            IOUtils.DeleteFilesIgnoringExceptions(Directory, IndexFileNames.SegmentFileName(Segment, "", Lucene3xStoredFieldsReader.FIELDS_EXTENSION), IndexFileNames.SegmentFileName(Segment, "", Lucene3xStoredFieldsReader.FIELDS_INDEX_EXTENSION));
+        }
+
+        public override void WriteField(FieldInfo info, IIndexableField field)
+        {
+            FieldsStream.WriteVInt32(info.Number);
+            int bits = 0;
+            BytesRef bytes;
+            string @string;
+            // TODO: maybe a field should serialize itself?
+            // this way we don't bake into indexer all these
+            // specific encodings for different fields?  and apps
+            // can customize...
+
+            object number = field.GetNumericValue();
+            if (number != null)
+            {
+                if (number is sbyte? || number is short? || number is int?)
+                {
+                    bits |= Lucene3xStoredFieldsReader.FIELD_IS_NUMERIC_INT;
+                }
+                else if (number is long?)
+                {
+                    bits |= Lucene3xStoredFieldsReader.FIELD_IS_NUMERIC_LONG;
+                }
+                else if (number is float?)
+                {
+                    bits |= Lucene3xStoredFieldsReader.FIELD_IS_NUMERIC_FLOAT;
+                }
+                else if (number is double?)
+                {
+                    bits |= Lucene3xStoredFieldsReader.FIELD_IS_NUMERIC_DOUBLE;
+                }
+                else
+                {
+                    throw new System.ArgumentException("cannot store numeric type " + number.GetType());
+                }
+                @string = null;
+                bytes = null;
+            }
+            else
+            {
+                bytes = field.GetBinaryValue();
+                if (bytes != null)
+                {
+                    bits |= Lucene3xStoredFieldsReader.FIELD_IS_BINARY;
+                    @string = null;
+                }
+                else
+                {
+                    @string = field.GetStringValue();
+                    if (@string == null)
+                    {
+                        throw new System.ArgumentException("field " + field.Name + " is stored but does not have binaryValue, stringValue nor numericValue");
+                    }
+                }
+            }
+
+            FieldsStream.WriteByte((byte)(sbyte)bits);
+
+            if (bytes != null)
+            {
+                FieldsStream.WriteVInt32(bytes.Length);
+                FieldsStream.WriteBytes(bytes.Bytes, bytes.Offset, bytes.Length);
+            }
+            else if (@string != null)
+            {
+                FieldsStream.WriteString(field.GetStringValue());
+            }
+            else
+            {
+                if (number is sbyte? || number is short? || number is int?)
+                {
+                    // Convert.ToInt32 also handles a boxed sbyte or short;
+                    // a direct (int) unbox would throw InvalidCastException for those.
+                    FieldsStream.WriteInt32(Convert.ToInt32(number));
+                }
+                else if (number is long?)
+                {
+                    FieldsStream.WriteInt64((long)number);
+                }
+                else if (number is float?)
+                {
+                    FieldsStream.WriteInt32(Number.SingleToInt32Bits((float)number));
+                }
+                else if (number is double?)
+                {
+                    FieldsStream.WriteInt64(BitConverter.DoubleToInt64Bits((double)number));
+                }
+                else
+                {
+                    Debug.Assert(false);
+                }
+            }
+        }
+
+        public override void Finish(FieldInfos fis, int numDocs)
+        {
+            // this is most likely a bug in Sun JRE 1.6.0_04/_05;
+            // we detect that the bug has struck, here, and
+            // throw an exception to prevent the corruption from
+            // entering the index.  See LUCENE-1282 for
+            // details.
+            if (4 + ((long)numDocs) * 8 != IndexStream.FilePointer)
+            {
+                throw new Exception("fdx size mismatch: docCount is " + numDocs + " but fdx file size is " + IndexStream.FilePointer + " file=" + IndexStream.ToString() + "; now aborting this merge to prevent index corruption");
+            }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file
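
The Finish check at the end of PreFlexRWStoredFieldsWriter relies on a simple
layout invariant of the .fdx index stream: a 4-byte format header followed by
one 8-byte fields-stream pointer per document, so the stream must be exactly
4 + numDocs * 8 bytes long. A self-contained sketch of the same invariant over
a MemoryStream (names here are illustrative):

    using System;
    using System.IO;

    // Sketch of the .fdx size invariant: 4 header bytes plus one
    // 8-byte pointer for every stored document.
    static class FdxInvariantSketch
    {
        static void Main()
        {
            var indexStream = new MemoryStream();
            var writer = new BinaryWriter(indexStream);

            writer.Write(1); // 4-byte format header (stand-in value)
            int numDocs = 3;
            for (int doc = 0; doc < numDocs; doc++)
            {
                writer.Write((long)(doc * 100)); // pointer into the fields stream
            }

            long expected = 4 + (long)numDocs * 8;
            if (indexStream.Position != expected)
            {
                throw new Exception("fdx size mismatch: docCount is " + numDocs +
                                    " but fdx file size is " + indexStream.Position);
            }
            Console.WriteLine("fdx invariant holds: " + indexStream.Position + " bytes");
        }
    }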

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsFormat.cs
new file mode 100644
index 0000000..871ee07
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsFormat.cs
@@ -0,0 +1,74 @@
+using System;
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using FieldInfos = Lucene.Net.Index.FieldInfos;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
+
+#pragma warning disable 612, 618
+    internal class PreFlexRWTermVectorsFormat : Lucene3xTermVectorsFormat
+    {
+        public override TermVectorsWriter VectorsWriter(Directory directory, SegmentInfo segmentInfo, IOContext context)
+        {
+            return new PreFlexRWTermVectorsWriter(directory, segmentInfo.Name, context);
+        }
+
+        public override TermVectorsReader VectorsReader(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context)
+        {
+            return new Lucene3xTermVectorsReaderAnonymousInnerClassHelper(this, directory, segmentInfo, fieldInfos, context);
+        }
+
+        private class Lucene3xTermVectorsReaderAnonymousInnerClassHelper : Lucene3xTermVectorsReader
+        {
+            private readonly PreFlexRWTermVectorsFormat OuterInstance;
+
+            public Lucene3xTermVectorsReaderAnonymousInnerClassHelper(PreFlexRWTermVectorsFormat outerInstance, Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context)
+                : base(directory, segmentInfo, fieldInfos, context)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override bool SortTermsByUnicode()
+            {
+                // We carefully peek into the stack trace above us: if
+                // we are part of a "merge", we must sort by UTF16:
+                bool unicodeSortOrder = true;
+
+                if (Util.StackTraceHelper.DoesStackTraceContainMethod("Merge"))
+                {
+                    unicodeSortOrder = false;
+                    if (LuceneTestCase.VERBOSE)
+                    {
+                        Console.WriteLine("NOTE: PreFlexRW codec: forcing legacy UTF16 vector term sort order");
+                    }
+                }
+
+                return unicodeSortOrder;
+            }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs
new file mode 100644
index 0000000..db3e4c3
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/PreFlexRWTermVectorsWriter.cs
@@ -0,0 +1,243 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using FieldInfo = Lucene.Net.Index.FieldInfo;
+    using FieldInfos = Lucene.Net.Index.FieldInfos;
+    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using StringHelper = Lucene.Net.Util.StringHelper;
+
+#pragma warning disable 612, 618
+    internal sealed class PreFlexRWTermVectorsWriter : TermVectorsWriter
+    {
+        private readonly Directory Directory;
+        private readonly string Segment;
+        private IndexOutput Tvx = null, Tvd = null, Tvf = null;
+
+        public PreFlexRWTermVectorsWriter(Directory directory, string segment, IOContext context)
+        {
+            this.Directory = directory;
+            this.Segment = segment;
+            bool success = false;
+            try
+            {
+                // Open files for TermVector storage
+                Tvx = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION), context);
+                Tvx.WriteInt32(Lucene3xTermVectorsReader.FORMAT_CURRENT);
+                Tvd = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), context);
+                Tvd.WriteInt32(Lucene3xTermVectorsReader.FORMAT_CURRENT);
+                Tvf = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION), context);
+                Tvf.WriteInt32(Lucene3xTermVectorsReader.FORMAT_CURRENT);
+                success = true;
+            }
+            finally
+            {
+                if (!success)
+                {
+                    Abort();
+                }
+            }
+        }
+
+        public override void StartDocument(int numVectorFields)
+        {
+            LastFieldName = null;
+            this.NumVectorFields = numVectorFields;
+            Tvx.WriteInt64(Tvd.FilePointer);
+            Tvx.WriteInt64(Tvf.FilePointer);
+            Tvd.WriteVInt32(numVectorFields);
+            FieldCount = 0;
+            Fps = ArrayUtil.Grow(Fps, numVectorFields);
+        }
+
+        private long[] Fps = new long[10]; // pointers to the tvf before writing each field
+        private int FieldCount = 0; // number of fields we have written so far for this document
+        private int NumVectorFields = 0; // total number of fields we will write for this document
+        private string LastFieldName;
+
+        public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads)
+        {
+            Debug.Assert(LastFieldName == null || info.Name.CompareTo(LastFieldName) > 0, "fieldName=" + info.Name + " lastFieldName=" + LastFieldName);
+            LastFieldName = info.Name;
+            if (payloads)
+            {
+                throw new System.NotSupportedException("3.x codec does not support payloads on vectors!");
+            }
+            this.Positions = positions;
+            this.Offsets = offsets;
+            LastTerm.Length = 0;
+            Fps[FieldCount++] = Tvf.FilePointer;
+            Tvd.WriteVInt32(info.Number);
+            Tvf.WriteVInt32(numTerms);
+            sbyte bits = 0x0;
+            if (positions)
+            {
+                bits |= Lucene3xTermVectorsReader.STORE_POSITIONS_WITH_TERMVECTOR;
+            }
+            if (offsets)
+            {
+                bits |= Lucene3xTermVectorsReader.STORE_OFFSET_WITH_TERMVECTOR;
+            }
+            Tvf.WriteByte((byte)bits);
+
+            Debug.Assert(FieldCount <= NumVectorFields);
+            if (FieldCount == NumVectorFields)
+            {
+                // last field of the document
+                // this is crazy because the file format is crazy!
+                for (int i = 1; i < FieldCount; i++)
+                {
+                    Tvd.WriteVInt64(Fps[i] - Fps[i - 1]);
+                }
+            }
+        }
+
+        private readonly BytesRef LastTerm = new BytesRef(10);
+
+        // NOTE: we override addProx, so we don't need to buffer when indexing.
+        // we also don't buffer during bulk merges.
+        private int[] OffsetStartBuffer = new int[10];
+
+        private int[] OffsetEndBuffer = new int[10];
+        private int OffsetIndex = 0;
+        private int OffsetFreq = 0;
+        private bool Positions = false;
+        private bool Offsets = false;
+
+        public override void StartTerm(BytesRef term, int freq)
+        {
+            int prefix = StringHelper.BytesDifference(LastTerm, term);
+            int suffix = term.Length - prefix;
+            Tvf.WriteVInt32(prefix);
+            Tvf.WriteVInt32(suffix);
+            Tvf.WriteBytes(term.Bytes, term.Offset + prefix, suffix);
+            Tvf.WriteVInt32(freq);
+            LastTerm.CopyBytes(term);
+            LastPosition = LastOffset = 0;
+
+            if (Offsets && Positions)
+            {
+                // we might need to buffer if its a non-bulk merge
+                OffsetStartBuffer = ArrayUtil.Grow(OffsetStartBuffer, freq);
+                OffsetEndBuffer = ArrayUtil.Grow(OffsetEndBuffer, freq);
+                OffsetIndex = 0;
+                OffsetFreq = freq;
+            }
+        }
+
+        internal int LastPosition = 0;
+        internal int LastOffset = 0;
+
+        public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload)
+        {
+            Debug.Assert(payload == null);
+            if (Positions && Offsets)
+            {
+                // write position delta
+                Tvf.WriteVInt32(position - LastPosition);
+                LastPosition = position;
+
+                // buffer offsets
+                OffsetStartBuffer[OffsetIndex] = startOffset;
+                OffsetEndBuffer[OffsetIndex] = endOffset;
+                OffsetIndex++;
+
+                // dump buffer if we are done
+                if (OffsetIndex == OffsetFreq)
+                {
+                    for (int i = 0; i < OffsetIndex; i++)
+                    {
+                        Tvf.WriteVInt32(OffsetStartBuffer[i] - LastOffset);
+                        Tvf.WriteVInt32(OffsetEndBuffer[i] - OffsetStartBuffer[i]);
+                        LastOffset = OffsetEndBuffer[i];
+                    }
+                }
+            }
+            else if (Positions)
+            {
+                // write position delta
+                Tvf.WriteVInt32(position - LastPosition);
+                LastPosition = position;
+            }
+            else if (Offsets)
+            {
+                // write offset deltas
+                Tvf.WriteVInt32(startOffset - LastOffset);
+                Tvf.WriteVInt32(endOffset - startOffset);
+                LastOffset = endOffset;
+            }
+        }
+
+        public override void Abort()
+        {
+            try
+            {
+                Dispose();
+            }
+#pragma warning disable 168
+            catch (Exception ignored)
+#pragma warning restore 168
+            {
+            }
+            IOUtils.DeleteFilesIgnoringExceptions(Directory, IndexFileNames.SegmentFileName(Segment, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION), IndexFileNames.SegmentFileName(Segment, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), IndexFileNames.SegmentFileName(Segment, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION));
+        }
+
+        public override void Finish(FieldInfos fis, int numDocs)
+        {
+            // this is most likely a bug in Sun JRE 1.6.0_04/_05;
+            // we detect that the bug has struck, here, and
+            // throw an exception to prevent the corruption from
+            // entering the index.  See LUCENE-1282 for
+            // details.
+            if (4 + ((long)numDocs) * 16 != Tvx.FilePointer)
+            {
+                throw new Exception("tvx size mismatch: mergedDocs is " + numDocs + " but tvx size is " + Tvx.FilePointer + " file=" + Tvx.ToString() + "; now aborting this merge to prevent index corruption");
+            }
+        }
+
+        /// <summary>
+        /// Close all streams. </summary>
+        protected override void Dispose(bool disposing)
+        {
+            // make an effort to close all streams we can but remember and re-throw
+            // the first exception encountered in this process
+            IOUtils.Close(Tvx, Tvd, Tvf);
+            Tvx = Tvd = Tvf = null;
+        }
+
+        public override IComparer<BytesRef> Comparer
+        {
+            get
+            {
+                return BytesRef.UTF8SortedAsUTF16Comparer;
+            }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file
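
StartTerm in the term vectors writer front-codes each term against the
previous one: it writes the shared-prefix length, the suffix length, and only
the suffix bytes, which is why LastTerm must be tracked. A minimal sketch of
that front coding over UTF-8 term bytes, in memory and with illustrative
names:

    using System;
    using System.Text;

    // Sketch of the term front coding used by the 3.x term vectors
    // writer: (shared-prefix length, suffix length, suffix bytes),
    // relative to the previous term in sorted order.
    static class TermFrontCodingSketch
    {
        static (int Prefix, int Suffix, byte[] SuffixBytes) Encode(byte[] last, byte[] term)
        {
            int prefix = 0;
            int limit = Math.Min(last.Length, term.Length);
            while (prefix < limit && last[prefix] == term[prefix]) prefix++;
            int suffix = term.Length - prefix;
            var suffixBytes = new byte[suffix];
            Array.Copy(term, prefix, suffixBytes, 0, suffix);
            return (prefix, suffix, suffixBytes);
        }

        static void Main()
        {
            byte[] last = Array.Empty<byte>();
            foreach (var term in new[] { "apple", "applet", "apply" })
            {
                byte[] bytes = Encoding.UTF8.GetBytes(term);
                var (prefix, suffix, suffixBytes) = Encode(last, bytes);
                Console.WriteLine(term + ": prefix=" + prefix + " suffix=" + suffix +
                                  " bytes=" + Encoding.UTF8.GetString(suffixBytes));
                last = bytes;
            }
            // apple: prefix=0 suffix=5 bytes=apple
            // applet: prefix=5 suffix=1 bytes=t
            // apply: prefix=4 suffix=1 bytes=y
        }
    }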


[29/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs
new file mode 100644
index 0000000..5d45b7d
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs
@@ -0,0 +1,2584 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Threading;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.IO;
+    using Util;
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper;
+    using BinaryDocValuesField = BinaryDocValuesField;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CannedTokenStream = Lucene.Net.Analysis.CannedTokenStream;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using IndexInput = Lucene.Net.Store.IndexInput;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using InfoStream = Lucene.Net.Util.InfoStream;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using NumericDocValuesField = NumericDocValuesField;
+    using PhraseQuery = Lucene.Net.Search.PhraseQuery;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using SortedDocValuesField = SortedDocValuesField;
+    using SortedSetDocValuesField = SortedSetDocValuesField;
+    using StringField = StringField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+    using Token = Lucene.Net.Analysis.Token;
+    using TokenFilter = Lucene.Net.Analysis.TokenFilter;
+    using TokenStream = Lucene.Net.Analysis.TokenStream;
+
+    [TestFixture]
+    public class TestIndexWriterExceptions : LuceneTestCase
+    {
+        private class DocCopyIterator : IEnumerable<Document>
+        {
+            internal readonly Document Doc;
+            internal readonly int Count;
+
+            /* FieldType variants exercised below: Custom1/Custom4/Custom5 store term
+               vectors (with positions and offsets), Custom2 is stored and indexed,
+               Custom3 is stored only. */
+
+            internal static readonly FieldType Custom1 = new FieldType(TextField.TYPE_NOT_STORED);
+            internal static readonly FieldType Custom2 = new FieldType();
+            internal static readonly FieldType Custom3 = new FieldType();
+            internal static readonly FieldType Custom4 = new FieldType(StringField.TYPE_NOT_STORED);
+            internal static readonly FieldType Custom5 = new FieldType(TextField.TYPE_STORED);
+
+            static DocCopyIterator()
+            {
+                Custom1.StoreTermVectors = true;
+                Custom1.StoreTermVectorPositions = true;
+                Custom1.StoreTermVectorOffsets = true;
+
+                Custom2.IsStored = true;
+                Custom2.IsIndexed = true;
+
+                Custom3.IsStored = true;
+
+                Custom4.StoreTermVectors = true;
+                Custom4.StoreTermVectorPositions = true;
+                Custom4.StoreTermVectorOffsets = true;
+
+                Custom5.StoreTermVectors = true;
+                Custom5.StoreTermVectorPositions = true;
+                Custom5.StoreTermVectorOffsets = true;
+            }
+
+            public DocCopyIterator(Document doc, int count)
+            {
+                this.Count = count;
+                this.Doc = doc;
+            }
+
+            public virtual IEnumerator<Document> GetEnumerator()
+            {
+                return new IteratorAnonymousInnerClassHelper(this);
+            }
+
+            System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
+            {
+                return GetEnumerator();
+            }
+
+            private class IteratorAnonymousInnerClassHelper : IEnumerator<Document>
+            {
+                private readonly DocCopyIterator OuterInstance;
+
+                public IteratorAnonymousInnerClassHelper(DocCopyIterator outerInstance)
+                {
+                    this.OuterInstance = outerInstance;
+                }
+
+                internal int upto;
+                private Document current;
+
+                public bool MoveNext()
+                {
+                    if (upto >= OuterInstance.Count)
+                    {
+                        return false;
+                    }
+
+                    upto++;
+                    current = OuterInstance.Doc;
+                    return true;
+                }
+
+                public Document Current
+                {
+                    get { return current; }
+                }
+
+                object System.Collections.IEnumerator.Current
+                {
+                    get { return Current; }
+                }
+
+                public void Reset()
+                {
+                    throw new NotImplementedException();
+                }
+
+                public void Dispose()
+                {
+                }
+            }
+        }
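+
+        // Note on DocCopyIterator: it yields the same Document instance Count times, and
+        // IndexWriter.UpdateDocuments consumes that sequence as a single atomic block of
+        // documents, which lets the tests below inject failures in the middle of a
+        // multi-document update.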
+
+        private class IndexerThread : ThreadClass
+        {
+            private static readonly DateTime unixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
+
+            private readonly TestIndexWriterExceptions OuterInstance;
+
+            internal IndexWriter Writer;
+
+            internal readonly Random r = new Random(Random().Next());
+            internal volatile Exception Failure = null;
+
+            public IndexerThread(TestIndexWriterExceptions outerInstance, int i, IndexWriter writer)
+            {
+                this.OuterInstance = outerInstance;
+                Name = "Indexer " + i;
+                this.Writer = writer;
+            }
+
+            public override void Run()
+            {
+                Document doc = new Document();
+
+                doc.Add(OuterInstance.NewTextField(r, "content1", "aaa bbb ccc ddd", Field.Store.YES));
+                doc.Add(OuterInstance.NewField(r, "content6", "aaa bbb ccc ddd", DocCopyIterator.Custom1));
+                doc.Add(OuterInstance.NewField(r, "content2", "aaa bbb ccc ddd", DocCopyIterator.Custom2));
+                doc.Add(OuterInstance.NewField(r, "content3", "aaa bbb ccc ddd", DocCopyIterator.Custom3));
+
+                doc.Add(OuterInstance.NewTextField(r, "content4", "aaa bbb ccc ddd", Field.Store.NO));
+                doc.Add(OuterInstance.NewStringField(r, "content5", "aaa bbb ccc ddd", Field.Store.NO));
+                if (DefaultCodecSupportsDocValues())
+                {
+                    doc.Add(new NumericDocValuesField("numericdv", 5));
+                    doc.Add(new BinaryDocValuesField("binarydv", new BytesRef("hello")));
+                    doc.Add(new SortedDocValuesField("sorteddv", new BytesRef("world")));
+                }
+                if (DefaultCodecSupportsSortedSet())
+                {
+                    doc.Add(new SortedSetDocValuesField("sortedsetdv", new BytesRef("hellllo")));
+                    doc.Add(new SortedSetDocValuesField("sortedsetdv", new BytesRef("again")));
+                }
+
+                doc.Add(OuterInstance.NewField(r, "content7", "aaa bbb ccc ddd", DocCopyIterator.Custom4));
+
+                Field idField = OuterInstance.NewField(r, "id", "", DocCopyIterator.Custom2);
+                doc.Add(idField);
+
+                long stopTime = ((long)(DateTime.UtcNow - unixEpoch).TotalMilliseconds) + 500;
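+                // stopTime is "now + 500 ms" expressed in Unix-epoch milliseconds, matching
+                // the loop condition at the bottom of this method.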
+
+                do
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine(Thread.CurrentThread.Name + ": TEST: IndexerThread: cycle");
+                    }
+                    OuterInstance.DoFail.Value = this.Instance;
+                    string id = "" + r.Next(50);
+                    idField.SetStringValue(id);
+                    Term idTerm = new Term("id", id);
+                    try
+                    {
+                        if (r.NextBoolean())
+                        {
+                            Writer.UpdateDocuments(idTerm, new DocCopyIterator(doc, TestUtil.NextInt(r, 1, 20)));
+                        }
+                        else
+                        {
+                            Writer.UpdateDocument(idTerm, doc);
+                        }
+                    }
+                    catch (TestPoint1Exception re)
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine(Thread.CurrentThread.Name + ": EXC: ");
+                            Console.WriteLine(re.StackTrace);
+                        }
+                        try
+                        {
+                            TestUtil.CheckIndex(Writer.Directory);
+                        }
+                        catch (IOException ioe)
+                        {
+                            Console.WriteLine(Thread.CurrentThread.Name + ": unexpected exception1");
+                            Console.WriteLine(ioe.StackTrace);
+                            Failure = ioe;
+                            break;
+                        }
+                    }
+                    catch (Exception t)
+                    {
+                        Console.WriteLine(Thread.CurrentThread.Name + ": unexpected exception2");
+                        Console.WriteLine(t.StackTrace);
+                        Failure = t;
+                        break;
+                    }
+
+                    OuterInstance.DoFail.Value = null;
+
+                    // After a possible exception (above) I should be able
+                    // to add a new document without hitting an
+                    // exception:
+                    try
+                    {
+                        Writer.UpdateDocument(idTerm, doc);
+                    }
+                    catch (Exception t)
+                    {
+                        Console.WriteLine(Thread.CurrentThread.Name + ": unexpected exception3");
+                        Console.WriteLine(t.StackTrace);
+                        Failure = t;
+                        break;
+                    }
+                } while (((long)(DateTime.UtcNow - unixEpoch).TotalMilliseconds) < stopTime);
+            }
+        }
+
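+        // Per-thread switch for the fault injector: an indexer thread stores itself here just
+        // before an update (see IndexerThread.Run) and clears it afterwards, so TestPoint1 can
+        // only fire while that thread is inside the update window.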
+        internal ThreadLocal<Thread> DoFail = new ThreadLocal<Thread>();
+
+        private class TestPoint1 : RandomIndexWriter.TestPoint
+        {
+            private readonly TestIndexWriterExceptions OuterInstance;
+
+            public TestPoint1(TestIndexWriterExceptions outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            internal Random r = new Random(Random().Next());
+
+            public void Apply(string name)
+            {
+                if (OuterInstance.DoFail.Value != null && !name.Equals("startDoFlush") && r.Next(40) == 17)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine(Thread.CurrentThread.Name + ": NOW FAIL: " + name);
+                        Console.WriteLine((new Exception()).StackTrace);
+                    }
+                    throw new TestPoint1Exception(Thread.CurrentThread.Name + ": intentionally failing at " + name);
+                }
+            }
+        }
+
+        private class TestPoint1Exception : Exception
+        {
+            public TestPoint1Exception(string message) : base(message)
+            {
+            }
+        }
+
+        [Test]
+        public virtual void TestRandomExceptions([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: start testRandomExceptions");
+            }
+            Directory dir = NewDirectory();
+
+            MockAnalyzer analyzer = new MockAnalyzer(Random());
+            analyzer.EnableChecks = false; // disable workflow checking as we forcefully close() in exceptional cases.
+
+
+            var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)
+                            .SetRAMBufferSizeMB(0.1)
+                            .SetMergeScheduler(scheduler);
+
+            scheduler.SetSuppressExceptions();
+
+            IndexWriter writer = RandomIndexWriter.MockIndexWriter(dir, config, new TestPoint1(this));
+            //writer.SetMaxBufferedDocs(10);
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: initial commit");
+            }
+            writer.Commit();
+
+            IndexerThread thread = new IndexerThread(this, 0, writer);
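+            // Note: Run(), not Start(); the indexing loop executes synchronously on the test thread.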
+            thread.Run();
+            if (thread.Failure != null)
+            {
+                Console.WriteLine(thread.Failure.StackTrace);
+                Assert.Fail("thread " + thread.Name + ": hit unexpected failure");
+            }
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: commit after thread start");
+            }
+            writer.Commit();
+
+            try
+            {
+                writer.Dispose();
+            }
+            catch (Exception t)
+            {
+                Console.WriteLine("exception during close:");
+                Console.WriteLine(t.StackTrace);
+                writer.Rollback();
+            }
+
+            // Confirm that when doc hits exception partway through tokenization, it's deleted:
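+            // ("aaa" is the first token of content4 and "ddd" the last; if a partially
+            // tokenized doc survived, the "aaa" count would exceed the "ddd" count.)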
+            IndexReader r2 = DirectoryReader.Open(dir);
+            int count = r2.DocFreq(new Term("content4", "aaa"));
+            int count2 = r2.DocFreq(new Term("content4", "ddd"));
+            Assert.AreEqual(count, count2);
+            r2.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestRandomExceptionsThreads([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            Directory dir = NewDirectory();
+            MockAnalyzer analyzer = new MockAnalyzer(Random());
+            analyzer.EnableChecks = false; // disable workflow checking as we forcefully close() in exceptional cases.
+
+            var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)
+                            .SetRAMBufferSizeMB(0.2)
+                            .SetMergeScheduler(scheduler);
+
+            IndexWriter writer = RandomIndexWriter.MockIndexWriter(dir, config, new TestPoint1(this));
+            scheduler.SetSuppressExceptions();
+
+            //writer.SetMaxBufferedDocs(10);
+            writer.Commit();
+
+            const int NUM_THREADS = 4;
+
+            IndexerThread[] threads = new IndexerThread[NUM_THREADS];
+            for (int i = 0; i < NUM_THREADS; i++)
+            {
+                threads[i] = new IndexerThread(this, i, writer);
+                threads[i].Start();
+            }
+
+            for (int i = 0; i < NUM_THREADS; i++)
+            {
+                threads[i].Join();
+            }
+
+            for (int i = 0; i < NUM_THREADS; i++)
+            {
+                if (threads[i].Failure != null)
+                {
+                    Assert.Fail("thread " + threads[i].Name + ": hit unexpected failure");
+                }
+            }
+
+            writer.Commit();
+
+            try
+            {
+                writer.Dispose();
+            }
+            catch (Exception t)
+            {
+                Console.WriteLine("exception during close:");
+                Console.WriteLine(t.StackTrace);
+                writer.Rollback();
+            }
+
+            // Confirm that when doc hits exception partway through tokenization, it's deleted:
+            IndexReader r2 = DirectoryReader.Open(dir);
+            int count = r2.DocFreq(new Term("content4", "aaa"));
+            int count2 = r2.DocFreq(new Term("content4", "ddd"));
+            Assert.AreEqual(count, count2);
+            r2.Dispose();
+
+            dir.Dispose();
+        }
+
+        // LUCENE-1198
+        private sealed class TestPoint2 : RandomIndexWriter.TestPoint
+        {
+            internal bool DoFail;
+
+            public void Apply(string name)
+            {
+                if (DoFail && name.Equals("DocumentsWriterPerThread addDocument start"))
+                {
+                    throw new Exception("intentionally failing");
+                }
+            }
+        }
+
+        private const string CRASH_FAIL_MESSAGE = "I'm experiencing problems";
+
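+        // CrashingFilter throws an IOException on the fifth token of any field named "crash",
+        // simulating an analyzer that fails partway through a document; Reset() restores the
+        // counter so the same filter instance can be reused across documents.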
+        private class CrashingFilter : TokenFilter
+        {
+            private readonly TestIndexWriterExceptions OuterInstance;
+
+            internal string FieldName;
+            internal int Count;
+
+            public CrashingFilter(TestIndexWriterExceptions outerInstance, string fieldName, TokenStream input)
+                : base(input)
+            {
+                this.OuterInstance = outerInstance;
+                this.FieldName = fieldName;
+            }
+
+            public sealed override bool IncrementToken()
+            {
+                if (this.FieldName.Equals("crash") && Count++ >= 4)
+                {
+                    throw new IOException(CRASH_FAIL_MESSAGE);
+                }
+                return m_input.IncrementToken();
+            }
+
+            public override void Reset()
+            {
+                base.Reset();
+                Count = 0;
+            }
+        }
+
+        [Test]
+        public virtual void TestExceptionDocumentsWriterInit()
+        {
+            Directory dir = NewDirectory();
+            TestPoint2 testPoint = new TestPoint2();
+            IndexWriter w = RandomIndexWriter.MockIndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())), testPoint);
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "a field", Field.Store.YES));
+            w.AddDocument(doc);
+            testPoint.DoFail = true;
+            try
+            {
+                w.AddDocument(doc);
+                Assert.Fail("did not hit exception");
+            }
+#pragma warning disable 168
+            catch (Exception re)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-1208
+        [Test]
+        public virtual void TestExceptionJustBeforeFlush()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = RandomIndexWriter.MockIndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2), new TestPoint1(this));
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "a field", Field.Store.YES));
+            w.AddDocument(doc);
+
+            Analyzer analyzer = new TEJBFAnalyzerAnonymousInnerClassHelper(this, Analyzer.PER_FIELD_REUSE_STRATEGY);
+
+            Document crashDoc = new Document();
+            crashDoc.Add(NewTextField("crash", "do it on token 4", Field.Store.YES));
+            try
+            {
+                w.AddDocument(crashDoc, analyzer);
+                Assert.Fail("did not hit expected exception");
+            }
+#pragma warning disable 168
+            catch (IOException ioe)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            w.AddDocument(doc);
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        private class TEJBFAnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private readonly TestIndexWriterExceptions OuterInstance;
+
+            public TEJBFAnalyzerAnonymousInnerClassHelper(TestIndexWriterExceptions outerInstance, Analyzer.ReuseStrategy PER_FIELD_REUSE_STRATEGY)
+                : base(PER_FIELD_REUSE_STRATEGY)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                MockTokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                tokenizer.EnableChecks = false; // disable workflow checking as we forcefully close() in exceptional cases.
+                return new TokenStreamComponents(tokenizer, new CrashingFilter(OuterInstance, fieldName, tokenizer));
+            }
+        }
+
+        private sealed class TestPoint3 : RandomIndexWriter.TestPoint
+        {
+            internal bool DoFail;
+            internal bool Failed;
+
+            public void Apply(string name)
+            {
+                if (DoFail && name.Equals("startMergeInit"))
+                {
+                    Failed = true;
+                    throw new Exception("intentionally failing");
+                }
+            }
+        }
+
+        // LUCENE-1210
+        [Test]
+        public virtual void TestExceptionOnMergeInit([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy());
+
+            scheduler.SetSuppressExceptions();
+            conf.SetMergeScheduler(scheduler);
+            ((LogMergePolicy)conf.MergePolicy).MergeFactor = 2;
+            TestPoint3 testPoint = new TestPoint3();
+            IndexWriter w = RandomIndexWriter.MockIndexWriter(dir, conf, testPoint);
+            testPoint.DoFail = true;
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "a field", Field.Store.YES));
+            for (int i = 0; i < 10; i++)
+            {
+                try
+                {
+                    w.AddDocument(doc);
+                }
+                catch (Exception)
+                {
+                    break;
+                }
+            }
+
+            ((IConcurrentMergeScheduler)w.Config.MergeScheduler).Sync();
+            Assert.IsTrue(testPoint.Failed);
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-1072
+        [Test]
+        public virtual void TestExceptionFromTokenStream()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new TEFTSAnalyzerAnonymousInnerClassHelper(this));
+            conf.SetMaxBufferedDocs(Math.Max(3, conf.MaxBufferedDocs));
+
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            string contents = "aa bb cc dd ee ff gg hh ii jj kk";
+            doc.Add(NewTextField("content", contents, Field.Store.NO));
+            try
+            {
+                writer.AddDocument(doc);
+                Assert.Fail("did not hit expected exception");
+            }
+#pragma warning disable 168
+            catch (Exception e)
+#pragma warning restore 168
+            {
+            }
+
+            // Make sure we can add another normal document
+            doc = new Document();
+            doc.Add(NewTextField("content", "aa bb cc dd", Field.Store.NO));
+            writer.AddDocument(doc);
+
+            // Make sure we can add another normal document
+            doc = new Document();
+            doc.Add(NewTextField("content", "aa bb cc dd", Field.Store.NO));
+            writer.AddDocument(doc);
+
+            writer.Dispose();
+            IndexReader reader = DirectoryReader.Open(dir);
+            Term t = new Term("content", "aa");
+            Assert.AreEqual(3, reader.DocFreq(t));
+
+            // Make sure the doc that hit the exception was marked
+            // as deleted:
+            DocsEnum tdocs = TestUtil.Docs(Random(), reader, t.Field, new BytesRef(t.Text()), MultiFields.GetLiveDocs(reader), null, 0);
+
+            int count = 0;
+            while (tdocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                count++;
+            }
+            Assert.AreEqual(2, count);
+
+            Assert.AreEqual(reader.DocFreq(new Term("content", "gg")), 0);
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        private class TEFTSAnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private readonly TestIndexWriterExceptions OuterInstance;
+
+            public TEFTSAnalyzerAnonymousInnerClassHelper(TestIndexWriterExceptions outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                MockTokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+                tokenizer.EnableChecks = false; // disable workflow checking as we forcefully close() in exceptional cases.
+                return new TokenStreamComponents(tokenizer, new TokenFilterAnonymousInnerClassHelper(this, tokenizer));
+            }
+
+            private class TokenFilterAnonymousInnerClassHelper : TokenFilter
+            {
+                private readonly TEFTSAnalyzerAnonymousInnerClassHelper OuterInstance;
+
+                public TokenFilterAnonymousInnerClassHelper(TEFTSAnalyzerAnonymousInnerClassHelper outerInstance, MockTokenizer tokenizer)
+                    : base(tokenizer)
+                {
+                    this.OuterInstance = outerInstance;
+                    count = 0;
+                }
+
+                private int count;
+
+                public sealed override bool IncrementToken()
+                {
+                    if (count++ == 5)
+                    {
+                        throw new IOException();
+                    }
+                    return m_input.IncrementToken();
+                }
+
+                public override void Reset()
+                {
+                    base.Reset();
+                    this.count = 0;
+                }
+            }
+        }
+
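+        // MockDirectoryWrapper calls Failure.Eval on every file operation; FailOnlyOnFlush
+        // inspects the current stack and, once it has seen about 30 operations issued from
+        // inside FreqProxTermsWriterPerField.Flush (i.e. during a segment flush), throws a
+        // single IOException and disarms itself.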
+        private class FailOnlyOnFlush : MockDirectoryWrapper.Failure
+        {
+            internal new bool DoFail = false;
+            internal int Count;
+
+            public override void SetDoFail()
+            {
+                this.DoFail = true;
+            }
+
+            public override void ClearDoFail()
+            {
+                this.DoFail = false;
+            }
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
+                if (DoFail)
+                {
+                    bool sawAppend = StackTraceHelper.DoesStackTraceContainMethod(typeof(FreqProxTermsWriterPerField).Name, "Flush");
+                    bool sawFlush = StackTraceHelper.DoesStackTraceContainMethod("Flush");
+
+                    if (sawAppend && sawFlush && Count++ >= 30)
+                    {
+                        DoFail = false;
+                        throw new IOException("now failing during flush");
+                    }
+                }
+            }
+        }
+
+        // LUCENE-1072: make sure an errant exception on flushing
+        // one segment only takes out those docs in that one flush
+        [Test]
+        public virtual void TestDocumentsWriterAbort()
+        {
+            MockDirectoryWrapper dir = NewMockDirectory();
+            FailOnlyOnFlush failure = new FailOnlyOnFlush();
+            failure.SetDoFail();
+            dir.FailOn(failure);
+
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
+            Document doc = new Document();
+            string contents = "aa bb cc dd ee ff gg hh ii jj kk";
+            doc.Add(NewTextField("content", contents, Field.Store.NO));
+            bool hitError = false;
+            for (int i = 0; i < 200; i++)
+            {
+                try
+                {
+                    writer.AddDocument(doc);
+                }
+#pragma warning disable 168
+                catch (IOException ioe)
+#pragma warning restore 168
+                {
+                    // only one flush should fail:
+                    Assert.IsFalse(hitError);
+                    hitError = true;
+                }
+            }
+            Assert.IsTrue(hitError);
+            writer.Dispose();
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(198, reader.DocFreq(new Term("content", "aa")));
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDocumentsWriterExceptions()
+        {
+            Analyzer analyzer = new TDWEAnalyzerAnonymousInnerClassHelper(this, Analyzer.PER_FIELD_REUSE_STRATEGY);
+
+            for (int i = 0; i < 2; i++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: cycle i=" + i);
+                }
+                Directory dir = NewDirectory();
+                IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMergePolicy(NewLogMergePolicy()));
+
+                // don't allow a sudden merge to clean up the deleted
+                // doc below:
+                LogMergePolicy lmp = (LogMergePolicy)writer.Config.MergePolicy;
+                lmp.MergeFactor = Math.Max(lmp.MergeFactor, 5);
+
+                Document doc = new Document();
+                doc.Add(NewField("contents", "here are some contents", DocCopyIterator.Custom5));
+                writer.AddDocument(doc);
+                writer.AddDocument(doc);
+                doc.Add(NewField("crash", "this should crash after 4 terms", DocCopyIterator.Custom5));
+                doc.Add(NewField("other", "this will not get indexed", DocCopyIterator.Custom5));
+                try
+                {
+                    writer.AddDocument(doc);
+                    Assert.Fail("did not hit expected exception");
+                }
+                catch (IOException ioe)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: hit expected exception");
+                        Console.WriteLine(ioe.StackTrace);
+                    }
+                }
+
+                if (0 == i)
+                {
+                    doc = new Document();
+                    doc.Add(NewField("contents", "here are some contents", DocCopyIterator.Custom5));
+                    writer.AddDocument(doc);
+                    writer.AddDocument(doc);
+                }
+                writer.Dispose();
+
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: open reader");
+                }
+                IndexReader reader = DirectoryReader.Open(dir);
+                if (i == 0)
+                {
+                    int expected = 5;
+                    Assert.AreEqual(expected, reader.DocFreq(new Term("contents", "here")));
+                    Assert.AreEqual(expected, reader.MaxDoc);
+                    int numDel = 0;
+                    IBits liveDocs = MultiFields.GetLiveDocs(reader);
+                    Assert.IsNotNull(liveDocs);
+                    for (int j = 0; j < reader.MaxDoc; j++)
+                    {
+                        if (!liveDocs.Get(j))
+                        {
+                            numDel++;
+                        }
+                        else
+                        {
+                            reader.Document(j);
+                            reader.GetTermVectors(j);
+                        }
+                    }
+                    Assert.AreEqual(1, numDel);
+                }
+                reader.Dispose();
+
+                writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(10));
+                doc = new Document();
+                doc.Add(NewField("contents", "here are some contents", DocCopyIterator.Custom5));
+                for (int j = 0; j < 17; j++)
+                {
+                    writer.AddDocument(doc);
+                }
+                writer.ForceMerge(1);
+                writer.Dispose();
+
+                reader = DirectoryReader.Open(dir);
+                int expected_ = 19 + (1 - i) * 2;
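+                // i == 0: 2 docs + 2 re-added docs survive, plus the 17 added here = 21 once the
+                // merge drops the deleted doc; i == 1: the first 2 docs + 17 = 19.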
+                Assert.AreEqual(expected_, reader.DocFreq(new Term("contents", "here")));
+                Assert.AreEqual(expected_, reader.MaxDoc);
+                int numDel_ = 0;
+                Assert.IsNull(MultiFields.GetLiveDocs(reader));
+                for (int j = 0; j < reader.MaxDoc; j++)
+                {
+                    reader.Document(j);
+                    reader.GetTermVectors(j);
+                }
+                reader.Dispose();
+                Assert.AreEqual(0, numDel_);
+
+                dir.Dispose();
+            }
+        }
+
+        private class TDWEAnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private readonly TestIndexWriterExceptions OuterInstance;
+
+            public TDWEAnalyzerAnonymousInnerClassHelper(TestIndexWriterExceptions outerInstance, Analyzer.ReuseStrategy PER_FIELD_REUSE_STRATEGY)
+                : base(PER_FIELD_REUSE_STRATEGY)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                MockTokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                tokenizer.EnableChecks = false; // disable workflow checking as we forcefully close() in exceptional cases.
+                return new TokenStreamComponents(tokenizer, new CrashingFilter(OuterInstance, fieldName, tokenizer));
+            }
+        }
+
+        [Test]
+        public virtual void TestDocumentsWriterExceptionThreads()
+        {
+            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper2(this, Analyzer.PER_FIELD_REUSE_STRATEGY);
+
+            const int NUM_THREAD = 3;
+            const int NUM_ITER = 100;
+
+            for (int i = 0; i < 2; i++)
+            {
+                Directory dir = NewDirectory();
+
+                {
+                    IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(-1).SetMergePolicy(Random().NextBoolean() ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES));
+                    // don't use a merge policy here; merge policies depend on the DWPThreadPool and its max thread states, etc.
+                    int finalI = i;
+
+                    ThreadClass[] threads = new ThreadClass[NUM_THREAD];
+                    for (int t = 0; t < NUM_THREAD; t++)
+                    {
+                        threads[t] = new ThreadAnonymousInnerClassHelper(this, NUM_ITER, writer, finalI, t);
+                        threads[t].Start();
+                    }
+
+                    for (int t = 0; t < NUM_THREAD; t++)
+                    {
+                        threads[t].Join();
+                    }
+
+                    writer.Dispose();
+                }
+
+                IndexReader reader = DirectoryReader.Open(dir);
+                int expected = (3 + (1 - i) * 2) * NUM_THREAD * NUM_ITER;
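+                // Per thread-iteration: 2 good docs plus 1 crashed doc (still counted in MaxDoc,
+                // and its "contents" field was indexed before the crash), plus 2 re-added docs
+                // when i == 0, i.e. 3 or 5 docs per iteration.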
+                Assert.AreEqual(expected, reader.DocFreq(new Term("contents", "here")), "i=" + i);
+                Assert.AreEqual(expected, reader.MaxDoc);
+                int numDel = 0;
+                IBits liveDocs = MultiFields.GetLiveDocs(reader);
+                Assert.IsNotNull(liveDocs);
+                for (int j = 0; j < reader.MaxDoc; j++)
+                {
+                    if (!liveDocs.Get(j))
+                    {
+                        numDel++;
+                    }
+                    else
+                    {
+                        reader.Document(j);
+                        reader.GetTermVectors(j);
+                    }
+                }
+                reader.Dispose();
+
+                Assert.AreEqual(NUM_THREAD * NUM_ITER, numDel);
+
+                IndexWriter indWriter = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(10));
+                Document doc = new Document();
+                doc.Add(NewField("contents", "here are some contents", DocCopyIterator.Custom5));
+                for (int j = 0; j < 17; j++)
+                {
+                    indWriter.AddDocument(doc);
+                }
+                indWriter.ForceMerge(1);
+                indWriter.Dispose();
+
+                reader = DirectoryReader.Open(dir);
+                expected += 17 - NUM_THREAD * NUM_ITER;
+                Assert.AreEqual(expected, reader.DocFreq(new Term("contents", "here")));
+                Assert.AreEqual(expected, reader.MaxDoc);
+                Assert.IsNull(MultiFields.GetLiveDocs(reader));
+                for (int j = 0; j < reader.MaxDoc; j++)
+                {
+                    reader.Document(j);
+                    reader.GetTermVectors(j);
+                }
+                reader.Dispose();
+
+                dir.Dispose();
+            }
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper2 : Analyzer
+        {
+            private readonly TestIndexWriterExceptions OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper2(TestIndexWriterExceptions outerInstance, Analyzer.ReuseStrategy PER_FIELD_REUSE_STRATEGY)
+                : base(PER_FIELD_REUSE_STRATEGY)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                MockTokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                tokenizer.EnableChecks = false; // disable workflow checking as we forcefully close() in exceptional cases.
+                return new TokenStreamComponents(tokenizer, new CrashingFilter(OuterInstance, fieldName, tokenizer));
+            }
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestIndexWriterExceptions OuterInstance;
+
+            private int NUM_ITER;
+            private IndexWriter Writer;
+            private int FinalI;
+            private int t;
+
+            public ThreadAnonymousInnerClassHelper(TestIndexWriterExceptions outerInstance, int NUM_ITER, IndexWriter writer, int finalI, int t)
+            {
+                this.OuterInstance = outerInstance;
+                this.NUM_ITER = NUM_ITER;
+                this.Writer = writer;
+                this.FinalI = finalI;
+                this.t = t;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    for (int iter = 0; iter < NUM_ITER; iter++)
+                    {
+                        Document doc = new Document();
+                        doc.Add(OuterInstance.NewField("contents", "here are some contents", DocCopyIterator.Custom5));
+                        Writer.AddDocument(doc);
+                        Writer.AddDocument(doc);
+                        doc.Add(OuterInstance.NewField("crash", "this should crash after 4 terms", DocCopyIterator.Custom5));
+                        doc.Add(OuterInstance.NewField("other", "this will not get indexed", DocCopyIterator.Custom5));
+                        try
+                        {
+                            Writer.AddDocument(doc);
+                            Assert.Fail("did not hit expected exception");
+                        }
+#pragma warning disable 168
+                        catch (IOException ioe)
+#pragma warning restore 168
+                        {
+                        }
+
+                        if (0 == FinalI)
+                        {
+                            doc = new Document();
+                            doc.Add(OuterInstance.NewField("contents", "here are some contents", DocCopyIterator.Custom5));
+                            Writer.AddDocument(doc);
+                            Writer.AddDocument(doc);
+                        }
+                    }
+                }
+                catch (Exception t)
+                {
+                    lock (this)
+                    {
+                        Console.WriteLine(Thread.CurrentThread.Name + ": ERROR: hit unexpected exception");
+                        Console.WriteLine(t.StackTrace);
+                    }
+                    Assert.Fail();
+                }
+            }
+        }
+
+        // Throws IOException during MockDirectoryWrapper.sync
+        private class FailOnlyInSync : MockDirectoryWrapper.Failure
+        {
+            internal bool DidFail;
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
+                if (DoFail)
+                {
+                    bool foundMethod =
+                        StackTraceHelper.DoesStackTraceContainMethod(typeof(MockDirectoryWrapper).Name, "Sync");
+
+                    if (foundMethod)
+                    {
+                        DidFail = true;
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: now throw exc:");
+                            Console.WriteLine(Environment.StackTrace);
+                        }
+                        throw new IOException("now failing on purpose during sync");
+                    }
+                }
+            }
+        }
+
+        // TODO: these are also in TestIndexWriter... add a simple doc-writing method
+        // like this to LuceneTestCase?
+        private void AddDoc(IndexWriter writer)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+            writer.AddDocument(doc);
+        }
+
+        // LUCENE-1044: test exception during sync
+        [Test]
+        public virtual void TestExceptionDuringSync([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            MockDirectoryWrapper dir = NewMockDirectory();
+            FailOnlyInSync failure = new FailOnlyInSync();
+            dir.FailOn(failure);
+
+            var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                            .SetMaxBufferedDocs(2)
+                            .SetMergeScheduler(scheduler)
+                            .SetMergePolicy(NewLogMergePolicy(5));
+
+            IndexWriter writer = new IndexWriter(dir, config);
+            failure.SetDoFail();
+
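+            // With MaxBufferedDocs = 2 every other AddDoc triggers a flush; a commit is attempted
+            // on each odd i, and some commits are expected to fail while the sync failure is armed.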
+            for (int i = 0; i < 23; i++)
+            {
+                AddDoc(writer);
+                if ((i - 1) % 2 == 0)
+                {
+                    try
+                    {
+                        writer.Commit();
+                    }
+#pragma warning disable 168
+                    catch (IOException ioe)
+#pragma warning restore 168
+                    {
+                        // expected
+                    }
+                }
+            }
+            ((IConcurrentMergeScheduler)writer.Config.MergeScheduler).Sync();
+            Assert.IsTrue(failure.DidFail);
+            failure.ClearDoFail();
+            writer.Dispose();
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(23, reader.NumDocs);
+            reader.Dispose();
+            dir.Dispose();
+        }
+
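+        // FailOnlyInCommit targets SegmentInfos' two-phase commit: it throws a plain Exception
+        // when the configured stage (PrepareCommit/FinishCommit) is on the stack, then an
+        // IOException when cleanup later tries to DeleteFile; the test below asserts both fired.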
+        private class FailOnlyInCommit : MockDirectoryWrapper.Failure
+        {
+            internal bool FailOnCommit, FailOnDeleteFile;
+            internal readonly bool DontFailDuringGlobalFieldMap;
+            internal const string PREPARE_STAGE = "PrepareCommit";
+            internal const string FINISH_STAGE = "FinishCommit";
+            internal readonly string Stage;
+
+            public FailOnlyInCommit(bool dontFailDuringGlobalFieldMap, string stage)
+            {
+                this.DontFailDuringGlobalFieldMap = dontFailDuringGlobalFieldMap;
+                this.Stage = stage;
+            }
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
+                bool isCommit = StackTraceHelper.DoesStackTraceContainMethod(typeof(SegmentInfos).Name, Stage);
+                bool isDelete = StackTraceHelper.DoesStackTraceContainMethod(typeof(MockDirectoryWrapper).Name, "DeleteFile");
+                bool isInGlobalFieldMap = StackTraceHelper.DoesStackTraceContainMethod(typeof(SegmentInfos).Name, "WriteGlobalFieldMap");
+
+                if (isInGlobalFieldMap && DontFailDuringGlobalFieldMap)
+                {
+                    isCommit = false;
+                }
+                if (isCommit)
+                {
+                    if (!isDelete)
+                    {
+                        FailOnCommit = true;
+                        throw new Exception("now fail first");
+                    }
+                    else
+                    {
+                        FailOnDeleteFile = true;
+                        throw new IOException("now fail during delete");
+                    }
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestExceptionsDuringCommit()
+        {
+            FailOnlyInCommit[] failures = new FailOnlyInCommit[] { new FailOnlyInCommit(false, FailOnlyInCommit.PREPARE_STAGE), new FailOnlyInCommit(true, FailOnlyInCommit.PREPARE_STAGE), new FailOnlyInCommit(false, FailOnlyInCommit.FINISH_STAGE) };
+
+            foreach (FailOnlyInCommit failure in failures)
+            {
+                MockDirectoryWrapper dir = NewMockDirectory();
+                dir.FailOnCreateOutput = false;
+                IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                Document doc = new Document();
+                doc.Add(NewTextField("field", "a field", Field.Store.YES));
+                w.AddDocument(doc);
+                dir.FailOn(failure);
+                try
+                {
+                    w.Dispose();
+                    Assert.Fail();
+                }
+#pragma warning disable 168
+                catch (IOException ioe)
+#pragma warning restore 168
+                {
+                    Assert.Fail("expected only RuntimeException");
+                }
+#pragma warning disable 168
+                catch (Exception re)
+#pragma warning restore 168
+                {
+                    // Expected
+                }
+                Assert.IsTrue(failure.FailOnCommit && failure.FailOnDeleteFile);
+                w.Rollback();
+                string[] files = dir.ListAll();
+                Assert.IsTrue(files.Length == 0 || Arrays.Equals(files, new string[] { IndexWriter.WRITE_LOCK_NAME }));
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestForceMergeExceptions([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            Directory startDir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy());
+            ((LogMergePolicy)conf.MergePolicy).MergeFactor = 100;
+            IndexWriter w = new IndexWriter(startDir, conf);
+            for (int i = 0; i < 27; i++)
+            {
+                AddDoc(w);
+            }
+            w.Dispose();
+
+            int iter = TEST_NIGHTLY ? 200 : 10;
+            for (int i = 0; i < iter; i++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: iter " + i);
+                }
+                MockDirectoryWrapper dir = new MockDirectoryWrapper(Random(), new RAMDirectory(startDir, NewIOContext(Random())));
+                conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(scheduler);
+                scheduler.SetSuppressExceptions();
+                w = new IndexWriter(dir, conf);
+                dir.RandomIOExceptionRate = 0.5;
+                try
+                {
+                    w.ForceMerge(1);
+                }
+                catch (IOException ioe)
+                {
+                    if (ioe.InnerException == null)
+                    {
+                        Assert.Fail("forceMerge threw IOException without root cause");
+                    }
+                }
+                dir.RandomIOExceptionRate = 0;
+                w.Dispose();
+                dir.Dispose();
+            }
+            startDir.Dispose();
+        }
+
+        // LUCENE-1429
+        [Test]
+        public virtual void TestOutOfMemoryErrorCausesCloseToFail()
+        {
+            AtomicBoolean thrown = new AtomicBoolean(false);
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetInfoStream(new TOOMInfoStreamAnonymousInnerClassHelper(this, thrown)));
+
+            try
+            {
+                writer.Dispose();
+                Assert.Fail("OutOfMemoryError expected");
+            }
+#pragma warning disable 168
+            catch (System.OutOfMemoryException expected)
+#pragma warning restore 168
+            {
+            }
+
+            // without the bug fix, this second Dispose() throws IllegalStateException
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        private class TOOMInfoStreamAnonymousInnerClassHelper : InfoStream
+        {
+            private readonly TestIndexWriterExceptions OuterInstance;
+
+            private AtomicBoolean Thrown;
+
+            public TOOMInfoStreamAnonymousInnerClassHelper(TestIndexWriterExceptions outerInstance, AtomicBoolean thrown)
+            {
+                this.OuterInstance = outerInstance;
+                this.Thrown = thrown;
+            }
+
+            public override void Message(string component, string message)
+            {
+                if (message.StartsWith("now flush at close") && Thrown.CompareAndSet(false, true))
+                {
+                    throw new System.OutOfMemoryException("fake OOME at " + message);
+                }
+            }
+
+            public override bool IsEnabled(string component)
+            {
+                return true;
+            }
+
+            public override void Dispose()
+            {
+            }
+        }
+
+        // LUCENE-1347
+        private sealed class TestPoint4 : RandomIndexWriter.TestPoint
+        {
+            internal bool DoFail;
+
+            public void Apply(string name)
+            {
+                if (DoFail && name.Equals("rollback before checkpoint"))
+                {
+                    throw new Exception("intentionally failing");
+                }
+            }
+        }
+
+        // LUCENE-1347
+        [Test]
+        public virtual void TestRollbackExceptionHang()
+        {
+            Directory dir = NewDirectory();
+            TestPoint4 testPoint = new TestPoint4();
+            IndexWriter w = RandomIndexWriter.MockIndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())), testPoint);
+
+            AddDoc(w);
+            testPoint.DoFail = true;
+            try
+            {
+                w.Rollback();
+                Assert.Fail("did not hit intentional RuntimeException");
+            }
+#pragma warning disable 168
+            catch (Exception re)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            testPoint.DoFail = false;
+            w.Rollback();
+            dir.Dispose();
+        }
+
+        // LUCENE-1044: Simulate checksum error in segments_N
+        [Test]
+        public virtual void TestSegmentsChecksumError()
+        {
+            Directory dir = NewDirectory();
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            // add 100 documents
+            for (int i = 0; i < 100; i++)
+            {
+                AddDoc(writer);
+            }
+
+            // close
+            writer.Dispose();
+
+            long gen = SegmentInfos.GetLastCommitGeneration(dir);
+            Assert.IsTrue(gen > 0, "segment generation should be > 0 but got " + gen);
+
+            string segmentsFileName = SegmentInfos.GetLastCommitSegmentsFileName(dir);
+            IndexInput @in = dir.OpenInput(segmentsFileName, NewIOContext(Random()));
+            IndexOutput @out = dir.CreateOutput(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", 1 + gen), NewIOContext(Random()));
+            @out.CopyBytes(@in, @in.Length - 1);
+            byte b = @in.ReadByte();
+            @out.WriteByte((byte)(1 + b));
+            @out.Dispose();
+            @in.Dispose();
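+
+            // The copy above reproduces segments_N byte for byte except the final byte, which is
+            // incremented, so the new segments_(N+1) fails its checksum; DirectoryReader.Open must
+            // detect this and fall back to the previous, valid generation.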
+
+            IndexReader reader = null;
+            try
+            {
+                reader = DirectoryReader.Open(dir);
+            }
+            catch (IOException e)
+            {
+                Console.WriteLine(e.StackTrace);
+                Assert.Fail("segmentInfos failed to retry fallback to correct segments_N file");
+            }
+            reader.Dispose();
+
+            // should remove the corrupted segments_N
+            (new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null))).Dispose();
+            dir.Dispose();
+        }
+
+        // Simulate a corrupt index by removing last byte of
+        // latest segments file and make sure we get an
+        // IOException trying to open the index:
+        [Test]
+        public virtual void TestSimulatedCorruptIndex1()
+        {
+            BaseDirectoryWrapper dir = NewDirectory();
+            dir.CheckIndexOnClose = false; // we are corrupting it!
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            // add 100 documents
+            for (int i = 0; i < 100; i++)
+            {
+                AddDoc(writer);
+            }
+
+            // close
+            writer.Dispose();
+
+            long gen = SegmentInfos.GetLastCommitGeneration(dir);
+            Assert.IsTrue(gen > 0, "segment generation should be > 0 but got " + gen);
+
+            string fileNameIn = SegmentInfos.GetLastCommitSegmentsFileName(dir);
+            string fileNameOut = IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", 1 + gen);
+            IndexInput @in = dir.OpenInput(fileNameIn, NewIOContext(Random()));
+            IndexOutput @out = dir.CreateOutput(fileNameOut, NewIOContext(Random()));
+            long length = @in.Length;
+            for (int i = 0; i < length - 1; i++)
+            {
+                @out.WriteByte(@in.ReadByte());
+            }
+            @in.Dispose();
+            @out.Dispose();
+            dir.DeleteFile(fileNameIn);
+
+            IndexReader reader = null;
+            try
+            {
+                reader = DirectoryReader.Open(dir);
+                Assert.Fail("reader did not hit IOException on opening a corrupt index");
+            }
+#pragma warning disable 168
+            catch (Exception e)
+#pragma warning restore 168
+            {
+            }
+            if (reader != null)
+            {
+                reader.Dispose();
+            }
+            dir.Dispose();
+        }
+
+        // Simulate a corrupt index by removing one of the cfs
+        // files and make sure we get an IOException trying to
+        // open the index:
+        [Test]
+        public virtual void TestSimulatedCorruptIndex2()
+        {
+            BaseDirectoryWrapper dir = NewDirectory();
+            dir.CheckIndexOnClose = false; // we are corrupting it!
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy(true)).SetUseCompoundFile(true));
+            MergePolicy lmp = writer.Config.MergePolicy;
+            // Force creation of CFS:
+            lmp.NoCFSRatio = 1.0;
+            lmp.MaxCFSSegmentSizeMB = double.PositiveInfinity;
+
+            // add 100 documents
+            for (int i = 0; i < 100; i++)
+            {
+                AddDoc(writer);
+            }
+
+            // close
+            writer.Dispose();
+
+            long gen = SegmentInfos.GetLastCommitGeneration(dir);
+            Assert.IsTrue(gen > 0, "segment generation should be > 0 but got " + gen);
+
+            string[] files = dir.ListAll();
+            bool corrupted = false;
+            for (int i = 0; i < files.Length; i++)
+            {
+                if (files[i].EndsWith(".cfs"))
+                {
+                    dir.DeleteFile(files[i]);
+                    corrupted = true;
+                    break;
+                }
+            }
+            Assert.IsTrue(corrupted, "failed to find cfs file to remove");
+
+            IndexReader reader = null;
+            try
+            {
+                reader = DirectoryReader.Open(dir);
+                Assert.Fail("reader did not hit IOException on opening a corrupt index");
+            }
+#pragma warning disable 168
+            catch (Exception e)
+#pragma warning restore 168
+            {
+            }
+            if (reader != null)
+            {
+                reader.Dispose();
+            }
+            dir.Dispose();
+        }
+
+        // Simulate a writer that crashed while writing the segments
+        // file: make sure we can still open the index (i.e.,
+        // gracefully fall back to the previous segments file),
+        // and that we can add to the index:
+        [Test]
+        public virtual void TestSimulatedCrashedWriter()
+        {
+            Directory dir = NewDirectory();
+            if (dir is MockDirectoryWrapper)
+            {
+                ((MockDirectoryWrapper)dir).PreventDoubleWrite = false;
+            }
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            // add 100 documents
+            for (int i = 0; i < 100; i++)
+            {
+                AddDoc(writer);
+            }
+
+            // close
+            writer.Dispose();
+
+            long gen = SegmentInfos.GetLastCommitGeneration(dir);
+            Assert.IsTrue(gen > 0, "segment generation should be > 0 but got " + gen);
+
+            // Make the next segments file, with last byte
+            // missing, to simulate a writer that crashed while
+            // writing segments file:
+            string fileNameIn = SegmentInfos.GetLastCommitSegmentsFileName(dir);
+            string fileNameOut = IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", 1 + gen);
+            IndexInput @in = dir.OpenInput(fileNameIn, NewIOContext(Random()));
+            IndexOutput @out = dir.CreateOutput(fileNameOut, NewIOContext(Random()));
+            long length = @in.Length;
+            for (int i = 0; i < length - 1; i++)
+            {
+                @out.WriteByte(@in.ReadByte());
+            }
+            @in.Dispose();
+            @out.Dispose();
+
+            IndexReader reader = null;
+            try
+            {
+                reader = DirectoryReader.Open(dir);
+            }
+#pragma warning disable 168
+            catch (Exception e)
+#pragma warning restore 168
+            {
+                Assert.Fail("reader failed to open on a crashed index");
+            }
+            reader.Dispose();
+
+            try
+            {
+                writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE));
+            }
+            catch (Exception e)
+            {
+                Console.WriteLine(e.StackTrace);
+                Assert.Fail("writer failed to open on a crashed index");
+            }
+
+            // add 100 documents
+            for (int i = 0; i < 100; i++)
+            {
+                AddDoc(writer);
+            }
+
+            // close
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestTermVectorExceptions()
+        {
+            FailOnTermVectors[] failures = new FailOnTermVectors[] { new FailOnTermVectors(FailOnTermVectors.AFTER_INIT_STAGE), new FailOnTermVectors(FailOnTermVectors.INIT_STAGE) };
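+            // One failure fires while the term vectors writer is being initialized
+            // (INIT_STAGE), the other after a document is finished (AFTER_INIT_STAGE):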
+            int num = AtLeast(1);
+            for (int j = 0; j < num; j++)
+            {
+                foreach (FailOnTermVectors failure in failures)
+                {
+                    MockDirectoryWrapper dir = NewMockDirectory();
+                    IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                    dir.FailOn(failure);
+                    int numDocs = 10 + Random().Next(30);
+                    for (int i = 0; i < numDocs; i++)
+                    {
+                        Document doc = new Document();
+                        Field field = NewTextField(Random(), "field", "a field", Field.Store.YES);
+                        doc.Add(field);
+                        // random TV
+                        try
+                        {
+                            w.AddDocument(doc);
+                            Assert.IsFalse(field.FieldType.StoreTermVectors);
+                        }
+                        catch (Exception e)
+                        {
+                            Assert.IsTrue(e.Message.StartsWith(FailOnTermVectors.EXC_MSG));
+                        }
+                        if (Random().Next(20) == 0)
+                        {
+                            w.Commit();
+                            TestUtil.CheckIndex(dir);
+                        }
+                    }
+                    Document document = new Document();
+                    document.Add(new TextField("field", "a field", Field.Store.YES));
+                    w.AddDocument(document);
+
+                    for (int i = 0; i < numDocs; i++)
+                    {
+                        Document doc = new Document();
+                        Field field = NewTextField(Random(), "field", "a field", Field.Store.YES);
+                        doc.Add(field);
+                        // random TV
+                        try
+                        {
+                            w.AddDocument(doc);
+                            Assert.IsFalse(field.FieldType.StoreTermVectors);
+                        }
+                        catch (Exception e)
+                        {
+                            Assert.IsTrue(e.Message.StartsWith(FailOnTermVectors.EXC_MSG));
+                        }
+                        if (Random().Next(20) == 0)
+                        {
+                            w.Commit();
+                            TestUtil.CheckIndex(dir);
+                        }
+                    }
+                    document = new Document();
+                    document.Add(new TextField("field", "a field", Field.Store.YES));
+                    w.AddDocument(document);
+                    w.Dispose();
+                    IndexReader reader = DirectoryReader.Open(dir);
+                    Assert.IsTrue(reader.NumDocs > 0);
+                    SegmentInfos sis = new SegmentInfos();
+                    sis.Read(dir);
+                    foreach (AtomicReaderContext context in reader.Leaves)
+                    {
+                        Assert.IsFalse(context.AtomicReader.FieldInfos.HasVectors);
+                    }
+                    reader.Dispose();
+                    dir.Dispose();
+                }
+            }
+        }
+
+        private class FailOnTermVectors : MockDirectoryWrapper.Failure
+        {
+            internal const string INIT_STAGE = "InitTermVectorsWriter";
+            internal const string AFTER_INIT_STAGE = "FinishDocument";
+            internal const string EXC_MSG = "FOTV";
+            internal readonly string Stage;
+
+            public FailOnTermVectors(string stage)
+            {
+                this.Stage = stage;
+            }
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
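+                // Fail whenever the term vectors consumer reaches the configured
+                // stage anywhere on the current call stack: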
+                bool fail = StackTraceHelper.DoesStackTraceContainMethod(typeof(TermVectorsConsumer).Name, Stage);
+
+                if (fail)
+                {
+                    throw new Exception(EXC_MSG);
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestAddDocsNonAbortingException()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            int numDocs1 = Random().Next(25);
+            for (int docCount = 0; docCount < numDocs1; docCount++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField("content", "good content", Field.Store.NO));
+                w.AddDocument(doc);
+            }
+
+            IList<Document> docs = new List<Document>();
+            for (int docCount = 0; docCount < 7; docCount++)
+            {
+                Document doc = new Document();
+                docs.Add(doc);
+                doc.Add(NewStringField("id", docCount + "", Field.Store.NO));
+                doc.Add(NewTextField("content", "silly content " + docCount, Field.Store.NO));
+                if (docCount == 4)
+                {
+                    Field f = NewTextField("crash", "", Field.Store.NO);
+                    doc.Add(f);
+                    MockTokenizer tokenizer = new MockTokenizer(new StringReader("crash me on the 4th token"), MockTokenizer.WHITESPACE, false);
+                    tokenizer.EnableChecks = false; // disable workflow checking as we forcefully close() in exceptional cases.
+                    f.SetTokenStream(new CrashingFilter(this, "crash", tokenizer));
+                }
+            }
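+            // AddDocuments is atomic: the mid-batch failure must roll back the
+            // whole block of "silly content" docs without aborting the writer.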
+            try
+            {
+                w.AddDocuments(docs);
+                // BUG: CrashingFilter didn't throw the expected exception
+                Assert.Fail("did not hit expected exception");
+            }
+            catch (IOException ioe)
+            {
+                // expected
+                Assert.AreEqual(CRASH_FAIL_MESSAGE, ioe.Message);
+            }
+
+            int numDocs2 = Random().Next(25);
+            for (int docCount = 0; docCount < numDocs2; docCount++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField("content", "good content", Field.Store.NO));
+                w.AddDocument(doc);
+            }
+
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            IndexSearcher s = NewSearcher(r);
+            PhraseQuery pq = new PhraseQuery();
+            pq.Add(new Term("content", "silly"));
+            pq.Add(new Term("content", "content"));
+            Assert.AreEqual(0, s.Search(pq, 1).TotalHits);
+
+            pq = new PhraseQuery();
+            pq.Add(new Term("content", "good"));
+            pq.Add(new Term("content", "content"));
+            Assert.AreEqual(numDocs1 + numDocs2, s.Search(pq, 1).TotalHits);
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateDocsNonAbortingException()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            int numDocs1 = Random().Next(25);
+            for (int docCount = 0; docCount < numDocs1; docCount++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField("content", "good content", Field.Store.NO));
+                w.AddDocument(doc);
+            }
+
+            // Use addDocs (no exception) to get docs in the index:
+            IList<Document> docs = new List<Document>();
+            int numDocs2 = Random().Next(25);
+            for (int docCount = 0; docCount < numDocs2; docCount++)
+            {
+                Document doc = new Document();
+                docs.Add(doc);
+                doc.Add(NewStringField("subid", "subs", Field.Store.NO));
+                doc.Add(NewStringField("id", docCount + "", Field.Store.NO));
+                doc.Add(NewTextField("content", "silly content " + docCount, Field.Store.NO));
+            }
+            w.AddDocuments(docs);
+
+            int numDocs3 = Random().Next(25);
+            for (int docCount = 0; docCount < numDocs3; docCount++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField("content", "good content", Field.Store.NO));
+                w.AddDocument(doc);
+            }
+
+            docs.Clear();
+            int limit = TestUtil.NextInt(Random(), 2, 25);
+            int crashAt = Random().Next(limit);
+            for (int docCount = 0; docCount < limit; docCount++)
+            {
+                Document doc = new Document();
+                docs.Add(doc);
+                doc.Add(NewStringField("id", docCount + "", Field.Store.NO));
+                doc.Add(NewTextField("content", "silly content " + docCount, Field.Store.NO));
+                if (docCount == crashAt)
+                {
+                    Field f = NewTextField("crash", "", Field.Store.NO);
+                    doc.Add(f);
+                    MockTokenizer tokenizer = new MockTokenizer(new StringReader("crash me on the 4th token"), MockTokenizer.WHITESPACE, false);
+                    tokenizer.EnableChecks = false; // disable workflow checking as we forcefully close() in exceptional cases.
+                    f.SetTokenStream(new CrashingFilter(this, "crash", tokenizer));
+                }
+            }
+
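+            // UpdateDocuments is atomic: when the replacement batch fails, the
+            // original "subs" block must remain untouched in the index.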
+            try
+            {
+                w.UpdateDocuments(new Term("subid", "subs"), docs);
+                // BUG: CrashingFilter didn't throw the expected exception
+                Assert.Fail("did not hit expected exception");
+            }
+            catch (IOException ioe)
+            {
+                // expected
+                Assert.AreEqual(CRASH_FAIL_MESSAGE, ioe.Message);
+            }
+
+            int numDocs4 = Random().Next(25);
+            for (int docCount = 0; docCount < numDocs4; docCount++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField("content", "good content", Field.Store.NO));
+                w.AddDocument(doc);
+            }
+
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            IndexSearcher s = NewSearcher(r);
+            PhraseQuery pq = new PhraseQuery();
+            pq.Add(new Term("content", "silly"));
+            pq.Add(new Term("content", "content"));
+            Assert.AreEqual(numDocs2, s.Search(pq, 1).TotalHits);
+
+            pq = new PhraseQuery();
+            pq.Add(new Term("content", "good"));
+            pq.Add(new Term("content", "content"));
+            Assert.AreEqual(numDocs1 + numDocs3 + numDocs4, s.Search(pq, 1).TotalHits);
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        internal class UOEDirectory : RAMDirectory
+        {
+            internal bool DoFail = false;
+
+            public override IndexInput OpenInput(string name, IOContext context)
+            {
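+                // Fail only when a segments_N file is opened from within a Read()
+                // frame, i.e., while SegmentInfos is being loaded: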
+                if (DoFail
+                    && name.StartsWith("segments_")
+                    && StackTraceHelper.DoesStackTraceContainMethod("Read"))
+                {
+                    throw new NotSupportedException("expected UOE");
+                }
+
+                return base.OpenInput(name, context);
+            }
+        }
+
+        [Test]
+        public virtual void TestExceptionOnCtor()
+        {
+            UOEDirectory uoe = new UOEDirectory();
+            Directory d = new MockDirectoryWrapper(Random(), uoe);
+            IndexWriter iw = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, null));
+            iw.AddDocument(new Document());
+            iw.Dispose();
+            uoe.DoFail = true;
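+            // Opening a writer on an existing index must read segments_N, so the
+            // constructor itself should surface the UOE: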
+            try
+            {
+                new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, null));
+                Assert.Fail("should have gotten a UOE");
+            }
+#pragma warning disable 168
+            catch (System.NotSupportedException expected)
+#pragma warning restore 168
+            {
+            }
+
+            uoe.DoFail = false;
+            d.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIllegalPositions()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter iw = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null));
+            Document doc = new Document();
+            Token t1 = new Token("foo", 0, 3);
+            t1.PositionIncrement = int.MaxValue;
+            Token t2 = new Token("bar", 4, 7);
+            t2.PositionIncrement = 200;
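+            // The combined increments (int.MaxValue + 200) overflow the int
+            // position counter; the writer must reject the document: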
+            TokenStream overflowingTokenStream = new CannedTokenStream(new Token[] { t1, t2 });
+            Field field = new TextField("foo", overflowingTokenStream);
+            doc.Add(field);
+            try
+            {
+                iw.AddDocument(doc);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+            iw.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestLegalbutVeryLargePositions()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter iw = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null));
+            Document doc = new Document();
+            Token t1 = new Token("foo", 0, 3);
+            t1.PositionIncrement = int.MaxValue - 500;
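+            // Stays just under the int overflow limit, so indexing must succeed: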
+            if (Random().NextBoolean())
+            {
+                t1.Payload = new BytesRef(new byte[] { 0x1 });
+            }
+            TokenStream overflowingTokenStream = new CannedTokenStream(new Token[] { t1 });
+            Field field = new TextField("foo", overflowingTokenStream);
+            doc.Add(field);
+            iw.AddDocument(doc);
+            iw.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestBoostOmitNorms()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            IndexWriter iw = new IndexWriter(dir, iwc);
+            Document doc = new Document();
+            doc.Add(new StringField("field1", "sometext", Field.Store.YES));
+            doc.Add(new TextField("field2", "sometext", Field.Store.NO));
+            doc.Add(new StringField("foo", "bar", Field.Store.NO));
+            iw.AddDocument(doc); // add an 'ok' document
+            try
+            {
+                doc = new Document();
+                // try to boost with norms omitted
+                IList<IIndexableField> list = new List<IIndexableField>();
+                list.Add(new IndexableFieldAnonymousInnerClassHelper(this));
+                iw.AddDocument(list);
+                Assert.Fail("didn't get any exception, boost silently discarded");
+            }
+#pragma warning disable 168
+            catch (System.NotSupportedException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            DirectoryReader ir = DirectoryReader.Open(iw, false);
+            Assert.AreEqual(1, ir.NumDocs);
+            Assert.AreEqual("sometext", ir.Document(0).Get("field1"));
+            ir.Dispose();
+            iw.Dispose();
+            dir.Dispose();
+        }
+
+        private class IndexableFieldAnonymousInnerClassHelper : IIndexableField
+        {
+            private readonly TestIndexWriterExceptions OuterInstance;
+
+            public IndexableFieldAnonymousInnerClassHelper(TestIndexWriterExceptions outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public string Name
+            {
+                get { return "foo"; }
+            }
+
+            public IIndexableFieldType FieldType
+            {
+                get { return StringField.TYPE_NOT_STORED; }
+            }
+
+            public float Boost
+            {
+                get { return 5f; }
+            }
+
+            public BytesRef GetBinaryValue()
+            {
+                return null;
+            }
+
+            public string GetStringValue()
+            {
+                return "baz";
+            }
+
+            public TextReader GetReaderValue()
+            {
+                return null;
+            }
+
+            public object GetNumericValue()
+            {
+                return null;
+            }
+
+            public TokenStream GetTokenStream(Analyzer analyzer)
+            {
+                return null;
+            }
+        }
+
+        // See LUCENE-4870: TooManyOpenFiles errors are thrown as
+        // FNFExceptions, which can trigger data loss.
+        [Test]
+        public virtual void TestTooManyFileException()
+        {
+            // Create a failure that randomly throws a "Too many open files" exception
+            MockDirectoryWrapper.Failure failure = new FailureAnonymousInnerClassHelper(this);
+
+            MockDirectoryWrapper dir = NewMockDirectory();
+            // The exception is only thrown on open input
+            dir.FailOnOpenInput = true;
+            dir.FailOn(failure);
+
+            // Create an index with one document
+            IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter iw = new IndexWriter(dir, iwc);
+            Document doc = new Document();
+            doc.Add(new StringField("foo", "bar", Field.Store.NO));
+            iw.AddDocument(doc); // add a document
+            iw.Commit();
+            DirectoryReader ir = DirectoryReader.Open(dir);
+            Assert.AreEqual(1, ir.NumDocs);
+            ir.Dispose();
+            iw.Dispose();
+
+            // Open and close the index a few times
+            for (int i = 0; i < 10; i++)
+            {
+                failure.SetDoFail();
+                iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+                try
+                {
+                    iw = new IndexWriter(dir, iwc);
+                }
+#pragma warning disable 168
+                catch (CorruptIndexException ex)
+#pragma warning restore 168
+                {
+                    // Exceptions are fine - we are running out of file handles here
+                    continue;
+                }
+#pragma warning disable 168
+                catch (FileNotFoundException/* | NoSuchFileException*/ ex)
+#pragma warning restore 168
+                {
+                    continue;
+                }
+                failure.ClearDoFail();
+                iw.Dispose();
+                ir = DirectoryReader.Open(dir);
+                Assert.AreEqual(1, ir.NumDocs, "lost document after iteration: " + i);
+                ir.Dispose();
+            }
+
+            // Check if document is still there
+            failure.ClearDoFail();
+            ir = DirectoryReader.Open(dir);
+            Assert.AreEqual(1, ir.NumDocs);
+            ir.Dispose();
+
+            dir.Dispose();
+        }
+
+        private class FailureAnonymousInnerClassHelper : MockDirectoryWrapper.Failure
+        {
+            private readonly TestIndexWriterExceptions OuterInstance;
+
+            public FailureAnonymousInnerClassHelper(TestIndexWriterExceptions outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override MockDirectoryWrapper.Failure Reset()
+            {
+                DoFail = false;
+                return this;
+            }
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
+                if (DoFail)
+                {
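+                    // Randomly simulate the OS running out of file handles: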
+                    if (Random().NextBoolean())
+                    {
+                        throw new FileNotFoundException("some/file/name.ext (Too many open files)");
+                    }
+                }
+            }
+        }
+
+        // Make sure that if we hit a transient IOException (e.g., disk
+        // full) and the exception then stops (e.g., disk frees
+        // up), we can still successfully close IW or open an NRT
+        // reader without losing any deletes or updates:
+        [Test]
+        public virtual void TestNoLostDeletesOrUpdates()
+        {
+            int deleteCount = 0;
+            int docBase = 0;
+            int docCount = 0;
+
+            MockDirectoryWrapper dir = NewMockDirectory();
+            AtomicBoolean shouldFail = new AtomicBoolean();
+            dir.FailOn(new FailureAnonymousInnerClassHelper2(shouldFail));
+
+            RandomIndexWriter w = null;
+
+            for (int iter = 0; iter < 10 * RANDOM_MULTIPLIER; iter++)
+            {
+                int numDocs = AtLeast(100);
+                if (VERBOSE)
+                {
+                    Console.WriteLine("\nTEST: iter=" + iter + " numDocs=" + numDocs + ".DocBase=" + docBase + " delCount=" + deleteCount);
+                }
+                if (w == null)
+                {
+                    IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+                    IMergeScheduler ms = iwc.MergeScheduler;
+                    if (ms is IConcurrentMergeScheduler)
+                    {
+                        IConcurrentMergeScheduler suppressFakeIOE = new ConcurrentMergeSchedulerAnonymousInnerClassHelper(this);
+
+                        IConcurrentMergeScheduler cms = (IConcurrentMergeScheduler)ms;
+                        suppressFakeIOE.SetMaxMergesAndThreads(cms.MaxMergeCount, cms.MaxThreadCount);
+                        suppressFakeIOE.SetMergeThreadPriority(cms.MergeThreadPriority);
+                        iwc.SetMergeScheduler(suppressFakeIOE);
+                    }
+
+                    w = new RandomIndexWriter(Random(), dir, iwc);
+                    // Since we hit exc during merging, a partial
+                    // forceMerge can easily return when there are still
+                    // too many segments in the index:
+                    w.DoRandomForceMergeAssert = false;
+                }
+                for (int i = 0; i < numDocs; i++)
+                {
+                    Document doc = new Document();
+                    doc.Add(new StringField("id", (docBase + i).ToString(), Field.Store.NO));
+                    if (DefaultCodecSupportsDocValues())
+                    {
+                        doc.Add(new NumericDocValuesField("f", 1L));
+                        doc.Add(new NumericDocValuesField("cf", 2L));
+                        doc.Add(new BinaryDocValuesField("bf", TestBinaryDocValuesUpdates.ToBytes(1L)));
+                        doc.Add(new BinaryDocValuesField("bcf", TestBinaryDocValuesUpdates.ToBytes(2L)));
+                    }
+                    w.AddDocument(doc);
+                }
+                docCount += numDocs;
+
+                // TODO: we could make the test more evil, by letting
+                // it throw more than one exc, randomly, before "recovering"
+
+                // TODO: we could also install an infoStream and try
+                // to fail in "more evil" places inside BDS
+
+                shouldFail.Set(true);
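+                // From here on the wrapped directory may randomly throw a fake
+                // IOException while deletes and doc values updates are written: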
+                bool doClose = false;
+
+                try
+                {
+                    bool defaultCodecSupportsFieldUpdates = DefaultCodecSupportsFieldUpdates();
+                    for (int i = 0; i < numDocs; i++)
+                    {
+                        if (Random().Next(10) == 7)
+                        {
+                            bool fieldUpdate = defaultCodecSupportsFieldUpdates && Random().NextBoolean();
+                            if (fieldUpdate)
+                            {
+                                long value = iter;
+                                if (VERBOSE)
+                                {
+                                    Console.WriteLine("  update id=" + docBase + i + " to value " + value);
+                                }
+                                if (Random().NextBoolean()) // update only numeric field
+                                {
+                                    w.UpdateNumericDocValue(new Term("id", (docBase + i).ToString()), "f", value);
+                                    w.UpdateNumericDocValue(new Term("id", (docBase + i).ToString()), "cf", value * 2);
+                                }
+                                else if (Random().NextBoolean())
+                                {
+                                    w.UpdateBinaryDocValue(new Term("id", (docBase + i).ToString()), "bf", TestBinaryDocValuesUpdates.ToBytes(value));
+                                    w.UpdateBinaryDocValue(new Term("id", (docBase + i).ToString()), "bcf", TestBinaryDocValuesUpdates.ToBytes(value * 2));
+                                }
+                                else
+                                {
+                                    w.UpdateNumericDocValue(new Term("id", (docBase + i).ToString()), "f", value);
+                                    w.UpdateNumericDocValue(new Term("id", (docBase + i).ToString()), "cf", value * 2);
+                                    w.UpdateBinaryDocValue(new Term("id", (docBase + i).ToString()), "bf", TestBinaryDocValuesUpdates.ToBytes(value));
+                                    w.UpdateBinaryDocValue(new Term("id", (docBase + i).ToString()), "bcf", TestBinaryDocValuesUpdates.ToBytes(value * 2));
+                                }
+                            }
+
+                            // sometimes do both deletes and updates
+                            if (!fieldUpdate || Random().NextBoolean())
+                            {
+                                if (VERBOSE)
+                                {
+                                    Console.WriteLine("  delete id=" + (docBase + i).ToString());
+                                }
+                                deleteCount++;
+                                w.DeleteDocuments(new Term("id", "" + (docBase + i).ToString()));
+                            }
+                        }
+                    }
+
+                    // Trigger writeLiveDocs so we hit fake exc:
+                    IndexReader r = w.GetReader(true);
+
+                    // Sometimes we will make it here (we only randomly
+                    // throw the exc):
+                    Assert.AreEqual(docCount - deleteCount, r.NumDocs);
+        

<TRUNCATED>

[28/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexWriterForceMerge.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterForceMerge.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterForceMerge.cs
new file mode 100644
index 0000000..62de270
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterForceMerge.cs
@@ -0,0 +1,260 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestIndexWriterForceMerge : LuceneTestCase
+    {
+        private static readonly FieldType StoredTextType = new FieldType(TextField.TYPE_NOT_STORED);
+
+        [Test]
+        public virtual void TestPartialMerge()
+        {
+            Directory dir = NewDirectory();
+
+            Document doc = new Document();
+            doc.Add(NewStringField("content", "aaa", Field.Store.NO));
+            int incrMin = TEST_NIGHTLY ? 15 : 40;
+            for (int numDocs = 10; numDocs < 500; numDocs += TestUtil.NextInt(Random(), incrMin, 5 * incrMin))
+            {
+                LogDocMergePolicy ldmp = new LogDocMergePolicy();
+                ldmp.MinMergeDocs = 1;
+                ldmp.MergeFactor = 5;
+                IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(2).SetMergePolicy(ldmp));
+                for (int j = 0; j < numDocs; j++)
+                {
+                    writer.AddDocument(doc);
+                }
+                writer.Dispose();
+
+                SegmentInfos sis = new SegmentInfos();
+                sis.Read(dir);
+                int segCount = sis.Count;
+
+                ldmp = new LogDocMergePolicy();
+                ldmp.MergeFactor = 5;
+                writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(ldmp));
+                writer.ForceMerge(3);
+                writer.Dispose();
+
+                sis = new SegmentInfos();
+                sis.Read(dir);
+                int optSegCount = sis.Count;
+
+                if (segCount < 3)
+                {
+                    Assert.AreEqual(segCount, optSegCount);
+                }
+                else
+                {
+                    Assert.AreEqual(3, optSegCount);
+                }
+            }
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestMaxNumSegments2([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            Directory dir = NewDirectory();
+
+            Document doc = new Document();
+            doc.Add(NewStringField("content", "aaa", Field.Store.NO));
+
+            LogDocMergePolicy ldmp = new LogDocMergePolicy();
+            ldmp.MinMergeDocs = 1;
+            ldmp.MergeFactor = 4;
+            var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                            .SetMaxBufferedDocs(2)
+                            .SetMergePolicy(ldmp)
+                            .SetMergeScheduler(scheduler);
+            IndexWriter writer = new IndexWriter(dir, config);
+
+            for (int iter = 0; iter < 10; iter++)
+            {
+                for (int i = 0; i < 19; i++)
+                {
+                    writer.AddDocument(doc);
+                }
+
+                writer.Commit();
+                writer.WaitForMerges();
+                writer.Commit();
+
+                SegmentInfos sis = new SegmentInfos();
+                sis.Read(dir);
+
+                int segCount = sis.Count;
+                writer.ForceMerge(7);
+                writer.Commit();
+                writer.WaitForMerges();
+
+                sis = new SegmentInfos();
+                sis.Read(dir);
+                int optSegCount = sis.Count;
+
+                if (segCount < 7)
+                {
+                    Assert.AreEqual(segCount, optSegCount);
+                }
+                else
+                {
+                    Assert.AreEqual(7, optSegCount, "seg: " + segCount);
+                }
+            }
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// Make sure forceMerge's peak temporary disk usage stays
+        /// within 4X the starting index size.
+        /// </summary>
+        [Test]
+        public virtual void TestForceMergeTempSpaceUsage()
+        {
+            MockDirectoryWrapper dir = NewMockDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy()));
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: config1=" + writer.Config);
+            }
+
+            for (int j = 0; j < 500; j++)
+            {
+                AddDocWithIndex(writer, j);
+            }
+            int termIndexInterval = writer.Config.TermIndexInterval;
+            // force one extra segment w/ different doc store so
+            // we see the doc stores get merged
+            writer.Commit();
+            AddDocWithIndex(writer, 500);
+            writer.Dispose();
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: start disk usage");
+            }
+            long startDiskUsage = 0;
+            string[] files = dir.ListAll();
+            for (int i = 0; i < files.Length; i++)
+            {
+                startDiskUsage += dir.FileLength(files[i]);
+                if (VERBOSE)
+                {
+                    Console.WriteLine(files[i] + ": " + dir.FileLength(files[i]));
+                }
+            }
+
+            dir.ResetMaxUsedSizeInBytes();
+            dir.TrackDiskUsage = true;
+
+            // Important to use the same term index interval, else a
+            // smaller one here could increase the disk usage and
+            // cause a false failure:
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetTermIndexInterval(termIndexInterval).SetMergePolicy(NewLogMergePolicy()));
+            writer.ForceMerge(1);
+            writer.Dispose();
+            long maxDiskUsage = dir.MaxUsedSizeInBytes;
+            Assert.IsTrue(maxDiskUsage <= 4 * startDiskUsage, "forceMerge used too much temporary space: starting usage was " + startDiskUsage + " bytes; max temp usage was " + maxDiskUsage + " but should have been at most " + (4 * startDiskUsage) + " (= 4X starting usage)");
+            dir.Dispose();
+        }
+
+        // Test calling forceMerge(1, false) whereby forceMerge is kicked
+        // off but we don't wait for it to finish (but
+        // writer.Dispose() does wait):
+        [Test]
+        public virtual void TestBackgroundForceMerge()
+        {
+            Directory dir = NewDirectory();
+            for (int pass = 0; pass < 2; pass++)
+            {
+                IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(51)));
+                Document doc = new Document();
+                doc.Add(NewStringField("field", "aaa", Field.Store.NO));
+                for (int i = 0; i < 100; i++)
+                {
+                    writer.AddDocument(doc);
+                }
+                writer.ForceMerge(1, false);
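+                // Pass 0: Dispose() must wait for the background merge, leaving
+                // one segment. Pass 1: docs flushed after the merge kicked off
+                // must land in a separate segment, untouched by that merge.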
+
+                if (0 == pass)
+                {
+                    writer.Dispose();
+                    DirectoryReader reader = DirectoryReader.Open(dir);
+                    Assert.AreEqual(1, reader.Leaves.Count);
+                    reader.Dispose();
+                }
+                else
+                {
+                    // Get another segment to flush so we can verify it is
+                    // NOT included in the merging
+                    writer.AddDocument(doc);
+                    writer.AddDocument(doc);
+                    writer.Dispose();
+
+                    DirectoryReader reader = DirectoryReader.Open(dir);
+                    Assert.IsTrue(reader.Leaves.Count > 1);
+                    reader.Dispose();
+
+                    SegmentInfos infos = new SegmentInfos();
+                    infos.Read(dir);
+                    Assert.AreEqual(2, infos.Count);
+                }
+            }
+
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// LUCENENET specific
+        ///
+        /// Copied from <seealso cref="TestIndexWriter.AddDoc(IndexWriter)"/>
+        /// to remove inter-class dependency on TestIndexWriter.
+        /// </summary>
+        private void AddDoc(IndexWriter writer)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+            writer.AddDocument(doc);
+        }
+
+        private void AddDocWithIndex(IndexWriter writer, int index)
+        {
+            Document doc = new Document();
+            doc.Add(NewField("content", "aaa " + index, StoredTextType));
+            doc.Add(NewField("id", "" + index, StoredTextType));
+            writer.AddDocument(doc);
+        }
+
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexWriterLockRelease.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterLockRelease.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterLockRelease.cs
new file mode 100644
index 0000000..c0705f0
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterLockRelease.cs
@@ -0,0 +1,64 @@
+using NUnit.Framework;
+using System.IO;
+
+namespace Lucene.Net.Index
+{
+    using Directory = Lucene.Net.Store.Directory;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+
+    /// <summary>
+    /// This tests the patch for issue #LUCENE-715 (IndexWriter does not
+    /// release its write lock when trying to open an index which does not yet
+    /// exist).
+    /// </summary>
+    [TestFixture]
+    public class TestIndexWriterLockRelease : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestIndexWriterLockRelease_Mem()
+        {
+            Directory dir = NewFSDirectory(CreateTempDir("testLockRelease"));
+            try
+            {
+                new IndexWriter(dir, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetOpenMode(OpenMode.APPEND));
+            }
+#pragma warning disable 168
+            catch (FileNotFoundException /*| NoSuchFileException*/ e)
+#pragma warning restore 168
+            {
+                try
+                {
+                    new IndexWriter(dir, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetOpenMode(OpenMode.APPEND));
+                }
+#pragma warning disable 168
+                catch (FileNotFoundException /*| NoSuchFileException*/ e1)
+#pragma warning restore 168
+                {
+                }
+            }
+            finally
+            {
+                dir.Dispose();
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexWriterMergePolicy.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterMergePolicy.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterMergePolicy.cs
new file mode 100644
index 0000000..a6b95d7
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterMergePolicy.cs
@@ -0,0 +1,311 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+
+    [TestFixture]
+    public class TestIndexWriterMergePolicy : LuceneTestCase
+    {
+        // Test the normal case
+        [Test]
+        public virtual void TestNormalCase()
+        {
+            Directory dir = NewDirectory();
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(10).SetMergePolicy(new LogDocMergePolicy()));
+
+            for (int i = 0; i < 100; i++)
+            {
+                AddDoc(writer);
+                CheckInvariants(writer);
+            }
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        // Test that the writer does not over-merge
+        [Test]
+        public virtual void TestNoOverMerge()
+        {
+            Directory dir = NewDirectory();
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(10).SetMergePolicy(new LogDocMergePolicy()));
+
+            bool noOverMerge = false;
+            for (int i = 0; i < 100; i++)
+            {
+                AddDoc(writer);
+                CheckInvariants(writer);
+                if (writer.NumBufferedDocuments + writer.SegmentCount >= 18)
+                {
+                    noOverMerge = true;
+                }
+            }
+            Assert.IsTrue(noOverMerge);
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        // Test the case where flush is forced after every addDoc
+        [Test]
+        public virtual void TestForceFlush()
+        {
+            Directory dir = NewDirectory();
+
+            LogDocMergePolicy mp = new LogDocMergePolicy();
+            mp.MinMergeDocs = 100;
+            mp.MergeFactor = 10;
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(10).SetMergePolicy(mp));
+
+            for (int i = 0; i < 100; i++)
+            {
+                AddDoc(writer);
+                writer.Dispose();
+
+                mp = new LogDocMergePolicy();
+                mp.MergeFactor = 10;
+                writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10).SetMergePolicy(mp));
+                mp.MinMergeDocs = 100;
+                CheckInvariants(writer);
+            }
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        // Test the case where mergeFactor changes
+        [Test]
+        public virtual void TestMergeFactorChange()
+        {
+            Directory dir = NewDirectory();
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy()).SetMergeScheduler(new SerialMergeScheduler()));
+
+            for (int i = 0; i < 250; i++)
+            {
+                AddDoc(writer);
+                CheckInvariants(writer);
+            }
+
+            ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 5;
+
+            // merge policy only fixes segments on levels where merges
+            // have been triggered, so check invariants after all adds
+            for (int i = 0; i < 10; i++)
+            {
+                AddDoc(writer);
+            }
+            CheckInvariants(writer);
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        // Test the case where both mergeFactor and maxBufferedDocs change
+        [Test]
+        public virtual void TestMaxBufferedDocsChange()
+        {
+            Directory dir = NewDirectory();
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(101).SetMergePolicy(new LogDocMergePolicy()).SetMergeScheduler(new SerialMergeScheduler()));
+
+            // leftmost* segment has 1 doc
+            // rightmost* segment has 100 docs
+            for (int i = 1; i <= 100; i++)
+            {
+                for (int j = 0; j < i; j++)
+                {
+                    AddDoc(writer);
+                    CheckInvariants(writer);
+                }
+                writer.Dispose();
+
+                writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(101).SetMergePolicy(new LogDocMergePolicy()).SetMergeScheduler(new SerialMergeScheduler()));
+            }
+
+            writer.Dispose();
+            LogDocMergePolicy ldmp = new LogDocMergePolicy();
+            ldmp.MergeFactor = 10;
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10).SetMergePolicy(ldmp).SetMergeScheduler(new SerialMergeScheduler()));
+
+            // merge policy only fixes segments on levels where merges
+            // have been triggered, so check invariants after all adds
+            for (int i = 0; i < 100; i++)
+            {
+                AddDoc(writer);
+            }
+            CheckInvariants(writer);
+
+            for (int i = 100; i < 1000; i++)
+            {
+                AddDoc(writer);
+            }
+            writer.Commit();
+            writer.WaitForMerges();
+            writer.Commit();
+            CheckInvariants(writer);
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        // Test the case where a merge results in no doc at all
+        [Test]
+        public virtual void TestMergeDocCount0([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            Directory dir = NewDirectory();
+
+            LogDocMergePolicy ldmp = new LogDocMergePolicy();
+            ldmp.MergeFactor = 100;
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(10).SetMergePolicy(ldmp));
+
+            for (int i = 0; i < 250; i++)
+            {
+                AddDoc(writer);
+                CheckInvariants(writer);
+            }
+            writer.Dispose();
+
+            // delete some docs without merging
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
+            writer.DeleteDocuments(new Term("content", "aaa"));
+            writer.Dispose();
+
+            ldmp = new LogDocMergePolicy();
+            ldmp.MergeFactor = 5;
+            var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                .SetOpenMode(OpenMode.APPEND)
+                .SetMaxBufferedDocs(10)
+                .SetMergePolicy(ldmp)
+                .SetMergeScheduler(scheduler);
+            writer = new IndexWriter(dir, config);
+
+            // merge factor is changed, so check invariants after all adds
+            for (int i = 0; i < 10; i++)
+            {
+                AddDoc(writer);
+            }
+            writer.Commit();
+            writer.WaitForMerges();
+            writer.Commit();
+            CheckInvariants(writer);
+            Assert.AreEqual(10, writer.MaxDoc);
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        private void AddDoc(IndexWriter writer)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+            writer.AddDocument(doc);
+        }
+
+        private void CheckInvariants(IndexWriter writer)
+        {
+            writer.WaitForMerges();
+            int maxBufferedDocs = writer.Config.MaxBufferedDocs;
+            int mergeFactor = ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor;
+            int maxMergeDocs = ((LogMergePolicy)writer.Config.MergePolicy).MaxMergeDocs;
+
+            int ramSegmentCount = writer.NumBufferedDocuments;
+            Assert.IsTrue(ramSegmentCount < maxBufferedDocs);
+
+            int lowerBound = -1;
+            int upperBound = maxBufferedDocs;
+            int numSegments = 0;
+
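+            // Walk segments newest-to-oldest: segments whose doc counts fall in
+            // (lowerBound, upperBound] share a level and must number fewer than
+            // mergeFactor; hitting a bigger segment moves the bounds up a level.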
+            int segmentCount = writer.SegmentCount;
+            for (int i = segmentCount - 1; i >= 0; i--)
+            {
+                int docCount = writer.GetDocCount(i);
+                Assert.IsTrue(docCount > lowerBound, "docCount=" + docCount + " lowerBound=" + lowerBound + " upperBound=" + upperBound + " i=" + i + " segmentCount=" + segmentCount + " index=" + writer.SegString() + " config=" + writer.Config);
+
+                if (docCount <= upperBound)
+                {
+                    numSegments++;
+                }
+                else
+                {
+                    if (upperBound * mergeFactor <= maxMergeDocs)
+                    {
+                        Assert.IsTrue(numSegments < mergeFactor, "maxMergeDocs=" + maxMergeDocs + "; numSegments=" + numSegments + "; upperBound=" + upperBound + "; mergeFactor=" + mergeFactor + "; segs=" + writer.SegString() + " config=" + writer.Config);
+                    }
+
+                    do
+                    {
+                        lowerBound = upperBound;
+                        upperBound *= mergeFactor;
+                    } while (docCount > upperBound);
+                    numSegments = 1;
+                }
+            }
+            if (upperBound * mergeFactor <= maxMergeDocs)
+            {
+                Assert.IsTrue(numSegments < mergeFactor);
+            }
+        }
+
+        private const double EPSILON = 1E-14;
+
+        [Test]
+        public virtual void TestSetters()
+        {
+            AssertSetters(new LogByteSizeMergePolicy());
+            AssertSetters(new LogDocMergePolicy());
+        }
+
+        private void AssertSetters(MergePolicy lmp)
+        {
+            lmp.MaxCFSSegmentSizeMB = 2.0;
+            Assert.AreEqual(2.0, lmp.MaxCFSSegmentSizeMB, EPSILON);
+
+            lmp.MaxCFSSegmentSizeMB = double.PositiveInfinity;
+            Assert.AreEqual(long.MaxValue / 1024 / 1024.0, lmp.MaxCFSSegmentSizeMB, EPSILON * long.MaxValue);
+
+            lmp.MaxCFSSegmentSizeMB = long.MaxValue / 1024 / 1024.0;
+            Assert.AreEqual(long.MaxValue / 1024 / 1024.0, lmp.MaxCFSSegmentSizeMB, EPSILON * long.MaxValue);
+
+            try
+            {
+                lmp.MaxCFSSegmentSizeMB = -2.0;
+                Assert.Fail("Didn't throw IllegalArgumentException");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // pass
+            }
+
+            // TODO: Add more checks for other non-double setters!
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs
new file mode 100644
index 0000000..37fc3cd
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs
@@ -0,0 +1,488 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Threading;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Attributes;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+
+    /*
+     * Copyright 2006 The Apache Software Foundation
+     *
+     * Licensed under the Apache License, Version 2.0 (the "License");
+     * you may not use this file except in compliance with the License.
+     * You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestIndexWriterMerging : LuceneTestCase
+    {
+        /// <summary>
+        /// Tests that index merging (specifically addIndexes(Directory...)) doesn't
+        /// change the index order of documents.
+        /// </summary>
+        [Test]
+        public virtual void TestLucene()
+        {
+            int num = 100;
+
+            Directory indexA = NewDirectory();
+            Directory indexB = NewDirectory();
+
+            FillIndex(Random(), indexA, 0, num);
+            bool fail = VerifyIndex(indexA, 0);
+            if (fail)
+            {
+                Assert.Fail("Index a is invalid");
+            }
+
+            FillIndex(Random(), indexB, num, num);
+            fail = VerifyIndex(indexB, num);
+            if (fail)
+            {
+                Assert.Fail("Index b is invalid");
+            }
+
+            Directory merged = NewDirectory();
+
+            IndexWriter writer = new IndexWriter(merged, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy(2)));
+            writer.AddIndexes(indexA, indexB);
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            fail = VerifyIndex(merged, 0);
+
+            Assert.IsFalse(fail, "The merged index is invalid");
+            indexA.Dispose();
+            indexB.Dispose();
+            merged.Dispose();
+        }
+
+        private bool VerifyIndex(Directory directory, int startAt)
+        {
+            bool fail = false;
+            IndexReader reader = DirectoryReader.Open(directory);
+
+            int max = reader.MaxDoc;
+            for (int i = 0; i < max; i++)
+            {
+                Document temp = reader.Document(i);
+                //Console.WriteLine("doc " + i + "=" + temp.GetField("count").GetStringValue());
+                //compare the index doc number to the value that it should be
+                if (!temp.GetField("count").GetStringValue().Equals((i + startAt) + ""))
+                {
+                    fail = true;
+                    Console.WriteLine("Document " + (i + startAt) + " is returning document " + temp.GetField("count").GetStringValue());
+                }
+            }
+            reader.Dispose();
+            return fail;
+        }
+
+        private void FillIndex(Random random, Directory dir, int start, int numDocs)
+        {
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(2)));
+
+            for (int i = start; i < (start + numDocs); i++)
+            {
+                Document temp = new Document();
+                temp.Add(NewStringField("count", ("" + i), Field.Store.YES));
+
+                writer.AddDocument(temp);
+            }
+            writer.Dispose();
+        }
+
+        // LUCENE-325: test forceMergeDeletes when two separate merges
+        // are required
+        [Test]
+        public virtual void TestForceMergeDeletes()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH));
+            Document document = new Document();
+
+            FieldType customType = new FieldType();
+            customType.IsStored = true;
+
+            FieldType customType1 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType1.IsTokenized = false;
+            customType1.StoreTermVectors = true;
+            customType1.StoreTermVectorPositions = true;
+            customType1.StoreTermVectorOffsets = true;
+
+            Field idField = NewStringField("id", "", Field.Store.NO);
+            document.Add(idField);
+            Field storedField = NewField("stored", "stored", customType);
+            document.Add(storedField);
+            Field termVectorField = NewField("termVector", "termVector", customType1);
+            document.Add(termVectorField);
+            for (int i = 0; i < 10; i++)
+            {
+                idField.SetStringValue("" + i);
+                writer.AddDocument(document);
+            }
+            writer.Dispose();
+
+            IndexReader ir = DirectoryReader.Open(dir);
+            Assert.AreEqual(10, ir.MaxDoc);
+            Assert.AreEqual(10, ir.NumDocs);
+            ir.Dispose();
+
+            IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+            writer = new IndexWriter(dir, dontMergeConfig);
+            writer.DeleteDocuments(new Term("id", "0"));
+            writer.DeleteDocuments(new Term("id", "7"));
+            writer.Dispose();
+
+            ir = DirectoryReader.Open(dir);
+            Assert.AreEqual(8, ir.NumDocs);
+            ir.Dispose();
+
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            Assert.AreEqual(8, writer.NumDocs);
+            Assert.AreEqual(10, writer.MaxDoc);
+            writer.ForceMergeDeletes();
+            Assert.AreEqual(8, writer.NumDocs);
+            writer.Dispose();
+            ir = DirectoryReader.Open(dir);
+            Assert.AreEqual(8, ir.MaxDoc);
+            Assert.AreEqual(8, ir.NumDocs);
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-325: test forceMergeDeletes, when many adjacent merges are required
+        [Test]
+        public virtual void TestForceMergeDeletes2()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetMergePolicy(NewLogMergePolicy(50)));
+
+            Document document = new Document();
+
+            FieldType customType = new FieldType();
+            customType.IsStored = true;
+
+            FieldType customType1 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType1.IsTokenized = false;
+            customType1.StoreTermVectors = true;
+            customType1.StoreTermVectorPositions = true;
+            customType1.StoreTermVectorOffsets = true;
+
+            Field storedField = NewField("stored", "stored", customType);
+            document.Add(storedField);
+            Field termVectorField = NewField("termVector", "termVector", customType1);
+            document.Add(termVectorField);
+            Field idField = NewStringField("id", "", Field.Store.NO);
+            document.Add(idField);
+            for (int i = 0; i < 98; i++)
+            {
+                idField.SetStringValue("" + i);
+                writer.AddDocument(document);
+            }
+            writer.Dispose();
+
+            IndexReader ir = DirectoryReader.Open(dir);
+            Assert.AreEqual(98, ir.MaxDoc);
+            Assert.AreEqual(98, ir.NumDocs);
+            ir.Dispose();
+
+            IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+            writer = new IndexWriter(dir, dontMergeConfig);
+            for (int i = 0; i < 98; i += 2)
+            {
+                writer.DeleteDocuments(new Term("id", "" + i));
+            }
+            writer.Dispose();
+
+            ir = DirectoryReader.Open(dir);
+            Assert.AreEqual(49, ir.NumDocs);
+            ir.Dispose();
+
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy(3)));
+            Assert.AreEqual(49, writer.NumDocs);
+            writer.ForceMergeDeletes();
+            writer.Dispose();
+            ir = DirectoryReader.Open(dir);
+            Assert.AreEqual(49, ir.MaxDoc);
+            Assert.AreEqual(49, ir.NumDocs);
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-325: test forceMergeDeletes without waiting, when
+        // many adjacent merges are required
+        [Test]
+        public virtual void TestForceMergeDeletes3()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetMergePolicy(NewLogMergePolicy(50)));
+
+            FieldType customType = new FieldType();
+            customType.IsStored = true;
+
+            FieldType customType1 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType1.IsTokenized = false;
+            customType1.StoreTermVectors = true;
+            customType1.StoreTermVectorPositions = true;
+            customType1.StoreTermVectorOffsets = true;
+
+            Document document = new Document();
+            Field storedField = NewField("stored", "stored", customType);
+            document.Add(storedField);
+            Field termVectorField = NewField("termVector", "termVector", customType1);
+            document.Add(termVectorField);
+            Field idField = NewStringField("id", "", Field.Store.NO);
+            document.Add(idField);
+            for (int i = 0; i < 98; i++)
+            {
+                idField.SetStringValue("" + i);
+                writer.AddDocument(document);
+            }
+            writer.Dispose();
+
+            IndexReader ir = DirectoryReader.Open(dir);
+            Assert.AreEqual(98, ir.MaxDoc);
+            Assert.AreEqual(98, ir.NumDocs);
+            ir.Dispose();
+
+            IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
+            writer = new IndexWriter(dir, dontMergeConfig);
+            for (int i = 0; i < 98; i += 2)
+            {
+                writer.DeleteDocuments(new Term("id", "" + i));
+            }
+            writer.Dispose();
+            ir = DirectoryReader.Open(dir);
+            Assert.AreEqual(49, ir.NumDocs);
+            ir.Dispose();
+
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy(3)));
+            writer.ForceMergeDeletes(false);
+            writer.Dispose();
+            ir = DirectoryReader.Open(dir);
+            Assert.AreEqual(49, ir.MaxDoc);
+            Assert.AreEqual(49, ir.NumDocs);
+            ir.Dispose();
+            dir.Dispose();
+        }
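+
+        // The "false" passed to ForceMergeDeletes above is the doWait flag: the
+        // call only requests the merges and returns, leaving a concurrent merge
+        // scheduler to finish them in the background. A minimal sketch of the two
+        // modes (assumes an already-open writer):
+        private static void ForceMergeDeletesModes(IndexWriter w, bool wait)
+        {
+            if (wait)
+            {
+                w.ForceMergeDeletes();      // blocks until the deletes are merged away
+            }
+            else
+            {
+                w.ForceMergeDeletes(false); // request only; returns immediately
+            }
+        }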
+
+        // Just intercepts all merges & verifies that we are never
+        // merging a segment with >= 20 (maxMergeDocs) docs
+        private class MyMergeScheduler : MergeScheduler
+        {
+            private readonly TestIndexWriterMerging OuterInstance;
+
+            public MyMergeScheduler(TestIndexWriterMerging outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMergesFound)
+            {
+                lock (this)
+                {
+                    while (true)
+                    {
+                        MergePolicy.OneMerge merge = writer.NextMerge();
+                        if (merge == null)
+                        {
+                            break;
+                        }
+                        for (int i = 0; i < merge.Segments.Count; i++)
+                        {
+                            Debug.Assert(merge.Segments[i].Info.DocCount < 20);
+                        }
+                        writer.Merge(merge);
+                    }
+                }
+            }
+
+            protected override void Dispose(bool disposing)
+            {
+            }
+        }
+
+        // LUCENE-1013
+        [Test]
+        public virtual void TestSetMaxMergeDocs()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(new MyMergeScheduler(this)).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy());
+            LogMergePolicy lmp = (LogMergePolicy)conf.MergePolicy;
+            lmp.MaxMergeDocs = 20;
+            lmp.MergeFactor = 2;
+            IndexWriter iw = new IndexWriter(dir, conf);
+            Document document = new Document();
+
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+
+            document.Add(NewField("tvtest", "a b c", customType));
+            for (int i = 0; i < 177; i++)
+            {
+                iw.AddDocument(document);
+            }
+            iw.Dispose();
+            dir.Dispose();
+        }
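+
+        // Worked example of why the Debug.Assert in MyMergeScheduler holds here:
+        // maxBufferedDocs=2 flushes 2-doc segments, and mergeFactor=2 merges pairs
+        // into 4-, 8-, 16-, then 32-doc segments. A 32-doc segment exceeds
+        // maxMergeDocs=20, so LogMergePolicy never selects it as a merge input
+        // again; every segment handed to the scheduler has at most 16 docs.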
+
+#if !NETSTANDARD
+        // LUCENENET: NUnit does not support the Timeout attribute on .NET Core.
+        [Timeout(80000)]
+#endif
+        [Test, HasTimeout]
+        public virtual void TestNoWaitClose()
+        {
+            Directory directory = NewDirectory();
+
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.IsTokenized = false;
+
+            Field idField = NewField("id", "", customType);
+            doc.Add(idField);
+
+            for (int pass = 0; pass < 2; pass++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: pass=" + pass);
+                }
+
+                IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy());
+                if (pass == 1) // the second pass exercises the SerialMergeScheduler
+                {
+                    conf.SetMergeScheduler(new SerialMergeScheduler());
+                }
+
+                IndexWriter writer = new IndexWriter(directory, conf);
+                ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 100;
+
+                for (int iter = 0; iter < 10; iter++)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: iter=" + iter);
+                    }
+                    for (int j = 0; j < 199; j++)
+                    {
+                        idField.SetStringValue(Convert.ToString(iter * 201 + j));
+                        writer.AddDocument(doc);
+                    }
+
+                    int delID = iter * 199;
+                    for (int j = 0; j < 20; j++)
+                    {
+                        writer.DeleteDocuments(new Term("id", Convert.ToString(delID)));
+                        delID += 5;
+                    }
+
+                    // Force a bunch of merge threads to kick off so we
+                    // stress out aborting them on close:
+                    ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 2;
+
+                    IndexWriter finalWriter = writer;
+                    List<Exception> failure = new List<Exception>();
+                    ThreadClass t1 = new ThreadAnonymousInnerClassHelper(this, doc, finalWriter, failure);
+
+                    t1.Start();
+
+                    writer.Dispose(false);
+                    t1.Join();
+
+                    // Surface any unexpected exception recorded by the indexing thread:
+                    if (failure.Count > 0)
+                    {
+                        throw failure[0];
+                    }
+
+                    // Make sure reader can read
+                    IndexReader reader = DirectoryReader.Open(directory);
+                    reader.Dispose();
+
+                    // Reopen
+                    writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMergePolicy(NewLogMergePolicy()));
+                }
+                writer.Dispose();
+            }
+
+            directory.Dispose();
+        }
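+
+        // Dispose(false) above is the "no wait" close this test stresses: it
+        // aborts in-flight merges instead of waiting for them to finish. A
+        // minimal sketch of the two shutdown modes (assumes an open writer):
+        private static void DisposeModes(IndexWriter w, bool abortMerges)
+        {
+            if (abortMerges)
+            {
+                w.Dispose(false); // abort running merges and return quickly
+            }
+            else
+            {
+                w.Dispose(true);  // same as Dispose(): wait for merges to finish
+            }
+        }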
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestIndexWriterMerging OuterInstance;
+
+            private Document Doc;
+            private IndexWriter FinalWriter;
+            private List<Exception> Failure;
+
+            public ThreadAnonymousInnerClassHelper(TestIndexWriterMerging outerInstance, Document doc, IndexWriter finalWriter, List<Exception> failure)
+            {
+                this.OuterInstance = outerInstance;
+                this.Doc = doc;
+                this.FinalWriter = finalWriter;
+                this.Failure = failure;
+            }
+
+            public override void Run()
+            {
+                bool done = false;
+                while (!done)
+                {
+                    for (int i = 0; i < 100; i++)
+                    {
+                        try
+                        {
+                            FinalWriter.AddDocument(Doc);
+                        }
+#pragma warning disable 168
+                        catch (AlreadyClosedException e)
+#pragma warning restore 168
+                        {
+                            done = true;
+                            break;
+                        }
+#pragma warning disable 168
+                        catch (System.NullReferenceException e)
+#pragma warning restore 168
+                        {
+                            done = true;
+                            break;
+                        }
+                        catch (Exception e)
+                        {
+                            Console.WriteLine(e.StackTrace);
+                            Failure.Add(e);
+                            done = true;
+                            break;
+                        }
+                    }
+                    Thread.Sleep(0);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexWriterNRTIsCurrent.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterNRTIsCurrent.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterNRTIsCurrent.cs
new file mode 100644
index 0000000..3866d4d
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterNRTIsCurrent.cs
@@ -0,0 +1,260 @@
+using System;
+using System.Threading;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.IO;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements. See the NOTICE file distributed with this
+         * work for additional information regarding copyright ownership. The ASF
+         * licenses this file to You under the Apache License, Version 2.0 (the
+         * "License"); you may not use this file except in compliance with the License.
+         * You may obtain a copy of the License at
+         *
+         * http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+         * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+         * License for the specific language governing permissions and limitations under
+         * the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestIndexWriterNRTIsCurrent : LuceneTestCase
+    {
+        public class ReaderHolder
+        {
+            internal volatile DirectoryReader Reader;
+            internal volatile bool Stop = false;
+        }
+
+        [Test]
+        public virtual void TestIsCurrentWithThreads()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+            ReaderHolder holder = new ReaderHolder();
+            ReaderThread[] threads = new ReaderThread[AtLeast(3)];
+            CountdownEvent latch = new CountdownEvent(1);
+            WriterThread writerThread = new WriterThread(holder, writer, AtLeast(500), Random(), latch);
+            for (int i = 0; i < threads.Length; i++)
+            {
+                threads[i] = new ReaderThread(holder, latch);
+                threads[i].Start();
+            }
+            writerThread.Start();
+
+            writerThread.Join();
+            bool failed = writerThread.Failed != null;
+            if (failed)
+            {
+                Console.WriteLine(writerThread.Failed.ToString());
+                Console.Write(writerThread.Failed.StackTrace);
+            }
+            for (int i = 0; i < threads.Length; i++)
+            {
+                threads[i].Join();
+                if (threads[i].Failed != null)
+                {
+                    Console.WriteLine(threads[i].Failed.ToString());
+                    Console.Write(threads[i].Failed.StackTrace);
+                    failed = true;
+                }
+            }
+            Assert.IsFalse(failed);
+            writer.Dispose();
+            dir.Dispose();
+        }
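+
+        // The CountdownEvent above plays the role of Java's CountDownLatch: the
+        // reader threads block in Wait() until the count reaches zero, and the
+        // writer thread calls Signal() once it has published the first refreshed
+        // reader (or on exit). A minimal standalone sketch of that handshake,
+        // independent of the test's types:
+        private static void LatchHandshake()
+        {
+            CountdownEvent latch = new CountdownEvent(1);
+            Thread consumer = new Thread(() => latch.Wait()); // blocks until count == 0
+            consumer.Start();
+            latch.Signal(); // decrement to zero; the consumer unblocks
+            consumer.Join();
+        }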
+
+        public class WriterThread : ThreadClass
+        {
+            internal readonly ReaderHolder Holder;
+            internal readonly IndexWriter Writer;
+            internal readonly int NumOps;
+            internal bool Countdown = true;
+            internal readonly CountdownEvent Latch;
+            internal Exception Failed;
+
+            internal WriterThread(ReaderHolder holder, IndexWriter writer, int numOps, Random random, CountdownEvent latch)
+                : base()
+            {
+                this.Holder = holder;
+                this.Writer = writer;
+                this.NumOps = numOps;
+                this.Latch = latch;
+            }
+
+            public override void Run()
+            {
+                DirectoryReader currentReader = null;
+                Random random = LuceneTestCase.Random();
+                try
+                {
+                    Document doc = new Document();
+                    doc.Add(new TextField("id", "1", Field.Store.NO));
+                    Writer.AddDocument(doc);
+                    Holder.Reader = currentReader = Writer.GetReader(true);
+                    Term term = new Term("id");
+                    for (int i = 0; i < NumOps && !Holder.Stop; i++)
+                    {
+                        float nextOp = (float)random.NextDouble();
+                        if (nextOp < 0.3)
+                        {
+                            term.Set("id", new BytesRef("1"));
+                            Writer.UpdateDocument(term, doc);
+                        }
+                        else if (nextOp < 0.5)
+                        {
+                            Writer.AddDocument(doc);
+                        }
+                        else
+                        {
+                            term.Set("id", new BytesRef("1"));
+                            Writer.DeleteDocuments(term);
+                        }
+                        if (Holder.Reader != currentReader)
+                        {
+                            Holder.Reader = currentReader;
+                            if (Countdown)
+                            {
+                                Countdown = false;
+                                Latch.Signal();
+                            }
+                        }
+                        if (random.NextBoolean())
+                        {
+                            Writer.Commit();
+                            DirectoryReader newReader = DirectoryReader.OpenIfChanged(currentReader);
+                            if (newReader != null)
+                            {
+                                currentReader.DecRef();
+                                currentReader = newReader;
+                            }
+                            if (currentReader.NumDocs == 0)
+                            {
+                                Writer.AddDocument(doc);
+                            }
+                        }
+                    }
+                }
+                catch (Exception e)
+                {
+                    Failed = e;
+                }
+                finally
+                {
+                    Holder.Reader = null;
+                    if (Countdown)
+                    {
+                        Latch.Signal();
+                    }
+                    if (currentReader != null)
+                    {
+                        try
+                        {
+                            currentReader.DecRef();
+                        }
+#pragma warning disable 168
+                        catch (IOException e)
+#pragma warning restore 168
+                        {
+                        }
+                    }
+                }
+                if (VERBOSE)
+                {
+                    Console.WriteLine("writer stopped - forced by reader: " + Holder.Stop);
+                }
+            }
+        }
+
+        public sealed class ReaderThread : ThreadClass
+        {
+            internal readonly ReaderHolder Holder;
+            internal readonly CountdownEvent Latch;
+            internal Exception Failed;
+
+            internal ReaderThread(ReaderHolder holder, CountdownEvent latch)
+                : base()
+            {
+                this.Holder = holder;
+                this.Latch = latch;
+            }
+
+            public override void Run()
+            {
+#if !NETSTANDARD
+                try
+                {
+#endif
+                    Latch.Wait();
+#if !NETSTANDARD
+                }
+                catch (ThreadInterruptedException e)
+                {
+                    Failed = e;
+                    return;
+                }
+#endif
+                DirectoryReader reader;
+                while ((reader = Holder.Reader) != null)
+                {
+                    if (reader.TryIncRef())
+                    {
+                        try
+                        {
+                            bool current = reader.IsCurrent;
+                            if (VERBOSE)
+                            {
+                                Console.WriteLine("Thread: " + Thread.CurrentThread + " Reader: " + reader + " isCurrent:" + current);
+                            }
+
+                            Assert.IsFalse(current);
+                        }
+                        catch (Exception e)
+                        {
+                            if (VERBOSE)
+                            {
+                                Console.WriteLine("FAILED Thread: " + Thread.CurrentThread + " Reader: " + reader + " isCurrent: false");
+                            }
+                            Failed = e;
+                            Holder.Stop = true;
+                            return;
+                        }
+                        finally
+                        {
+                            try
+                            {
+                                reader.DecRef();
+                            }
+                            catch (IOException e)
+                            {
+                                if (Failed == null)
+                                {
+                                    Failed = e;
+                                }
+                            }
+                        }
+                        return;
+                    }
+                }
+            }
+        }
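+
+        // The TryIncRef/DecRef pairing in ReaderThread is the standard pattern for
+        // borrowing a reader that another thread may close at any moment. A
+        // minimal sketch of that pattern (assumes a shared DirectoryReader):
+        private static bool TryCheckIsCurrent(DirectoryReader reader)
+        {
+            if (!reader.TryIncRef())
+            {
+                return false; // the owner already closed the reader
+            }
+            try
+            {
+                return reader.IsCurrent; // safe while we hold our reference
+            }
+            finally
+            {
+                reader.DecRef(); // always release the borrowed reference
+            }
+        }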
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexWriterOnDiskFull.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterOnDiskFull.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterOnDiskFull.cs
new file mode 100644
index 0000000..eae0626
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterOnDiskFull.cs
@@ -0,0 +1,703 @@
+using System;
+using System.Diagnostics;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using System.IO;
+    using Util;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using NumericDocValuesField = NumericDocValuesField;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using ScoreDoc = Lucene.Net.Search.ScoreDoc;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    /// <summary>
+    /// Tests for IndexWriter when the disk runs out of space
+    /// </summary>
+    [TestFixture]
+    public class TestIndexWriterOnDiskFull : LuceneTestCase
+    {
+        /*
+         * Make sure IndexWriter cleans up on hitting a disk
+         * full exception in addDocument.
+         * TODO: how to do this on windows with FSDirectory?
+         */
+
+        [Test]
+        public virtual void TestAddDocumentOnDiskFull()
+        {
+            for (int pass = 0; pass < 2; pass++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: pass=" + pass);
+                }
+                bool doAbort = pass == 1;
+                long diskFree = TestUtil.NextInt(Random(), 100, 300);
+                while (true)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: cycle: diskFree=" + diskFree);
+                    }
+                    MockDirectoryWrapper dir = new MockDirectoryWrapper(Random(), new RAMDirectory());
+                    dir.MaxSizeInBytes = diskFree;
+                    IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                    IMergeScheduler ms = writer.Config.MergeScheduler;
+                    if (ms is IConcurrentMergeScheduler)
+                    {
+                        // this test intentionally produces exceptions
+                        // in the threads that CMS launches; we don't
+                        // want to pollute test output with these.
+                        ((IConcurrentMergeScheduler)ms).SetSuppressExceptions();
+                    }
+
+                    bool hitError = false;
+                    try
+                    {
+                        for (int i = 0; i < 200; i++)
+                        {
+                            AddDoc(writer);
+                        }
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: done adding docs; now commit");
+                        }
+                        writer.Commit();
+                    }
+                    catch (IOException e)
+                    {
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("TEST: exception on addDoc");
+                            Console.WriteLine(e.StackTrace);
+                        }
+                        hitError = true;
+                    }
+
+                    if (hitError)
+                    {
+                        if (doAbort)
+                        {
+                            if (VERBOSE)
+                            {
+                                Console.WriteLine("TEST: now rollback");
+                            }
+                            writer.Rollback();
+                        }
+                        else
+                        {
+                            try
+                            {
+                                if (VERBOSE)
+                                {
+                                    Console.WriteLine("TEST: now close");
+                                }
+                                writer.Dispose();
+                            }
+                            catch (IOException e)
+                            {
+                                if (VERBOSE)
+                                {
+                                    Console.WriteLine("TEST: exception on close; retry w/ no disk space limit");
+                                    Console.WriteLine(e.StackTrace);
+                                }
+                                dir.MaxSizeInBytes = 0;
+                                writer.Dispose();
+                            }
+                        }
+
+                        //TestUtil.SyncConcurrentMerges(ms);
+
+                        if (TestUtil.AnyFilesExceptWriteLock(dir))
+                        {
+                            TestIndexWriter.AssertNoUnreferencedFiles(dir, "after disk full during addDocument");
+
+                            // Make sure reader can open the index:
+                            DirectoryReader.Open(dir).Dispose();
+                        }
+
+                        dir.Dispose();
+                        // Now try again w/ more space:
+
+                        diskFree += TEST_NIGHTLY ? TestUtil.NextInt(Random(), 400, 600) : TestUtil.NextInt(Random(), 3000, 5000);
+                    }
+                    else
+                    {
+                        //TestUtil.SyncConcurrentMerges(writer);
+                        dir.MaxSizeInBytes = 0;
+                        writer.Dispose();
+                        dir.Dispose();
+                        break;
+                    }
+                }
+            }
+        }
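+
+        // The disk-full simulation above comes from MockDirectoryWrapper: once the
+        // bytes written reach MaxSizeInBytes, further writes throw IOException as
+        // if the device were full. A minimal sketch of arming and disarming the
+        // limit (hypothetical 1 KB budget):
+        private static MockDirectoryWrapper NewSizeLimitedDir(Random random)
+        {
+            MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory());
+            dir.MaxSizeInBytes = 1024; // writes past 1 KB now fail with IOException
+            // ... exercise an IndexWriter against dir, expecting IOException ...
+            dir.MaxSizeInBytes = 0;    // 0 removes the limit again
+            return dir;
+        }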
+
+        // TODO: make @Nightly variant that provokes more disk
+        // fulls
+
+        // TODO: have test fail if on any given top
+        // iter there was not a single IOE hit
+
+        /*
+        Test: make sure when we run out of disk space or hit
+        random IOExceptions in any of the addIndexes(*) calls
+        that 1) index is not corrupt (searcher can open/search
+        it) and 2) transactional semantics are followed:
+        either all or none of the incoming documents were in
+        fact added.
+         */
+
+        [Test]
+        public virtual void TestAddIndexOnDiskFull()
+        {
+            // MemoryCodec, since it uses FST, is not necessarily
+            // "additive", ie if you add up N small FSTs, then merge
+            // them, the merged result can easily be larger than the
+            // sum because the merged FST may use array encoding for
+            // some arcs (which uses more space):
+
+            string idFormat = TestUtil.GetPostingsFormat("id");
+            string contentFormat = TestUtil.GetPostingsFormat("content");
+            AssumeFalse("this test cannot run with Memory codec", idFormat.Equals("Memory") || contentFormat.Equals("Memory"));
+
+            int START_COUNT = 57;
+            int NUM_DIR = TEST_NIGHTLY ? 50 : 5;
+            int END_COUNT = START_COUNT + NUM_DIR * (TEST_NIGHTLY ? 25 : 5);
+
+            // Build up a bunch of dirs that have indexes which we
+            // will then merge together by calling addIndexes(*):
+            Directory[] dirs = new Directory[NUM_DIR];
+            long inputDiskUsage = 0;
+            for (int i = 0; i < NUM_DIR; i++)
+            {
+                dirs[i] = NewDirectory();
+                IndexWriter writer = new IndexWriter(dirs[i], NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                for (int j = 0; j < 25; j++)
+                {
+                    AddDocWithIndex(writer, 25 * i + j);
+                }
+                writer.Dispose();
+                string[] files = dirs[i].ListAll();
+                for (int j = 0; j < files.Length; j++)
+                {
+                    inputDiskUsage += dirs[i].FileLength(files[j]);
+                }
+            }
+
+            // Now, build a starting index that has START_COUNT docs.  We
+            // will then try to addIndexes into a copy of this:
+            MockDirectoryWrapper startDir = NewMockDirectory();
+            IndexWriter indWriter = new IndexWriter(startDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            for (int j = 0; j < START_COUNT; j++)
+            {
+                AddDocWithIndex(indWriter, j);
+            }
+            indWriter.Dispose();
+
+            // Make sure starting index seems to be working properly:
+            Term searchTerm = new Term("content", "aaa");
+            IndexReader reader = DirectoryReader.Open(startDir);
+            Assert.AreEqual(57, reader.DocFreq(searchTerm), "first docFreq");
+
+            IndexSearcher searcher = NewSearcher(reader);
+            ScoreDoc[] hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
+            Assert.AreEqual(57, hits.Length, "first number of hits");
+            reader.Dispose();
+
+            // Iterate with larger and larger amounts of free
+            // disk space.  With little free disk space,
+            // addIndexes will certainly run out of space &
+            // fail.  Verify that when this happens, index is
+            // not corrupt and index in fact has added no
+            // documents.  Then, we increase disk space by 2000
+            // bytes each iteration.  At some point there is
+            // enough free disk space and addIndexes should
+            // succeed and index should show all documents were
+            // added.
+
+            // string[] files = startDir.ListAll();
+            long diskUsage = startDir.SizeInBytes();
+
+            long startDiskUsage = 0;
+            string[] files_ = startDir.ListAll();
+            for (int i = 0; i < files_.Length; i++)
+            {
+                startDiskUsage += startDir.FileLength(files_[i]);
+            }
+
+            for (int iter = 0; iter < 3; iter++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: iter=" + iter);
+                }
+
+                // Start with 100 bytes more than we are currently using:
+                long diskFree = diskUsage + TestUtil.NextInt(Random(), 50, 200);
+
+                int method = iter;
+
+                bool success = false;
+                bool done = false;
+
+                string methodName;
+                if (0 == method)
+                {
+                    methodName = "addIndexes(Directory[]) + forceMerge(1)";
+                }
+                else if (1 == method)
+                {
+                    methodName = "addIndexes(IndexReader[])";
+                }
+                else
+                {
+                    methodName = "addIndexes(Directory[])";
+                }
+
+                while (!done)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: cycle...");
+                    }
+
+                    // Make a new dir that will enforce disk usage:
+                    MockDirectoryWrapper dir = new MockDirectoryWrapper(Random(), new RAMDirectory(startDir, NewIOContext(Random())));
+                    indWriter = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND).SetMergePolicy(NewLogMergePolicy(false)));
+                    IOException err = null;
+
+                    IMergeScheduler ms = indWriter.Config.MergeScheduler;
+                    for (int x = 0; x < 2; x++)
+                    {
+                        // this test intentionally produces exceptions
+                        // in the threads that CMS launches; we don't
+                        // want to pollute test output with these.
+                        if (ms is IConcurrentMergeScheduler)
+                        {
+                            if (0 == x)
+                            {
+                                ((IConcurrentMergeScheduler)ms).SetSuppressExceptions();
+                            }
+                            else
+                            {
+                                ((IConcurrentMergeScheduler)ms).ClearSuppressExceptions();
+                            }
+                        }
+
+                        // Two loops: first time, limit disk space &
+                        // throw random IOExceptions; second time, no
+                        // disk space limit:
+
+                        double rate = 0.05;
+                        double diskRatio = ((double)diskFree) / diskUsage;
+                        long thisDiskFree;
+
+                        string testName = null;
+
+                        if (0 == x)
+                        {
+                            dir.RandomIOExceptionRateOnOpen = Random().NextDouble() * 0.01;
+                            thisDiskFree = diskFree;
+                            if (diskRatio >= 2.0)
+                            {
+                                rate /= 2;
+                            }
+                            if (diskRatio >= 4.0)
+                            {
+                                rate /= 2;
+                            }
+                            if (diskRatio >= 6.0)
+                            {
+                                rate = 0.0;
+                            }
+                            if (VERBOSE)
+                            {
+                                testName = "disk full test " + methodName + " with disk full at " + diskFree + " bytes";
+                            }
+                        }
+                        else
+                        {
+                            dir.RandomIOExceptionRateOnOpen = 0.0;
+                            thisDiskFree = 0;
+                            rate = 0.0;
+                            if (VERBOSE)
+                            {
+                                testName = "disk full test " + methodName + " with unlimited disk space";
+                            }
+                        }
+
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("\ncycle: " + testName);
+                        }
+
+                        dir.TrackDiskUsage = true;
+                        dir.MaxSizeInBytes = thisDiskFree;
+                        dir.RandomIOExceptionRate = rate;
+
+                        try
+                        {
+                            if (0 == method)
+                            {
+                                if (VERBOSE)
+                                {
+                                    Console.WriteLine("TEST: now addIndexes count=" + dirs.Length);
+                                }
+                                indWriter.AddIndexes(dirs);
+                                if (VERBOSE)
+                                {
+                                    Console.WriteLine("TEST: now forceMerge");
+                                }
+                                indWriter.ForceMerge(1);
+                            }
+                            else if (1 == method)
+                            {
+                                IndexReader[] readers = new IndexReader[dirs.Length];
+                                for (int i = 0; i < dirs.Length; i++)
+                                {
+                                    readers[i] = DirectoryReader.Open(dirs[i]);
+                                }
+                                try
+                                {
+                                    indWriter.AddIndexes(readers);
+                                }
+                                finally
+                                {
+                                    for (int i = 0; i < dirs.Length; i++)
+                                    {
+                                        readers[i].Dispose();
+                                    }
+                                }
+                            }
+                            else
+                            {
+                                indWriter.AddIndexes(dirs);
+                            }
+
+                            success = true;
+                            if (VERBOSE)
+                            {
+                                Console.WriteLine("  success!");
+                            }
+
+                            if (0 == x)
+                            {
+                                done = true;
+                            }
+                        }
+                        catch (IOException e)
+                        {
+                            success = false;
+                            err = e;
+                            if (VERBOSE)
+                            {
+                                Console.WriteLine("  hit IOException: " + e);
+                                Console.WriteLine(e.StackTrace);
+                            }
+
+                            if (1 == x)
+                            {
+                                Console.WriteLine(e.StackTrace);
+                                Assert.Fail(methodName + " hit IOException after disk space was freed up");
+                            }
+                        }
+
+                        // Make sure all threads from
+                        // ConcurrentMergeScheduler are done
+                        TestUtil.SyncConcurrentMerges(indWriter);
+
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("  now test readers");
+                        }
+
+                        // Finally, verify index is not corrupt, and, if
+                        // we succeeded, we see all docs added, and if we
+                        // failed, we see either all docs or no docs added
+                        // (transactional semantics):
+                        dir.RandomIOExceptionRateOnOpen = 0.0;
+                        try
+                        {
+                            reader = DirectoryReader.Open(dir);
+                        }
+                        catch (IOException e)
+                        {
+                            Console.WriteLine(e.StackTrace);
+                            Assert.Fail(testName + ": exception when creating IndexReader: " + e);
+                        }
+                        int result = reader.DocFreq(searchTerm);
+                        if (success)
+                        {
+                            if (result != START_COUNT)
+                            {
+                                Assert.Fail(testName + ": method did not throw exception but docFreq('aaa') is " + result + " instead of expected " + START_COUNT);
+                            }
+                        }
+                        else
+                        {
+                            // On hitting exception we still may have added
+                            // all docs:
+                            if (result != START_COUNT && result != END_COUNT)
+                            {
+                                Console.WriteLine(err.StackTrace);
+                                Assert.Fail(testName + ": method did throw exception but docFreq('aaa') is " + result + " instead of expected " + START_COUNT + " or " + END_COUNT);
+                            }
+                        }
+
+                        searcher = NewSearcher(reader);
+                        try
+                        {
+                            hits = searcher.Search(new TermQuery(searchTerm), null, END_COUNT).ScoreDocs;
+                        }
+                        catch (IOException e)
+                        {
+                            Console.WriteLine(e.StackTrace);
+                            Assert.Fail(testName + ": exception when searching: " + e);
+                        }
+                        int result2 = hits.Length;
+                        if (success)
+                        {
+                            if (result2 != result)
+                            {
+                                Assert.Fail(testName + ": method did not throw exception but hits.Length for search on term 'aaa' is " + result2 + " instead of expected " + result);
+                            }
+                        }
+                        else
+                        {
+                            // On hitting exception we still may have added
+                            // all docs:
+                            if (result2 != result)
+                            {
+                                Console.WriteLine(err.StackTrace);
+                                Assert.Fail(testName + ": method did throw exception but hits.Length for search on term 'aaa' is " + result2 + " instead of expected " + result);
+                            }
+                        }
+
+                        reader.Dispose();
+                        if (VERBOSE)
+                        {
+                            Console.WriteLine("  count is " + result);
+                        }
+
+                        if (done || result == END_COUNT)
+                        {
+                            break;
+                        }
+                    }
+
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("  start disk = " + startDiskUsage + "; input disk = " + inputDiskUsage + "; max used = " + dir.MaxUsedSizeInBytes);
+                    }
+
+                    if (done)
+                    {
+                        // Javadocs state that temp free Directory space
+                        // required is at most 2X total input size of
+                        // indices so let's make sure:
+                        Assert.IsTrue((dir.MaxUsedSizeInBytes - startDiskUsage) < 2 * (startDiskUsage + inputDiskUsage), "max free Directory space required exceeded 2X the total input index sizes during " + methodName + ": max temp usage = " + (dir.MaxUsedSizeInBytes - startDiskUsage) + " bytes vs limit=" + (2 * (startDiskUsage + inputDiskUsage)) + "; starting disk usage = " + startDiskUsage + " bytes; " + "input index disk usage = " + inputDiskUsage + " bytes");
+                    }
+
+                    // Make sure we don't hit disk full during close below:
+                    dir.MaxSizeInBytes = 0;
+                    dir.RandomIOExceptionRate = 0.0;
+                    dir.RandomIOExceptionRateOnOpen = 0.0;
+
+                    indWriter.Dispose();
+
+                    // Wait for all BG threads to finish else
+                    // dir.Dispose() will throw IOException because
+                    // there are still open files
+                    TestUtil.SyncConcurrentMerges(ms);
+
+                    dir.Dispose();
+
+                    // Try again with more free space:
+                    diskFree += TEST_NIGHTLY ? TestUtil.NextInt(Random(), 4000, 8000) : TestUtil.NextInt(Random(), 40000, 80000);
+                }
+            }
+
+            startDir.Dispose();
+            foreach (Directory dir in dirs)
+            {
+                dir.Dispose();
+            }
+        }
+
+        private class FailTwiceDuringMerge : MockDirectoryWrapper.Failure
+        {
+            public bool DidFail1;
+            public bool DidFail2;
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
+                if (!DoFail)
+                {
+                    return;
+                }
+
+                /*typeof(SegmentMerger).Name.Equals(frame.GetType().Name) && */
+                if (StackTraceHelper.DoesStackTraceContainMethod("MergeTerms") && !DidFail1)
+                {
+                    DidFail1 = true;
+                    throw new IOException("fake disk full during mergeTerms");
+                }
+
+                /*typeof(LiveDocsFormat).Name.Equals(frame.GetType().Name) && */
+                if (StackTraceHelper.DoesStackTraceContainMethod("WriteLiveDocs") && !DidFail2)
+                {
+                    DidFail2 = true;
+                    throw new IOException("fake disk full while writing LiveDocs");
+                }
+            }
+        }
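+
+        // MockDirectoryWrapper.Failure acts as a fault-injection probe: once
+        // registered via dir.FailOn(...), Eval runs on each directory operation
+        // and may throw to simulate a failure at a precise point, here located by
+        // scanning the current stack for a method name. A minimal sketch of a
+        // failure that fires exactly once (the "Sync" trigger is hypothetical):
+        private class FailOnceDuringSync : MockDirectoryWrapper.Failure
+        {
+            private bool didFail;
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
+                if (DoFail && !didFail && StackTraceHelper.DoesStackTraceContainMethod("Sync"))
+                {
+                    didFail = true;
+                    throw new IOException("fake disk full during sync");
+                }
+            }
+        }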
+
+        // LUCENE-2593
+        [Test]
+        public virtual void TestCorruptionAfterDiskFullDuringMerge()
+        {
+            MockDirectoryWrapper dir = NewMockDirectory();
+            //IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setReaderPooling(true));
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(new SerialMergeScheduler()).SetReaderPooling(true).SetMergePolicy(NewLogMergePolicy(2)));
+            // we can do this because we add/delete/add (and don't merge to "nothing")
+            w.KeepFullyDeletedSegments = true;
+
+            Document doc = new Document();
+
+            doc.Add(NewTextField("f", "doctor who", Field.Store.NO));
+            w.AddDocument(doc);
+            w.Commit();
+
+            w.DeleteDocuments(new Term("f", "who"));
+            w.AddDocument(doc);
+
+            // disk fills up!
+            FailTwiceDuringMerge ftdm = new FailTwiceDuringMerge();
+            ftdm.SetDoFail();
+            dir.FailOn(ftdm);
+
+            try
+            {
+                w.Commit();
+                Assert.Fail("fake disk full IOExceptions not hit");
+            }
+#pragma warning disable 168
+            catch (IOException ioe)
+#pragma warning restore 168
+            {
+                // expected
+                Assert.IsTrue(ftdm.DidFail1 || ftdm.DidFail2);
+            }
+            TestUtil.CheckIndex(dir);
+            ftdm.ClearDoFail();
+            w.AddDocument(doc);
+            w.Dispose();
+
+            dir.Dispose();
+        }
+
+        // LUCENE-1130: make sure immediate disk full on creating
+        // an IndexWriter (hit during DW.ThreadState.Init()) is
+        // OK:
+        [Test]
+        public virtual void TestImmediateDiskFull([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            MockDirectoryWrapper dir = NewMockDirectory();
+            var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                            .SetMaxBufferedDocs(2)
+                            .SetMergeScheduler(scheduler);
+            IndexWriter writer = new IndexWriter(dir, config);
+            dir.MaxSizeInBytes = Math.Max(1, dir.RecomputedActualSizeInBytes);
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            doc.Add(NewField("field", "aaa bbb ccc ddd eee fff ggg hhh iii jjj", customType));
+            try
+            {
+                writer.AddDocument(doc);
+                Assert.Fail("did not hit disk full");
+            }
+            catch (IOException)
+            {
+            }
+            // Without fix for LUCENE-1130: this call will hang:
+            try
+            {
+                writer.AddDocument(doc);
+                Assert.Fail("did not hit disk full");
+            }
+            catch (IOException)
+            {
+            }
+            try
+            {
+                writer.Dispose(false);
+                Assert.Fail("did not hit disk full");
+            }
+            catch (IOException)
+            {
+            }
+
+            // Make sure once disk space is available again, we can
+            // cleanly close:
+            dir.MaxSizeInBytes = 0;
+            writer.Dispose(false);
+            dir.Dispose();
+        }
+
+        // TODO: these are also in TestIndexWriter... add a simple doc-writing method
+        // like this to LuceneTestCase?
+        private void AddDoc(IndexWriter writer)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+            if (DefaultCodecSupportsDocValues())
+            {
+                doc.Add(new NumericDocValuesField("numericdv", 1));
+            }
+            writer.AddDocument(doc);
+        }
+
+        private void AddDocWithIndex(IndexWriter writer, int index)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("content", "aaa " + index, Field.Store.NO));
+            doc.Add(NewTextField("id", "" + index, Field.Store.NO));
+            if (DefaultCodecSupportsDocValues())
+            {
+                doc.Add(new NumericDocValuesField("numericdv", 1));
+            }
+            writer.AddDocument(doc);
+        }
+    }
+}
\ No newline at end of file

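The TODO just above AddDoc hints at a shared doc-writing helper on LuceneTestCase; a minimal sketch of what such a helper might look like, with a hypothetical name and the same fields the two methods above write (not part of this commit):

    // Hypothetical consolidation of AddDoc/AddDocWithIndex; name and placement assumed.
    private void AddSimpleDoc(IndexWriter writer, int? index = null)
    {
        Document doc = new Document();
        doc.Add(NewTextField("content", index.HasValue ? "aaa " + index.Value : "aaa", Field.Store.NO));
        if (index.HasValue)
        {
            doc.Add(NewTextField("id", index.Value.ToString(), Field.Store.NO));
        }
        if (DefaultCodecSupportsDocValues())
        {
            doc.Add(new NumericDocValuesField("numericdv", 1));
        }
        writer.AddDocument(doc);
    }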

[20/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs b/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs
new file mode 100644
index 0000000..706987e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs
@@ -0,0 +1,580 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Linq;
+using Lucene.Net.Documents;
+using Lucene.Net.Search;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CannedTokenStream = Lucene.Net.Analysis.CannedTokenStream;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using English = Lucene.Net.Util.English;
+    using Field = Field;
+    using FieldType = FieldType;
+    using Int32Field = Int32Field;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockPayloadAnalyzer = Lucene.Net.Analysis.MockPayloadAnalyzer;
+    using StringField = StringField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+    using Token = Lucene.Net.Analysis.Token;
+    using TokenStream = Lucene.Net.Analysis.TokenStream;
+
+    // TODO: we really need to test indexing offsets, but then getting only docs / docs + freqs.
+    // not all codecs store prx separately...
+    // TODO: fix sep codec to index offsets so we can greatly reduce this list!
+    [SuppressCodecs("Lucene3x", "MockFixedIntBlock", "MockVariableIntBlock", "MockSep", "MockRandom")]
+    [TestFixture]
+    public class TestPostingsOffsets : LuceneTestCase
+    {
+        internal IndexWriterConfig Iwc;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+        }
+
+        [Test]
+        public virtual void TestBasic()
+        {
+            Directory dir = NewDirectory();
+
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Iwc);
+            Document doc = new Document();
+
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
+            if (Random().NextBoolean())
+            {
+                ft.StoreTermVectors = true;
+                ft.StoreTermVectorPositions = Random().NextBoolean();
+                ft.StoreTermVectorOffsets = Random().NextBoolean();
+            }
+            Token[] tokens = new Token[] { MakeToken("a", 1, 0, 6), MakeToken("b", 1, 8, 9), MakeToken("a", 1, 9, 17), MakeToken("c", 1, 19, 50) };
+            doc.Add(new Field("content", new CannedTokenStream(tokens), ft));
+
+            w.AddDocument(doc);
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            DocsAndPositionsEnum dp = MultiFields.GetTermPositionsEnum(r, null, "content", new BytesRef("a"));
+            Assert.IsNotNull(dp);
+            Assert.AreEqual(0, dp.NextDoc());
+            Assert.AreEqual(2, dp.Freq);
+            Assert.AreEqual(0, dp.NextPosition());
+            Assert.AreEqual(0, dp.StartOffset);
+            Assert.AreEqual(6, dp.EndOffset);
+            Assert.AreEqual(2, dp.NextPosition());
+            Assert.AreEqual(9, dp.StartOffset);
+            Assert.AreEqual(17, dp.EndOffset);
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dp.NextDoc());
+
+            dp = MultiFields.GetTermPositionsEnum(r, null, "content", new BytesRef("b"));
+            Assert.IsNotNull(dp);
+            Assert.AreEqual(0, dp.NextDoc());
+            Assert.AreEqual(1, dp.Freq);
+            Assert.AreEqual(1, dp.NextPosition());
+            Assert.AreEqual(8, dp.StartOffset);
+            Assert.AreEqual(9, dp.EndOffset);
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dp.NextDoc());
+
+            dp = MultiFields.GetTermPositionsEnum(r, null, "content", new BytesRef("c"));
+            Assert.IsNotNull(dp);
+            Assert.AreEqual(0, dp.NextDoc());
+            Assert.AreEqual(1, dp.Freq);
+            Assert.AreEqual(3, dp.NextPosition());
+            Assert.AreEqual(19, dp.StartOffset);
+            Assert.AreEqual(50, dp.EndOffset);
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dp.NextDoc());
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSkipping()
+        {
+            DoTestNumbers(false);
+        }
+
+        [Test]
+        public virtual void TestPayloads()
+        {
+            DoTestNumbers(true);
+        }
+
+        public virtual void DoTestNumbers(bool withPayloads)
+        {
+            Directory dir = NewDirectory();
+            Analyzer analyzer = withPayloads ? (Analyzer)new MockPayloadAnalyzer() : new MockAnalyzer(Random());
+            Iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            Iwc.SetMergePolicy(NewLogMergePolicy()); // will rely on docids a bit for skipping
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Iwc);
+
+            FieldType ft = new FieldType(TextField.TYPE_STORED);
+            ft.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
+            if (Random().NextBoolean())
+            {
+                ft.StoreTermVectors = true;
+                ft.StoreTermVectorOffsets = Random().NextBoolean();
+                ft.StoreTermVectorPositions = Random().NextBoolean();
+            }
+
+            int numDocs = AtLeast(500);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new Field("numbers", English.IntToEnglish(i), ft));
+                doc.Add(new Field("oddeven", (i % 2) == 0 ? "even" : "odd", ft));
+                doc.Add(new StringField("id", "" + i, Field.Store.NO));
+                w.AddDocument(doc);
+            }
+
+            IndexReader reader = w.Reader;
+            w.Dispose();
+
+            string[] terms = new string[] { "one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "hundred" };
+
+            foreach (string term in terms)
+            {
+                DocsAndPositionsEnum dp = MultiFields.GetTermPositionsEnum(reader, null, "numbers", new BytesRef(term));
+                int doc;
+                while ((doc = dp.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
+                {
+                    string storedNumbers = reader.Document(doc).Get("numbers");
+                    int freq = dp.Freq;
+                    for (int i = 0; i < freq; i++)
+                    {
+                        dp.NextPosition();
+                        int start = dp.StartOffset;
+                        Debug.Assert(start >= 0);
+                        int end = dp.EndOffset;
+                        Debug.Assert(end >= 0 && end >= start);
+                        // check that the offsets correspond to the term in the src text
+                        Assert.IsTrue(storedNumbers.Substring(start, end - start).Equals(term));
+                        if (withPayloads)
+                        {
+                            // check that we have a payload and it starts with "pos"
+                            Assert.IsNotNull(dp.GetPayload());
+                            BytesRef payload = dp.GetPayload();
+                            Assert.IsTrue(payload.Utf8ToString().StartsWith("pos:"));
+                        } // note: withPayloads=false doesn't necessarily mean we don't have them from MockAnalyzer!
+                    }
+                }
+            }
+
+            // check we can skip correctly
+            int numSkippingTests = AtLeast(50);
+
+            for (int j = 0; j < numSkippingTests; j++)
+            {
+                int num = TestUtil.NextInt(Random(), 100, Math.Min(numDocs - 1, 999));
+                DocsAndPositionsEnum dp = MultiFields.GetTermPositionsEnum(reader, null, "numbers", new BytesRef("hundred"));
+                int doc = dp.Advance(num);
+                Assert.AreEqual(num, doc);
+                int freq = dp.Freq;
+                for (int i = 0; i < freq; i++)
+                {
+                    string storedNumbers = reader.Document(doc).Get("numbers");
+                    dp.NextPosition();
+                    int start = dp.StartOffset;
+                    Debug.Assert(start >= 0);
+                    int end = dp.EndOffset;
+                    Debug.Assert(end >= 0 && end >= start);
+                    // check that the offsets correspond to the term in the src text
+                    Assert.IsTrue(storedNumbers.Substring(start, end - start).Equals("hundred"));
+                    if (withPayloads)
+                    {
+                        // check that we have a payload and it starts with "pos"
+                        Assert.IsNotNull(dp.GetPayload());
+                        BytesRef payload = dp.GetPayload();
+                        Assert.IsTrue(payload.Utf8ToString().StartsWith("pos:"));
+                    } // note: withPayloads=false doesn't necessarily mean we don't have them from MockAnalyzer!
+                }
+            }
+
+            // check that other fields (without offsets) work correctly
+
+            for (int i = 0; i < numDocs; i++)
+            {
+                DocsEnum dp = MultiFields.GetTermDocsEnum(reader, null, "id", new BytesRef("" + i), 0);
+                Assert.AreEqual(i, dp.NextDoc());
+                Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dp.NextDoc());
+            }
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestRandom()
+        {
+            // token -> docID -> tokens
+            IDictionary<string, IDictionary<int?, IList<Token>>> actualTokens = new Dictionary<string, IDictionary<int?, IList<Token>>>();
+
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Iwc);
+
+            int numDocs = AtLeast(20);
+            //final int numDocs = AtLeast(5);
+
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+
+            // TODO: randomize what IndexOptions we use; also test
+            // changing this up in one IW buffered segment...:
+            ft.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
+            if (Random().NextBoolean())
+            {
+                ft.StoreTermVectors = true;
+                ft.StoreTermVectorOffsets = Random().NextBoolean();
+                ft.StoreTermVectorPositions = Random().NextBoolean();
+            }
+
+            for (int docCount = 0; docCount < numDocs; docCount++)
+            {
+                Document doc = new Document();
+                doc.Add(new Int32Field("id", docCount, Field.Store.NO));
+                IList<Token> tokens = new List<Token>();
+                int numTokens = AtLeast(100);
+                //final int numTokens = AtLeast(20);
+                int pos = -1;
+                int offset = 0;
+                //System.out.println("doc id=" + docCount);
+                for (int tokenCount = 0; tokenCount < numTokens; tokenCount++)
+                {
+                    string text;
+                    if (Random().NextBoolean())
+                    {
+                        text = "a";
+                    }
+                    else if (Random().NextBoolean())
+                    {
+                        text = "b";
+                    }
+                    else if (Random().NextBoolean())
+                    {
+                        text = "c";
+                    }
+                    else
+                    {
+                        text = "d";
+                    }
+
+                    int posIncr = Random().NextBoolean() ? 1 : Random().Next(5);
+                    if (tokenCount == 0 && posIncr == 0)
+                    {
+                        posIncr = 1;
+                    }
+                    int offIncr = Random().NextBoolean() ? 0 : Random().Next(5);
+                    int tokenOffset = Random().Next(5);
+
+                    Token token = MakeToken(text, posIncr, offset + offIncr, offset + offIncr + tokenOffset);
+                    if (!actualTokens.ContainsKey(text))
+                    {
+                        actualTokens[text] = new Dictionary<int?, IList<Token>>();
+                    }
+                    IDictionary<int?, IList<Token>> postingsByDoc = actualTokens[text];
+                    if (!postingsByDoc.ContainsKey(docCount))
+                    {
+                        postingsByDoc[docCount] = new List<Token>();
+                    }
+                    postingsByDoc[docCount].Add(token);
+                    tokens.Add(token);
+                    pos += posIncr;
+                    // stuff abs position into type:
+                    token.Type = "" + pos;
+                    offset += offIncr + tokenOffset;
+                    //System.out.println("  " + token + " posIncr=" + token.getPositionIncrement() + " pos=" + pos + " off=" + token.StartOffset + "/" + token.EndOffset + " (freq=" + postingsByDoc.Get(docCount).Size() + ")");
+                }
+                doc.Add(new Field("content", new CannedTokenStream(tokens.ToArray()), ft));
+                w.AddDocument(doc);
+            }
+            DirectoryReader r = w.Reader;
+            w.Dispose();
+
+            string[] terms = new string[] { "a", "b", "c", "d" };
+            foreach (AtomicReaderContext ctx in r.Leaves)
+            {
+                // TODO: improve this
+                AtomicReader sub = (AtomicReader)ctx.Reader;
+                //System.out.println("\nsub=" + sub);
+                TermsEnum termsEnum = sub.Fields.GetTerms("content").GetIterator(null);
+                DocsEnum docs = null;
+                DocsAndPositionsEnum docsAndPositions = null;
+                DocsAndPositionsEnum docsAndPositionsAndOffsets = null;
+                FieldCache.Int32s docIDToID = FieldCache.DEFAULT.GetInt32s(sub, "id", false);
+                foreach (string term in terms)
+                {
+                    //System.out.println("  term=" + term);
+                    if (termsEnum.SeekExact(new BytesRef(term)))
+                    {
+                        docs = termsEnum.Docs(null, docs);
+                        Assert.IsNotNull(docs);
+                        int doc;
+                        //System.out.println("    doc/freq");
+                        while ((doc = docs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
+                        {
+                            IList<Token> expected = actualTokens[term][docIDToID.Get(doc)];
+                            //System.out.println("      doc=" + docIDToID.Get(doc) + " docID=" + doc + " " + expected.Size() + " freq");
+                            Assert.IsNotNull(expected);
+                            Assert.AreEqual(expected.Count, docs.Freq);
+                        }
+
+                        // explicitly exclude offsets here
+                        docsAndPositions = termsEnum.DocsAndPositions(null, docsAndPositions, DocsAndPositionsEnum.FLAG_PAYLOADS);
+                        Assert.IsNotNull(docsAndPositions);
+                        //System.out.println("    doc/freq/pos");
+                        while ((doc = docsAndPositions.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
+                        {
+                            IList<Token> expected = actualTokens[term][docIDToID.Get(doc)];
+                            //System.out.println("      doc=" + docIDToID.Get(doc) + " " + expected.Size() + " freq");
+                            Assert.IsNotNull(expected);
+                            Assert.AreEqual(expected.Count, docsAndPositions.Freq);
+                            foreach (Token token in expected)
+                            {
+                                int pos = Convert.ToInt32(token.Type);
+                                //System.out.println("        pos=" + pos);
+                                Assert.AreEqual(pos, docsAndPositions.NextPosition());
+                            }
+                        }
+
+                        docsAndPositionsAndOffsets = termsEnum.DocsAndPositions(null, docsAndPositions);
+                        Assert.IsNotNull(docsAndPositionsAndOffsets);
+                        //System.out.println("    doc/freq/pos/offs");
+                        while ((doc = docsAndPositionsAndOffsets.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
+                        {
+                            IList<Token> expected = actualTokens[term][docIDToID.Get(doc)];
+                            //System.out.println("      doc=" + docIDToID.Get(doc) + " " + expected.Size() + " freq");
+                            Assert.IsNotNull(expected);
+                            Assert.AreEqual(expected.Count, docsAndPositionsAndOffsets.Freq);
+                            foreach (Token token in expected)
+                            {
+                                int pos = Convert.ToInt32(token.Type);
+                                //System.out.println("        pos=" + pos);
+                                Assert.AreEqual(pos, docsAndPositionsAndOffsets.NextPosition());
+                                Assert.AreEqual(token.StartOffset, docsAndPositionsAndOffsets.StartOffset);
+                                Assert.AreEqual(token.EndOffset, docsAndPositionsAndOffsets.EndOffset);
+                            }
+                        }
+                    }
+                }
+                // TODO: test advance:
+            }
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestWithUnindexedFields()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter riw = new RandomIndexWriter(Random(), dir, Iwc);
+            for (int i = 0; i < 100; i++)
+            {
+                Document doc = new Document();
+                // ensure at least one doc is indexed with offsets
+                if (i < 99 && Random().Next(2) == 0)
+                {
+                    // stored only
+                    FieldType ft = new FieldType();
+                    ft.IsIndexed = false;
+                    ft.IsStored = true;
+                    doc.Add(new Field("foo", "boo!", ft));
+                }
+                else
+                {
+                    FieldType ft = new FieldType(TextField.TYPE_STORED);
+                    ft.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
+                    if (Random().NextBoolean())
+                    {
+                        // store some term vectors for the checkindex cross-check
+                        ft.StoreTermVectors = true;
+                        ft.StoreTermVectorPositions = true;
+                        ft.StoreTermVectorOffsets = true;
+                    }
+                    doc.Add(new Field("foo", "bar", ft));
+                }
+                riw.AddDocument(doc);
+            }
+            CompositeReader ir = riw.Reader;
+            AtomicReader slow = SlowCompositeReaderWrapper.Wrap(ir);
+            FieldInfos fis = slow.FieldInfos;
+            Assert.AreEqual(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, fis.FieldInfo("foo").IndexOptions);
+            slow.Dispose();
+            ir.Dispose();
+            riw.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestAddFieldTwice()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            FieldType customType3 = new FieldType(TextField.TYPE_STORED);
+            customType3.StoreTermVectors = true;
+            customType3.StoreTermVectorPositions = true;
+            customType3.StoreTermVectorOffsets = true;
+            customType3.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
+            doc.Add(new Field("content3", "here is more content with aaa aaa aaa", customType3));
+            doc.Add(new Field("content3", "here is more content with aaa aaa aaa", customType3));
+            iw.AddDocument(doc);
+            iw.Dispose();
+            dir.Dispose(); // checkindex
+        }
+
+        // NOTE: the next two tests aren't that good as we need an EvilToken...
+        [Test]
+        public virtual void TestNegativeOffsets()
+        {
+            try
+            {
+                CheckTokens(new Token[] { MakeToken("foo", 1, -1, -1) });
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                //expected
+            }
+        }
+
+        [Test]
+        public virtual void TestIllegalOffsets()
+        {
+            try
+            {
+                CheckTokens(new Token[] { MakeToken("foo", 1, 1, 0) });
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                //expected
+            }
+        }
+
+        [Test]
+        public virtual void TestBackwardsOffsets()
+        {
+            try
+            {
+                CheckTokens(new Token[] { MakeToken("foo", 1, 0, 3), MakeToken("foo", 1, 4, 7), MakeToken("foo", 0, 3, 6) });
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+        }
+
+        [Test]
+        public virtual void TestStackedTokens()
+        {
+            CheckTokens(new Token[] { MakeToken("foo", 1, 0, 3), MakeToken("foo", 0, 0, 3), MakeToken("foo", 0, 0, 3) });
+        }
+
+        [Test]
+        public virtual void TestLegalbutVeryLargeOffsets()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter iw = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null));
+            Document doc = new Document();
+            Token t1 = new Token("foo", 0, int.MaxValue - 500);
+            if (Random().NextBoolean())
+            {
+                t1.Payload = new BytesRef("test");
+            }
+            Token t2 = new Token("foo", int.MaxValue - 500, int.MaxValue);
+            TokenStream tokenStream = new CannedTokenStream(new Token[] { t1, t2 });
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
+            // store some term vectors for the checkindex cross-check
+            ft.StoreTermVectors = true;
+            ft.StoreTermVectorPositions = true;
+            ft.StoreTermVectorOffsets = true;
+            Field field = new Field("foo", tokenStream, ft);
+            doc.Add(field);
+            iw.AddDocument(doc);
+            iw.Dispose();
+            dir.Dispose();
+        }
+
+        // TODO: more tests with other possibilities
+
+        private void CheckTokens(Token[] tokens)
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter riw = new RandomIndexWriter(Random(), dir, Iwc);
+            bool success = false;
+            try
+            {
+                FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+                ft.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
+                // store some term vectors for the checkindex cross-check
+                ft.StoreTermVectors = true;
+                ft.StoreTermVectorPositions = true;
+                ft.StoreTermVectorOffsets = true;
+
+                Document doc = new Document();
+                doc.Add(new Field("body", new CannedTokenStream(tokens), ft));
+                riw.AddDocument(doc);
+                success = true;
+            }
+            finally
+            {
+                if (success)
+                {
+                    IOUtils.Close(riw, dir);
+                }
+                else
+                {
+                    IOUtils.CloseWhileHandlingException(riw, dir);
+                }
+            }
+        }
+
+        private Token MakeToken(string text, int posIncr, int startOffset, int endOffset)
+        {
+            Token t = new Token();
+            t.Append(text);
+            t.PositionIncrement = posIncr;
+            t.SetOffset(startOffset, endOffset);
+            return t;
+        }
+    }
+}
\ No newline at end of file

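All of the offsets tests in the file above share one indexing recipe; isolated from the test plumbing, it is roughly the following sketch (assuming the usual Directory/IndexWriter setup; not code from this commit):

    // Sketch: a field must be indexed with DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS
    // for DocsAndPositionsEnum.StartOffset/EndOffset to be available at search time.
    FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
    ft.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
    ft.Freeze(); // lock the type once configured
    Document doc = new Document();
    doc.Add(new Field("content", "some analyzed text", ft));
    writer.AddDocument(doc); // 'writer' assumed from the surrounding setup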
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestPrefixCodedTerms.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestPrefixCodedTerms.cs b/src/Lucene.Net.Tests/Index/TestPrefixCodedTerms.cs
new file mode 100644
index 0000000..031d5c0
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestPrefixCodedTerms.cs
@@ -0,0 +1,142 @@
+using Lucene.Net.Support;
+using System.Collections.Generic;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Util;
+    using NUnit.Framework;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    //using MergedIterator = Lucene.Net.Util.MergedIterator;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestPrefixCodedTerms : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestEmpty()
+        {
+            PrefixCodedTerms.Builder b = new PrefixCodedTerms.Builder();
+            PrefixCodedTerms pb = b.Finish();
+            Assert.IsFalse(pb.GetEnumerator().MoveNext());
+        }
+
+        [Test]
+        public virtual void TestOne()
+        {
+            Term term = new Term("foo", "bogus");
+            PrefixCodedTerms.Builder b = new PrefixCodedTerms.Builder();
+            b.Add(term);
+            PrefixCodedTerms pb = b.Finish();
+            IEnumerator<Term> iterator = pb.GetEnumerator();
+            Assert.IsTrue(iterator.MoveNext());
+            Assert.AreEqual(term, iterator.Current);
+        }
+
+        [Test]
+        public virtual void TestRandom()
+        {
+            SortedSet<Term> terms = new SortedSet<Term>();
+            int nterms = AtLeast(10000);
+            for (int i = 0; i < nterms; i++)
+            {
+                Term term = new Term(TestUtil.RandomUnicodeString(Random(), 2), TestUtil.RandomUnicodeString(Random()));
+                terms.Add(term);
+            }
+
+            PrefixCodedTerms.Builder b = new PrefixCodedTerms.Builder();
+            foreach (Term @ref in terms)
+            {
+                b.Add(@ref);
+            }
+            PrefixCodedTerms pb = b.Finish();
+
+            IEnumerator<Term> expected = terms.GetEnumerator();
+            foreach (Term t in pb)
+            {
+                Assert.IsTrue(expected.MoveNext());
+                Assert.AreEqual(expected.Current, t);
+            }
+            Assert.IsFalse(expected.MoveNext());
+        }
+
+        [Test]
+        public virtual void TestMergeOne()
+        {
+            Term t1 = new Term("foo", "a");
+            PrefixCodedTerms.Builder b1 = new PrefixCodedTerms.Builder();
+            b1.Add(t1);
+            PrefixCodedTerms pb1 = b1.Finish();
+
+            Term t2 = new Term("foo", "b");
+            PrefixCodedTerms.Builder b2 = new PrefixCodedTerms.Builder();
+            b2.Add(t2);
+            PrefixCodedTerms pb2 = b2.Finish();
+
+            IEnumerator<Term> merged = new MergedIterator<Term>(pb1.GetEnumerator(), pb2.GetEnumerator());
+            Assert.IsTrue(merged.MoveNext());
+            Assert.AreEqual(t1, merged.Current);
+            Assert.IsTrue(merged.MoveNext());
+            Assert.AreEqual(t2, merged.Current);
+        }
+
+        [Test]
+        public virtual void TestMergeRandom()
+        {
+            PrefixCodedTerms[] pb = new PrefixCodedTerms[TestUtil.NextInt(Random(), 2, 10)];
+            SortedSet<Term> superSet = new SortedSet<Term>();
+
+            for (int i = 0; i < pb.Length; i++)
+            {
+                SortedSet<Term> terms = new SortedSet<Term>();
+                int nterms = TestUtil.NextInt(Random(), 0, 10000);
+                for (int j = 0; j < nterms; j++)
+                {
+                    Term term = new Term(TestUtil.RandomUnicodeString(Random(), 2), TestUtil.RandomUnicodeString(Random(), 4));
+                    terms.Add(term);
+                }
+                superSet.AddAll(terms);
+
+                PrefixCodedTerms.Builder b = new PrefixCodedTerms.Builder();
+                foreach (Term @ref in terms)
+                {
+                    b.Add(@ref);
+                }
+                pb[i] = b.Finish();
+            }
+
+            List<IEnumerator<Term>> subs = new List<IEnumerator<Term>>();
+            for (int i = 0; i < pb.Length; i++)
+            {
+                subs.Add(pb[i].GetEnumerator());
+            }
+
+            IEnumerator<Term> expected = superSet.GetEnumerator();
+            IEnumerator<Term> actual = new MergedIterator<Term>(subs.ToArray());
+            while (actual.MoveNext())
+            {
+                Assert.IsTrue(expected.MoveNext());
+                Assert.AreEqual(expected.Current, actual.Current);
+            }
+            Assert.IsFalse(expected.MoveNext());
+        }
+    }
+}
\ No newline at end of file

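TestMergeOne and TestMergeRandom above depend on Term's natural ordering (field name first, then term text); that is what lets MergedIterator interleave the per-builder enumerators into a single sorted stream. A small sketch of the ordering assumption (Debug.Assert from System.Diagnostics):

    // Term compares by field first, then by text, so "foo:b" from the second
    // builder correctly sorts after "foo:a" from the first in the merged view.
    Term a = new Term("foo", "a");
    Term b = new Term("foo", "b");
    Term c = new Term("goo", "a");
    Debug.Assert(a.CompareTo(b) < 0); // same field: term text decides
    Debug.Assert(b.CompareTo(c) < 0); // different field: field name decides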
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestReaderClosed.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestReaderClosed.cs b/src/Lucene.Net.Tests/Index/TestReaderClosed.cs
new file mode 100644
index 0000000..99df942
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestReaderClosed.cs
@@ -0,0 +1,118 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using TermRangeQuery = Lucene.Net.Search.TermRangeQuery;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestReaderClosed : LuceneTestCase
+    {
+        private IndexReader Reader;
+        private Directory Dir;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false)).SetMaxBufferedDocs(TestUtil.NextInt(Random(), 50, 1000)));
+
+            Document doc = new Document();
+            Field field = NewStringField("field", "", Field.Store.NO);
+            doc.Add(field);
+
+            // we generate awful prefixes: good for testing.
+            // but for the preflex codec the test can be very slow, so use fewer iterations.
+            int num = AtLeast(10);
+            for (int i = 0; i < num; i++)
+            {
+                field.SetStringValue(TestUtil.RandomUnicodeString(Random(), 10));
+                writer.AddDocument(doc);
+            }
+            Reader = writer.Reader;
+            writer.Dispose();
+        }
+
+        [Test]
+        public virtual void Test()
+        {
+            Assert.IsTrue(Reader.RefCount > 0);
+            IndexSearcher searcher = NewSearcher(Reader);
+            TermRangeQuery query = TermRangeQuery.NewStringRange("field", "a", "z", true, true);
+            searcher.Search(query, 5);
+            Reader.Dispose();
+            try
+            {
+                searcher.Search(query, 5);
+            }
+#pragma warning disable 168
+            catch (AlreadyClosedException ace)
+#pragma warning restore 168
+            {
+                // expected
+            }
+        }
+
+        // LUCENE-3800
+        [Test]
+        public virtual void TestReaderChaining()
+        {
+            Assert.IsTrue(Reader.RefCount > 0);
+            IndexReader wrappedReader = SlowCompositeReaderWrapper.Wrap(Reader);
+            wrappedReader = new ParallelAtomicReader((AtomicReader)wrappedReader);
+
+            IndexSearcher searcher = NewSearcher(wrappedReader);
+            TermRangeQuery query = TermRangeQuery.NewStringRange("field", "a", "z", true, true);
+            searcher.Search(query, 5);
+            Reader.Dispose(); // close original child reader
+            try
+            {
+                searcher.Search(query, 5);
+            }
+            catch (AlreadyClosedException ace)
+            {
+                Assert.AreEqual("this IndexReader cannot be used anymore as one of its child readers was closed", ace.Message);
+            }
+            finally
+            {
+                // shutdown executor: in case of wrap-wrap-wrapping
+                searcher.IndexReader.Dispose();
+            }
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Dir.Dispose();
+            base.TearDown();
+        }
+    }
+}
\ No newline at end of file

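The RefCount assertions above rest on IndexReader's reference counting: Dispose only decrements the count, and the reader actually closes when it reaches zero. A consumer that must survive another owner's Dispose can pin the reader, roughly as in this sketch (assuming `reader` is an open IndexReader):

    reader.IncRef(); // take our own reference
    try
    {
        // ... run searches against the reader here ...
    }
    finally
    {
        reader.DecRef(); // release it; the reader closes once the count hits zero
    }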
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestRollback.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestRollback.cs b/src/Lucene.Net.Tests/Index/TestRollback.cs
new file mode 100644
index 0000000..f613e47
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestRollback.cs
@@ -0,0 +1,67 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+
+    [TestFixture]
+    public class TestRollback : LuceneTestCase
+    {
+        // LUCENE-2536
+        [Test]
+        public virtual void TestRollbackIntegrityWithBufferFlush()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter rw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            for (int i = 0; i < 5; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("pk", Convert.ToString(i), Field.Store.YES));
+                rw.AddDocument(doc);
+            }
+            rw.Dispose();
+
+            // If buffer size is small enough to cause a flush, errors ensue...
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetOpenMode(OpenMode.APPEND));
+
+            for (int i = 0; i < 3; i++)
+            {
+                Document doc = new Document();
+                string value = Convert.ToString(i);
+                doc.Add(NewStringField("pk", value, Field.Store.YES));
+                doc.Add(NewStringField("text", "foo", Field.Store.YES));
+                w.UpdateDocument(new Term("pk", value), doc);
+            }
+            w.Rollback();
+
+            IndexReader r = DirectoryReader.Open(dir);
+            Assert.AreEqual(5, r.NumDocs, "index should contain same number of docs post rollback");
+            r.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

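For context on the assertion above: Rollback discards everything since the last commit and closes the writer, which is why the five committed documents survive while the three buffered updates disappear. Reduced to a sketch (dir, config, and the documents are assumed; Debug.Assert from System.Diagnostics):

    IndexWriter w = new IndexWriter(dir, config);
    w.AddDocument(doc1);
    w.Commit();          // doc1 is now durable
    w.AddDocument(doc2); // buffered (and possibly flushed) but not committed
    w.Rollback();        // drops doc2, keeps doc1, and closes the writer

    using (IndexReader r = DirectoryReader.Open(dir))
    {
        Debug.Assert(r.NumDocs == 1); // only the committed document remains
    }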
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestRollingUpdates.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestRollingUpdates.cs b/src/Lucene.Net.Tests/Index/TestRollingUpdates.cs
new file mode 100644
index 0000000..8989662
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestRollingUpdates.cs
@@ -0,0 +1,285 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Codecs.Memory;
+    //using MemoryPostingsFormat = Lucene.Net.Codecs.memory.MemoryPostingsFormat;
+
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Store;
+    using Lucene.Net.Support;
+    using Lucene.Net.Util;
+    using NUnit.Framework;
+    using Codec = Lucene.Net.Codecs.Codec;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TopDocs = Lucene.Net.Search.TopDocs;
+
+    [TestFixture]
+    public class TestRollingUpdates : LuceneTestCase
+    {
+        // Just updates the same set of N docs over and over, to
+        // stress out deletions
+
+        [Test]
+        public virtual void TestRollingUpdates_Mem()
+        {
+            Random random = new Random(Random().Next());
+            BaseDirectoryWrapper dir = NewDirectory();
+            LineFileDocs docs = new LineFileDocs(random, DefaultCodecSupportsDocValues());
+
+            //provider.register(new MemoryCodec());
+            if ((!"Lucene3x".Equals(Codec.Default.Name)) && Random().NextBoolean())
+            {
+                Codec.Default =
+                    TestUtil.AlwaysPostingsFormat(new MemoryPostingsFormat(Random().NextBoolean(), random.NextFloat()));
+            }
+
+            MockAnalyzer analyzer = new MockAnalyzer(Random());
+            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
+
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
+            int SIZE = AtLeast(20);
+            int id = 0;
+            IndexReader r = null;
+            IndexSearcher s = null;
+            int numUpdates = (int)(SIZE * (2 + (TEST_NIGHTLY ? 200 * Random().NextDouble() : 5 * Random().NextDouble())));
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: numUpdates=" + numUpdates);
+            }
+            int updateCount = 0;
+            // TODO: sometimes update ids not in order...
+            for (int docIter = 0; docIter < numUpdates; docIter++)
+            {
+                Documents.Document doc = docs.NextDoc();
+                string myID = "" + id;
+                if (id == SIZE - 1)
+                {
+                    id = 0;
+                }
+                else
+                {
+                    id++;
+                }
+                if (VERBOSE)
+                {
+                    Console.WriteLine("  docIter=" + docIter + " id=" + id);
+                }
+                ((Field)doc.GetField("docid")).SetStringValue(myID);
+
+                Term idTerm = new Term("docid", myID);
+
+                bool doUpdate;
+                if (s != null && updateCount < SIZE)
+                {
+                    TopDocs hits = s.Search(new TermQuery(idTerm), 1);
+                    Assert.AreEqual(1, hits.TotalHits);
+                    doUpdate = !w.TryDeleteDocument(r, hits.ScoreDocs[0].Doc);
+                    if (VERBOSE)
+                    {
+                        if (doUpdate)
+                        {
+                            Console.WriteLine("  tryDeleteDocument failed");
+                        }
+                        else
+                        {
+                            Console.WriteLine("  tryDeleteDocument succeeded");
+                        }
+                    }
+                }
+                else
+                {
+                    doUpdate = true;
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("  no searcher: doUpdate=true");
+                    }
+                }
+
+                updateCount++;
+
+                if (doUpdate)
+                {
+                    w.UpdateDocument(idTerm, doc);
+                }
+                else
+                {
+                    w.AddDocument(doc);
+                }
+
+                if (docIter >= SIZE && Random().Next(50) == 17)
+                {
+                    if (r != null)
+                    {
+                        r.Dispose();
+                    }
+
+                    bool applyDeletions = Random().NextBoolean();
+
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: reopen applyDeletions=" + applyDeletions);
+                    }
+
+                    r = w.GetReader(applyDeletions);
+                    if (applyDeletions)
+                    {
+                        s = NewSearcher(r);
+                    }
+                    else
+                    {
+                        s = null;
+                    }
+                    Assert.IsTrue(!applyDeletions || r.NumDocs == SIZE, "applyDeletions=" + applyDeletions + " r.NumDocs=" + r.NumDocs + " vs SIZE=" + SIZE);
+                    updateCount = 0;
+                }
+            }
+
+            if (r != null)
+            {
+                r.Dispose();
+            }
+
+            w.Commit();
+            Assert.AreEqual(SIZE, w.NumDocs);
+
+            w.Dispose();
+
+            TestIndexWriter.AssertNoUnreferencedFiles(dir, "leftover files after rolling updates");
+
+            docs.Dispose();
+
+            // LUCENE-4455:
+            SegmentInfos infos = new SegmentInfos();
+            infos.Read(dir);
+            long totalBytes = 0;
+            foreach (SegmentCommitInfo sipc in infos.Segments)
+            {
+                totalBytes += sipc.SizeInBytes();
+            }
+            long totalBytes2 = 0;
+            foreach (string fileName in dir.ListAll())
+            {
+                if (!fileName.StartsWith(IndexFileNames.SEGMENTS))
+                {
+                    totalBytes2 += dir.FileLength(fileName);
+                }
+            }
+            Assert.AreEqual(totalBytes2, totalBytes);
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestUpdateSameDoc()
+        {
+            Directory dir = NewDirectory();
+
+            LineFileDocs docs = new LineFileDocs(Random());
+            for (int r = 0; r < 3; r++)
+            {
+                IndexWriter w = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
+                int numUpdates = AtLeast(20);
+                int numThreads = TestUtil.NextInt(Random(), 2, 6);
+                IndexingThread[] threads = new IndexingThread[numThreads];
+                for (int i = 0; i < numThreads; i++)
+                {
+                    threads[i] = new IndexingThread(docs, w, numUpdates, NewStringField);
+                    threads[i].Start();
+                }
+
+                for (int i = 0; i < numThreads; i++)
+                {
+                    threads[i].Join();
+                }
+
+                w.Dispose();
+            }
+
+            IndexReader open = DirectoryReader.Open(dir);
+            Assert.AreEqual(1, open.NumDocs);
+            open.Dispose();
+            docs.Dispose();
+            dir.Dispose();
+        }
+
+        internal class IndexingThread : ThreadClass
+        {
+            internal readonly LineFileDocs Docs;
+            internal readonly IndexWriter Writer;
+            internal readonly int Num;
+
+            private readonly Func<string, string, Field.Store, Field> NewStringField;
+
+            /// <param name="newStringField">
+            /// LUCENENET specific
+            /// Passed in because <see cref="LuceneTestCase.NewStringField(string, string, Field.Store)"/>
+            /// is no longer static.
+            /// </param>
+            public IndexingThread(LineFileDocs docs, IndexWriter writer, int num, Func<string, string, Field.Store, Field> newStringField)
+                : base()
+            {
+                this.Docs = docs;
+                this.Writer = writer;
+                this.Num = num;
+                NewStringField = newStringField;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    DirectoryReader open = null;
+                    for (int i = 0; i < Num; i++)
+                    {
+                        Documents.Document doc = new Documents.Document(); // docs.NextDoc();
+                        doc.Add(NewStringField("id", "test", Field.Store.NO));
+                        Writer.UpdateDocument(new Term("id", "test"), doc);
+                        if (Random().Next(3) == 0)
+                        {
+                            if (open == null)
+                            {
+                                open = DirectoryReader.Open(Writer, true);
+                            }
+                            DirectoryReader reader = DirectoryReader.OpenIfChanged(open);
+                            if (reader != null)
+                            {
+                                open.Dispose();
+                                open = reader;
+                            }
+                            Assert.AreEqual(1, open.NumDocs, "iter: " + i + " numDocs: " + open.NumDocs + " del: " + open.NumDeletedDocs + " max: " + open.MaxDoc);
+                        }
+                    }
+                    if (open != null)
+                    {
+                        open.Dispose();
+                    }
+                }
+                catch (Exception e)
+                {
+                    throw new Exception(e.Message, e);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

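The rolling-update loop above leans on UpdateDocument being an atomic delete-then-add keyed by a term. Spelled out as a sketch (writer and doc assumed from the surrounding test; the id value is hypothetical):

    Term id = new Term("docid", "7");
    // Deletes every document matching id and adds doc in one atomic step, so a
    // reader refreshed mid-update never sees zero or two copies of the document.
    writer.UpdateDocument(id, doc);
    // Roughly equivalent to, but safer than, the non-atomic pair:
    //   writer.DeleteDocuments(id);
    //   writer.AddDocument(doc);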
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestSameTokenSamePosition.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestSameTokenSamePosition.cs b/src/Lucene.Net.Tests/Index/TestSameTokenSamePosition.cs
new file mode 100644
index 0000000..ca9637a
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestSameTokenSamePosition.cs
@@ -0,0 +1,110 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Documents;
+using NUnit.Framework;
+
+namespace Lucene.Net.Index
+{
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TextField = TextField;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using TokenStream = Lucene.Net.Analysis.TokenStream;
+
+    [TestFixture]
+    public class TestSameTokenSamePosition : LuceneTestCase
+    {
+        /// <summary>
+        /// Attempt to reproduce an assertion error that happens
+        /// only with the trunk version around April 2011.
+        /// </summary>
+        [Test]
+        public virtual void Test()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter riw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(new TextField("eng", new BugReproTokenStream()));
+            riw.AddDocument(doc);
+            riw.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// Same as the above, but with more docs
+        /// </summary>
+        [Test]
+        public virtual void TestMoreDocs()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter riw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            for (int i = 0; i < 100; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new TextField("eng", new BugReproTokenStream()));
+                riw.AddDocument(doc);
+            }
+            riw.Dispose();
+            dir.Dispose();
+        }
+    }
+
+    internal sealed class BugReproTokenStream : TokenStream
+    {
+        private readonly ICharTermAttribute TermAtt;
+        private readonly IOffsetAttribute OffsetAtt;
+        private readonly IPositionIncrementAttribute PosIncAtt;
+        private readonly int TokenCount = 4;
+        private int NextTokenIndex = 0;
+        private readonly string[] Terms = new string[] { "six", "six", "drunken", "drunken" };
+        private readonly int[] Starts = new int[] { 0, 0, 4, 4 };
+        private readonly int[] Ends = new int[] { 3, 3, 11, 11 };
+        private readonly int[] Incs = new int[] { 1, 0, 1, 0 };
+
+        public BugReproTokenStream()
+        {
+            TermAtt = AddAttribute<ICharTermAttribute>();
+            OffsetAtt = AddAttribute<IOffsetAttribute>();
+            PosIncAtt = AddAttribute<IPositionIncrementAttribute>();
+        }
+
+        public override bool IncrementToken()
+        {
+            if (NextTokenIndex < TokenCount)
+            {
+                TermAtt.SetEmpty().Append(Terms[NextTokenIndex]);
+                OffsetAtt.SetOffset(Starts[NextTokenIndex], Ends[NextTokenIndex]);
+                PosIncAtt.PositionIncrement = Incs[NextTokenIndex];
+                NextTokenIndex++;
+                return true;
+            }
+            else
+            {
+                return false;
+            }
+        }
+
+        public override void Reset()
+        {
+            base.Reset();
+            this.NextTokenIndex = 0;
+        }
+    }
+}
\ No newline at end of file

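A position increment of 0 stacks a token on the same position as the previous one; that is the whole trick in BugReproTokenStream, whose increments {1, 0, 1, 0} put two tokens at position 0 and two at position 1. Below is a minimal consumption sketch (the class and method names are illustrative, not part of the test) that walks any TokenStream and accumulates absolute positions; fed the stream above, it prints six@0, six@0, drunken@1, drunken@1.

    using Lucene.Net.Analysis;
    using Lucene.Net.Analysis.TokenAttributes;

    internal static class TokenStreamDump
    {
        public static void Dump(TokenStream ts)
        {
            ICharTermAttribute termAtt = ts.AddAttribute<ICharTermAttribute>();
            IPositionIncrementAttribute posIncAtt = ts.AddAttribute<IPositionIncrementAttribute>();
            ts.Reset();
            int position = -1;
            while (ts.IncrementToken())
            {
                // An increment of 0 keeps the token stacked on the previous position.
                position += posIncAtt.PositionIncrement;
                System.Console.WriteLine("{0}@{1}", termAtt, position);
            }
            ts.End();
        }
    }
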
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestSegmentMerger.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestSegmentMerger.cs b/src/Lucene.Net.Tests/Index/TestSegmentMerger.cs
new file mode 100644
index 0000000..30786b5
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestSegmentMerger.cs
@@ -0,0 +1,207 @@
+using Lucene.Net.Support;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Codec = Lucene.Net.Codecs.Codec;
+    using Constants = Lucene.Net.Util.Constants;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using FixedBitSet = Lucene.Net.Util.FixedBitSet;
+    using InfoStream = Lucene.Net.Util.InfoStream;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestSegmentMerger : LuceneTestCase
+    {
+        //The variables for the new merged segment
+        private Directory MergedDir;
+
+        private string MergedSegment = "test";
+
+        //First segment to be merged
+        private Directory Merge1Dir;
+
+        private Document Doc1;
+        private SegmentReader Reader1;
+
+        //Second Segment to be merged
+        private Directory Merge2Dir;
+
+        private Document Doc2;
+        private SegmentReader Reader2;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            this.Doc1 = new Document();
+            this.Doc2 = new Document();
+            MergedDir = NewDirectory();
+            Merge1Dir = NewDirectory();
+            Merge2Dir = NewDirectory();
+            DocHelper.SetupDoc(Doc1);
+            SegmentCommitInfo info1 = DocHelper.WriteDoc(Random(), Merge1Dir, Doc1);
+            DocHelper.SetupDoc(Doc2);
+            SegmentCommitInfo info2 = DocHelper.WriteDoc(Random(), Merge2Dir, Doc2);
+            Reader1 = new SegmentReader(info1, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
+            Reader2 = new SegmentReader(info2, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader1.Dispose();
+            Reader2.Dispose();
+            MergedDir.Dispose();
+            Merge1Dir.Dispose();
+            Merge2Dir.Dispose();
+            base.TearDown();
+        }
+
+        [Test]
+        public virtual void Test()
+        {
+            Assert.IsTrue(MergedDir != null);
+            Assert.IsTrue(Merge1Dir != null);
+            Assert.IsTrue(Merge2Dir != null);
+            Assert.IsTrue(Reader1 != null);
+            Assert.IsTrue(Reader2 != null);
+        }
+
+        [Test]
+        public virtual void TestMerge()
+        {
+            Codec codec = Codec.Default;
+            SegmentInfo si = new SegmentInfo(MergedDir, Constants.LUCENE_MAIN_VERSION, MergedSegment, -1, false, codec, null);
+
+            SegmentMerger merger = new SegmentMerger(Arrays.AsList<AtomicReader>(Reader1, Reader2), si, InfoStream.Default, MergedDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, CheckAbort.NONE, new FieldInfos.FieldNumbers(), NewIOContext(Random()), true);
+            MergeState mergeState = merger.Merge();
+            int docsMerged = mergeState.SegmentInfo.DocCount;
+            Assert.IsTrue(docsMerged == 2);
+            //Should be able to open a new SegmentReader against the new directory
+            SegmentReader mergedReader = new SegmentReader(new SegmentCommitInfo(new SegmentInfo(MergedDir, Constants.LUCENE_MAIN_VERSION, MergedSegment, docsMerged, false, codec, null), 0, -1L, -1L), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
+            Assert.IsTrue(mergedReader != null);
+            Assert.IsTrue(mergedReader.NumDocs == 2);
+            Document newDoc1 = mergedReader.Document(0);
+            Assert.IsTrue(newDoc1 != null);
+            //There are 2 unstored fields on the document
+            Assert.IsTrue(DocHelper.NumFields(newDoc1) == DocHelper.NumFields(Doc1) - DocHelper.Unstored.Count);
+            Document newDoc2 = mergedReader.Document(1);
+            Assert.IsTrue(newDoc2 != null);
+            Assert.IsTrue(DocHelper.NumFields(newDoc2) == DocHelper.NumFields(Doc2) - DocHelper.Unstored.Count);
+
+            DocsEnum termDocs = TestUtil.Docs(Random(), mergedReader, DocHelper.TEXT_FIELD_2_KEY, new BytesRef("field"), MultiFields.GetLiveDocs(mergedReader), null, 0);
+            Assert.IsTrue(termDocs != null);
+            Assert.IsTrue(termDocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+
+            int tvCount = 0;
+            foreach (FieldInfo fieldInfo in mergedReader.FieldInfos)
+            {
+                if (fieldInfo.HasVectors)
+                {
+                    tvCount++;
+                }
+            }
+
+            //System.out.println("stored size: " + stored.Size());
+            Assert.AreEqual(3, tvCount, "We do not have 3 fields that were indexed with term vector");
+
+            Terms vector = mergedReader.GetTermVectors(0).GetTerms(DocHelper.TEXT_FIELD_2_KEY);
+            Assert.IsNotNull(vector);
+            Assert.AreEqual(3, vector.Count);
+            TermsEnum termsEnum = vector.GetIterator(null);
+
+            int i = 0;
+            while (termsEnum.Next() != null)
+            {
+                string term = termsEnum.Term.Utf8ToString();
+                int freq = (int)termsEnum.TotalTermFreq;
+                //System.out.println("Term: " + term + " Freq: " + freq);
+                Assert.IsTrue(DocHelper.FIELD_2_TEXT.IndexOf(term) != -1);
+                Assert.IsTrue(DocHelper.FIELD_2_FREQS[i] == freq);
+                i++;
+            }
+
+            TestSegmentReader.CheckNorms(mergedReader);
+            mergedReader.Dispose();
+        }
+
+        private static bool Equals(MergeState.DocMap map1, MergeState.DocMap map2)
+        {
+            if (map1.MaxDoc != map2.MaxDoc)
+            {
+                return false;
+            }
+            for (int i = 0; i < map1.MaxDoc; ++i)
+            {
+                if (map1.Get(i) != map2.Get(i))
+                {
+                    return false;
+                }
+            }
+            return true;
+        }
+
+        [Test]
+        public virtual void TestBuildDocMap()
+        {
+            int maxDoc = TestUtil.NextInt(Random(), 1, 128);
+            int numDocs = TestUtil.NextInt(Random(), 0, maxDoc);
+            int numDeletedDocs = maxDoc - numDocs;
+            FixedBitSet liveDocs = new FixedBitSet(maxDoc);
+            for (int i = 0; i < numDocs; ++i)
+            {
+                while (true)
+                {
+                    int docID = Random().Next(maxDoc);
+                    if (!liveDocs.Get(docID))
+                    {
+                        liveDocs.Set(docID);
+                        break;
+                    }
+                }
+            }
+
+            MergeState.DocMap docMap = MergeState.DocMap.Build(maxDoc, liveDocs);
+
+            Assert.AreEqual(maxDoc, docMap.MaxDoc);
+            Assert.AreEqual(numDocs, docMap.NumDocs);
+            Assert.AreEqual(numDeletedDocs, docMap.NumDeletedDocs);
+            // assert the mapping is compact
+            for (int i = 0, del = 0; i < maxDoc; ++i)
+            {
+                if (!liveDocs.Get(i))
+                {
+                    Assert.AreEqual(-1, docMap.Get(i));
+                    ++del;
+                }
+                else
+                {
+                    Assert.AreEqual(i - del, docMap.Get(i));
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

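TestBuildDocMap pins down the compactness property of MergeState.DocMap.Build: a deleted document maps to -1, and a live document's new ID is its old ID minus the number of deletions that precede it. The standalone sketch below restates that rule directly (a hedged illustration of the asserted property, not the actual DocMap implementation).

    internal static class CompactDocMap
    {
        // liveDocs[i] == false means document i was deleted.
        public static int[] Build(bool[] liveDocs)
        {
            int[] map = new int[liveDocs.Length];
            int deleted = 0;
            for (int i = 0; i < liveDocs.Length; i++)
            {
                if (liveDocs[i])
                {
                    map[i] = i - deleted; // shift left past every prior deletion
                }
                else
                {
                    map[i] = -1;          // deleted docs get no new ID
                    deleted++;
                }
            }
            return map;
        }
    }
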
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestSegmentReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestSegmentReader.cs b/src/Lucene.Net.Tests/Index/TestSegmentReader.cs
new file mode 100644
index 0000000..b98287d
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestSegmentReader.cs
@@ -0,0 +1,277 @@
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Document = Documents.Document;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestSegmentReader : LuceneTestCase
+    {
+        private Directory Dir;
+        private Document TestDoc;
+        private SegmentReader Reader;
+
+        //TODO: Setup the reader w/ multiple documents
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Dir = NewDirectory();
+            TestDoc = new Document();
+            DocHelper.SetupDoc(TestDoc);
+            SegmentCommitInfo info = DocHelper.WriteDoc(Random(), Dir, TestDoc);
+            Reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, IOContext.READ);
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        [Test]
+        public virtual void Test()
+        {
+            Assert.IsTrue(Dir != null);
+            Assert.IsTrue(Reader != null);
+            Assert.IsTrue(DocHelper.NameValues.Count > 0);
+            Assert.IsTrue(DocHelper.NumFields(TestDoc) == DocHelper.All.Count);
+        }
+
+        [Test]
+        public virtual void TestDocument()
+        {
+            Assert.IsTrue(Reader.NumDocs == 1);
+            Assert.IsTrue(Reader.MaxDoc >= 1);
+            Document result = Reader.Document(0);
+            Assert.IsTrue(result != null);
+            //There are 2 unstored fields on the document that are not preserved across writing
+            Assert.IsTrue(DocHelper.NumFields(result) == DocHelper.NumFields(TestDoc) - DocHelper.Unstored.Count);
+
+            IList<IIndexableField> fields = result.Fields;
+            foreach (IIndexableField field in fields)
+            {
+                Assert.IsTrue(field != null);
+                Assert.IsTrue(DocHelper.NameValues.ContainsKey(field.Name));
+            }
+        }
+
+        [Test]
+        public virtual void TestGetFieldNameVariations()
+        {
+            ICollection<string> allFieldNames = new HashSet<string>();
+            ICollection<string> indexedFieldNames = new HashSet<string>();
+            ICollection<string> notIndexedFieldNames = new HashSet<string>();
+            ICollection<string> tvFieldNames = new HashSet<string>();
+            ICollection<string> noTVFieldNames = new HashSet<string>();
+
+            foreach (FieldInfo fieldInfo in Reader.FieldInfos)
+            {
+                string name = fieldInfo.Name;
+                allFieldNames.Add(name);
+                if (fieldInfo.IsIndexed)
+                {
+                    indexedFieldNames.Add(name);
+                }
+                else
+                {
+                    notIndexedFieldNames.Add(name);
+                }
+                if (fieldInfo.HasVectors)
+                {
+                    tvFieldNames.Add(name);
+                }
+                else if (fieldInfo.IsIndexed)
+                {
+                    noTVFieldNames.Add(name);
+                }
+            }
+
+            Assert.IsTrue(allFieldNames.Count == DocHelper.All.Count);
+            foreach (string s in allFieldNames)
+            {
+                Assert.IsTrue(DocHelper.NameValues.ContainsKey(s) || s.Equals(""));
+            }
+
+            Assert.IsTrue(indexedFieldNames.Count == DocHelper.Indexed.Count);
+            foreach (string s in indexedFieldNames)
+            {
+                Assert.IsTrue(DocHelper.Indexed.ContainsKey(s) || s.Equals(""));
+            }
+
+            Assert.IsTrue(notIndexedFieldNames.Count == DocHelper.Unindexed.Count);
+            //Get all indexed fields that are storing term vectors
+            Assert.IsTrue(tvFieldNames.Count == DocHelper.Termvector.Count);
+
+            Assert.IsTrue(noTVFieldNames.Count == DocHelper.Notermvector.Count);
+        }
+
+        [Test]
+        public virtual void TestTerms()
+        {
+            Fields fields = MultiFields.GetFields(Reader);
+            foreach (string field in fields)
+            {
+                Terms terms = fields.GetTerms(field);
+                Assert.IsNotNull(terms);
+                TermsEnum termsEnum = terms.GetIterator(null);
+                while (termsEnum.Next() != null)
+                {
+                    BytesRef term = termsEnum.Term;
+                    Assert.IsTrue(term != null);
+                    string fieldValue = (string)DocHelper.NameValues[field];
+                    Assert.IsTrue(fieldValue.IndexOf(term.Utf8ToString()) != -1);
+                }
+            }
+
+            DocsEnum termDocs = TestUtil.Docs(Random(), Reader, DocHelper.TEXT_FIELD_1_KEY, new BytesRef("field"), MultiFields.GetLiveDocs(Reader), null, 0);
+            Assert.IsTrue(termDocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+
+            termDocs = TestUtil.Docs(Random(), Reader, DocHelper.NO_NORMS_KEY, new BytesRef(DocHelper.NO_NORMS_TEXT), MultiFields.GetLiveDocs(Reader), null, 0);
+
+            Assert.IsTrue(termDocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+
+            DocsAndPositionsEnum positions = MultiFields.GetTermPositionsEnum(Reader, MultiFields.GetLiveDocs(Reader), DocHelper.TEXT_FIELD_1_KEY, new BytesRef("field"));
+            // NOTE: a prior revision of this test failed to call
+            // NextDoc() first here:
+            Assert.IsTrue(positions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.IsTrue(positions.DocID == 0);
+            Assert.IsTrue(positions.NextPosition() >= 0);
+        }
+
+        [Test]
+        public virtual void TestNorms()
+        {
+            //TODO: Not sure how these work/should be tested
+            /*
+                try {
+                  byte [] norms = reader.norms(DocHelper.TEXT_FIELD_1_KEY);
+                  System.out.println("Norms: " + norms);
+                  Assert.IsTrue(norms != null);
+                } catch (IOException e) {
+                  e.printStackTrace();
+                  Assert.IsTrue(false);
+                }
+            */
+
+            CheckNorms(Reader);
+        }
+
+        public static void CheckNorms(AtomicReader reader)
+        {
+            // test omit norms
+            for (int i = 0; i < DocHelper.Fields.Length; i++)
+            {
+                IIndexableField f = DocHelper.Fields[i];
+                if (f.FieldType.IsIndexed)
+                {
+                    Assert.AreEqual(reader.GetNormValues(f.Name) != null, !f.FieldType.OmitNorms);
+                    Assert.AreEqual(reader.GetNormValues(f.Name) != null, !DocHelper.NoNorms.ContainsKey(f.Name));
+                    if (reader.GetNormValues(f.Name) == null)
+                    {
+                        // test for norms of null
+                        NumericDocValues norms = MultiDocValues.GetNormValues(reader, f.Name);
+                        Assert.IsNull(norms);
+                    }
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestTermVectors()
+        {
+            Terms result = Reader.GetTermVectors(0).GetTerms(DocHelper.TEXT_FIELD_2_KEY);
+            Assert.IsNotNull(result);
+            Assert.AreEqual(3, result.Count);
+            TermsEnum termsEnum = result.GetIterator(null);
+            while (termsEnum.Next() != null)
+            {
+                string term = termsEnum.Term.Utf8ToString();
+                int freq = (int)termsEnum.TotalTermFreq;
+                Assert.IsTrue(DocHelper.FIELD_2_TEXT.IndexOf(term) != -1);
+                Assert.IsTrue(freq > 0);
+            }
+
+            Fields results = Reader.GetTermVectors(0);
+            Assert.IsTrue(results != null);
+            Assert.AreEqual(3, results.Count, "We do not have 3 term freq vectors");
+        }
+
+        [Test]
+        public virtual void TestOutOfBoundsAccess()
+        {
+            int numDocs = Reader.MaxDoc;
+            try
+            {
+                Reader.Document(-1);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.IndexOutOfRangeException expected)
+#pragma warning restore 168
+            {
+            }
+
+            try
+            {
+                Reader.GetTermVectors(-1);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.IndexOutOfRangeException expected)
+#pragma warning restore 168
+            {
+            }
+
+            try
+            {
+                Reader.Document(numDocs);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.IndexOutOfRangeException expected)
+#pragma warning restore 168
+            {
+            }
+
+            try
+            {
+                Reader.GetTermVectors(numDocs);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.IndexOutOfRangeException expected)
+#pragma warning restore 168
+            {
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestSegmentTermDocs.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestSegmentTermDocs.cs b/src/Lucene.Net.Tests/Index/TestSegmentTermDocs.cs
new file mode 100644
index 0000000..f876774
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestSegmentTermDocs.cs
@@ -0,0 +1,274 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Lucene.Net.Documents.Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestSegmentTermDocs : LuceneTestCase
+    {
+        private Document TestDoc;
+        private Directory Dir;
+        private SegmentCommitInfo Info;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            TestDoc = new Document();
+            Dir = NewDirectory();
+            DocHelper.SetupDoc(TestDoc);
+            Info = DocHelper.WriteDoc(Random(), Dir, TestDoc);
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        [Test]
+        public virtual void Test()
+        {
+            Assert.IsTrue(Dir != null);
+        }
+
+        [Test]
+        public virtual void TestTermDocs()
+        {
+            TestTermDocs(1);
+        }
+
+        public virtual void TestTermDocs(int indexDivisor)
+        {
+            //After adding the document, we should be able to read it back in
+            SegmentReader reader = new SegmentReader(Info, indexDivisor, NewIOContext(Random()));
+            Assert.IsTrue(reader != null);
+            Assert.AreEqual(indexDivisor, reader.TermInfosIndexDivisor);
+
+            TermsEnum terms = reader.Fields.GetTerms(DocHelper.TEXT_FIELD_2_KEY).GetIterator(null);
+            terms.SeekCeil(new BytesRef("field"));
+            DocsEnum termDocs = TestUtil.Docs(Random(), terms, reader.LiveDocs, null, DocsEnum.FLAG_FREQS);
+            if (termDocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                int docId = termDocs.DocID;
+                Assert.IsTrue(docId == 0);
+                int freq = termDocs.Freq;
+                Assert.IsTrue(freq == 3);
+            }
+            reader.Dispose();
+        }
+
+        [Test]
+        public virtual void TestBadSeek()
+        {
+            TestBadSeek(1);
+        }
+
+        public virtual void TestBadSeek(int indexDivisor)
+        {
+            {
+                //After adding the document, we should be able to read it back in
+                SegmentReader reader = new SegmentReader(Info, indexDivisor, NewIOContext(Random()));
+                Assert.IsTrue(reader != null);
+                DocsEnum termDocs = TestUtil.Docs(Random(), reader, "textField2", new BytesRef("bad"), reader.LiveDocs, null, 0);
+
+                Assert.IsNull(termDocs);
+                reader.Dispose();
+            }
+            {
+                //After adding the document, we should be able to read it back in
+                SegmentReader reader = new SegmentReader(Info, indexDivisor, NewIOContext(Random()));
+                Assert.IsTrue(reader != null);
+                DocsEnum termDocs = TestUtil.Docs(Random(), reader, "junk", new BytesRef("bad"), reader.LiveDocs, null, 0);
+                Assert.IsNull(termDocs);
+                reader.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestSkipTo()
+        {
+            TestSkipTo(1);
+        }
+
+        public virtual void TestSkipTo(int indexDivisor)
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+
+            Term ta = new Term("content", "aaa");
+            for (int i = 0; i < 10; i++)
+            {
+                AddDoc(writer, "aaa aaa aaa aaa");
+            }
+
+            Term tb = new Term("content", "bbb");
+            for (int i = 0; i < 16; i++)
+            {
+                AddDoc(writer, "bbb bbb bbb bbb");
+            }
+
+            Term tc = new Term("content", "ccc");
+            for (int i = 0; i < 50; i++)
+            {
+                AddDoc(writer, "ccc ccc ccc ccc");
+            }
+
+            // ensure that we deal with a single segment
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            IndexReader reader = DirectoryReader.Open(dir, indexDivisor);
+
+            DocsEnum tdocs = TestUtil.Docs(Random(), reader, ta.Field, new BytesRef(ta.Text()), MultiFields.GetLiveDocs(reader), null, DocsEnum.FLAG_FREQS);
+
+            // without optimization (assumption skipInterval == 16)
+
+            // with next
+            Assert.IsTrue(tdocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(0, tdocs.DocID);
+            Assert.AreEqual(4, tdocs.Freq);
+            Assert.IsTrue(tdocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(1, tdocs.DocID);
+            Assert.AreEqual(4, tdocs.Freq);
+            Assert.IsTrue(tdocs.Advance(2) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(2, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(4) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(4, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(9) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(9, tdocs.DocID);
+            Assert.IsFalse(tdocs.Advance(10) != DocIdSetIterator.NO_MORE_DOCS);
+
+            // without next
+            tdocs = TestUtil.Docs(Random(), reader, ta.Field, new BytesRef(ta.Text()), MultiFields.GetLiveDocs(reader), null, 0);
+
+            Assert.IsTrue(tdocs.Advance(0) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(0, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(4) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(4, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(9) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(9, tdocs.DocID);
+            Assert.IsFalse(tdocs.Advance(10) != DocIdSetIterator.NO_MORE_DOCS);
+
+            // exactly skipInterval documents and therefore with optimization
+
+            // with next
+            tdocs = TestUtil.Docs(Random(), reader, tb.Field, new BytesRef(tb.Text()), MultiFields.GetLiveDocs(reader), null, DocsEnum.FLAG_FREQS);
+
+            Assert.IsTrue(tdocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(10, tdocs.DocID);
+            Assert.AreEqual(4, tdocs.Freq);
+            Assert.IsTrue(tdocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(11, tdocs.DocID);
+            Assert.AreEqual(4, tdocs.Freq);
+            Assert.IsTrue(tdocs.Advance(12) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(12, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(15) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(15, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(24) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(24, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(25) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(25, tdocs.DocID);
+            Assert.IsFalse(tdocs.Advance(26) != DocIdSetIterator.NO_MORE_DOCS);
+
+            // without next
+            tdocs = TestUtil.Docs(Random(), reader, tb.Field, new BytesRef(tb.Text()), MultiFields.GetLiveDocs(reader), null, DocsEnum.FLAG_FREQS);
+
+            Assert.IsTrue(tdocs.Advance(5) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(10, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(15) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(15, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(24) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(24, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(25) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(25, tdocs.DocID);
+            Assert.IsFalse(tdocs.Advance(26) != DocIdSetIterator.NO_MORE_DOCS);
+
+            // much more than skipInterval documents and therefore with optimization
+
+            // with next
+            tdocs = TestUtil.Docs(Random(), reader, tc.Field, new BytesRef(tc.Text()), MultiFields.GetLiveDocs(reader), null, DocsEnum.FLAG_FREQS);
+
+            Assert.IsTrue(tdocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(26, tdocs.DocID);
+            Assert.AreEqual(4, tdocs.Freq);
+            Assert.IsTrue(tdocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(27, tdocs.DocID);
+            Assert.AreEqual(4, tdocs.Freq);
+            Assert.IsTrue(tdocs.Advance(28) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(28, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(40) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(40, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(57) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(57, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(74) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(74, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(75) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(75, tdocs.DocID);
+            Assert.IsFalse(tdocs.Advance(76) != DocIdSetIterator.NO_MORE_DOCS);
+
+            //without next
+            tdocs = TestUtil.Docs(Random(), reader, tc.Field, new BytesRef(tc.Text()), MultiFields.GetLiveDocs(reader), null, 0);
+            Assert.IsTrue(tdocs.Advance(5) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(26, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(40) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(40, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(57) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(57, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(74) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(74, tdocs.DocID);
+            Assert.IsTrue(tdocs.Advance(75) != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(75, tdocs.DocID);
+            Assert.IsFalse(tdocs.Advance(76) != DocIdSetIterator.NO_MORE_DOCS);
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIndexDivisor()
+        {
+            TestDoc = new Document();
+            DocHelper.SetupDoc(TestDoc);
+            DocHelper.WriteDoc(Random(), Dir, TestDoc);
+            TestTermDocs(2);
+            TestBadSeek(2);
+            TestSkipTo(2);
+        }
+
+        private void AddDoc(IndexWriter writer, string value)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("content", value, Field.Store.NO));
+            writer.AddDocument(doc);
+        }
+    }
+}
\ No newline at end of file

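Every Advance() assertion in TestSkipTo follows from the DocIdSetIterator contract: Advance(target) positions the iterator on the first document whose ID is greater than or equal to target, and returns NO_MORE_DOCS once the postings are exhausted. A small helper built on that contract (a hedged sketch with illustrative names, not part of the test suite):

    using System.Collections.Generic;
    using Lucene.Net.Search;

    internal static class AdvanceHelper
    {
        // Collects every remaining docID at or after 'target'. Advance() never
        // lands before the target, so this enumerates the tail of the postings.
        public static IList<int> DocsFrom(DocIdSetIterator iterator, int target)
        {
            IList<int> docs = new List<int>();
            int doc = iterator.Advance(target);
            while (doc != DocIdSetIterator.NO_MORE_DOCS)
            {
                docs.Add(doc);
                doc = iterator.NextDoc();
            }
            return docs;
        }
    }
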
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/JustCompileSearch.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/JustCompileSearch.cs b/src/Lucene.Net.Tests/Search/JustCompileSearch.cs
new file mode 100644
index 0000000..272d338
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/JustCompileSearch.cs
@@ -0,0 +1,358 @@
+using System;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Util;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using FieldInvertState = Lucene.Net.Index.FieldInvertState;
+    using Similarity = Lucene.Net.Search.Similarities.Similarity;
+    using Terms = Lucene.Net.Index.Terms;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+
+    /// <summary>
+    /// Holds all implementations of classes in the o.a.l.search package as a
+    /// back-compatibility test. It does not run any tests per se; however, if
+    /// someone adds a method to an interface or an abstract method to an abstract
+    /// class, one of the implementations here will fail to compile, so we know the
+    /// back-compat policy was violated.
+    /// </summary>
+    internal sealed class JustCompileSearch
+    {
+        private const string UNSUPPORTED_MSG = "unsupported: used for back-compat testing only !";
+
+        internal sealed class JustCompileCollector : ICollector
+        {
+            public void Collect(int doc)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public void SetNextReader(AtomicReaderContext context)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public void SetScorer(Scorer scorer)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public bool AcceptsDocsOutOfOrder
+            {
+                get { throw new System.NotSupportedException(UNSUPPORTED_MSG); }
+            }
+        }
+
+        internal sealed class JustCompileDocIdSet : DocIdSet
+        {
+            public override DocIdSetIterator GetIterator()
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+
+        internal sealed class JustCompileDocIdSetIterator : DocIdSetIterator
+        {
+            public override int DocID
+            {
+                get { throw new System.NotSupportedException(UNSUPPORTED_MSG); }
+            }
+
+            public override int NextDoc()
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override int Advance(int target)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override long GetCost()
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+
+        internal sealed class JustCompileExtendedFieldCacheLongParser : FieldCache.IInt64Parser
+        {
+            /// <summary>
+            /// NOTE: This was parseLong() in Lucene
+            /// </summary>
+            public long ParseInt64(BytesRef @string)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public TermsEnum TermsEnum(Terms terms)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+
+        internal sealed class JustCompileExtendedFieldCacheDoubleParser : FieldCache.IDoubleParser
+        {
+            public double ParseDouble(BytesRef term)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public TermsEnum TermsEnum(Terms terms)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+
+        internal sealed class JustCompileFieldComparer : FieldComparer<object>
+        {
+            public override int Compare(int slot1, int slot2)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override int CompareBottom(int doc)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override void Copy(int slot, int doc)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override void SetBottom(int slot)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override void SetTopValue(object value)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override FieldComparer SetNextReader(AtomicReaderContext context)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            // LUCENENET NOTE: This was value(int) in Lucene.
+            public override IComparable this[int slot]
+            {
+                get { throw new System.NotSupportedException(UNSUPPORTED_MSG); }
+            }
+
+            public override int CompareTop(int doc)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+
+        internal sealed class JustCompileFieldComparerSource : FieldComparerSource
+        {
+            public override FieldComparer NewComparer(string fieldname, int numHits, int sortPos, bool reversed)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+
+        internal sealed class JustCompileFilter : Filter
+        {
+            // Filter is just an abstract class with no abstract methods. However, it is
+            // still included here in case abstract methods are added in the future.
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                return null;
+            }
+        }
+
+        internal sealed class JustCompileFilteredDocIdSet : FilteredDocIdSet
+        {
+            public JustCompileFilteredDocIdSet(DocIdSet innerSet)
+                : base(innerSet)
+            {
+            }
+
+            protected override bool Match(int docid)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+
+        internal sealed class JustCompileFilteredDocIdSetIterator : FilteredDocIdSetIterator
+        {
+            public JustCompileFilteredDocIdSetIterator(DocIdSetIterator innerIter)
+                : base(innerIter)
+            {
+            }
+
+            protected override bool Match(int doc)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override long GetCost()
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+
+        internal sealed class JustCompileQuery : Query
+        {
+            public override string ToString(string field)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+
+        internal sealed class JustCompileScorer : Scorer
+        {
+            internal JustCompileScorer(Weight weight)
+                : base(weight)
+            {
+            }
+
+            public override float GetScore()
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override int Freq
+            {
+                get { throw new System.NotSupportedException(UNSUPPORTED_MSG); }
+            }
+
+            public override int DocID
+            {
+                get { throw new System.NotSupportedException(UNSUPPORTED_MSG); }
+            }
+
+            public override int NextDoc()
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override int Advance(int target)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override long GetCost()
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+
+        internal sealed class JustCompileSimilarity : Similarity
+        {
+            public override SimWeight ComputeWeight(float queryBoost, CollectionStatistics collectionStats, params TermStatistics[] termStats)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override SimScorer GetSimScorer(SimWeight stats, AtomicReaderContext context)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override long ComputeNorm(FieldInvertState state)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+
+        internal sealed class JustCompileTopDocsCollector : TopDocsCollector<ScoreDoc>
+        {
+            internal JustCompileTopDocsCollector(PriorityQueue<ScoreDoc> pq)
+                : base(pq)
+            {
+            }
+
+            public override void Collect(int doc)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override void SetNextReader(AtomicReaderContext context)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override void SetScorer(Scorer scorer)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override bool AcceptsDocsOutOfOrder
+            {
+                get { throw new System.NotSupportedException(UNSUPPORTED_MSG); }
+            }
+
+            public override TopDocs GetTopDocs()
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override TopDocs GetTopDocs(int start)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override TopDocs GetTopDocs(int start, int end)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+
+        internal sealed class JustCompileWeight : Weight
+        {
+            public override Explanation Explain(AtomicReaderContext context, int doc)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override Query Query
+            {
+                get
+                {
+                    throw new System.NotSupportedException(UNSUPPORTED_MSG);
+                }
+            }
+
+            public override void Normalize(float norm, float topLevelBoost)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override float GetValueForNormalization()
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+
+            public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs)
+            {
+                throw new System.NotSupportedException(UNSUPPORTED_MSG);
+            }
+        }
+    }
+}
\ No newline at end of file

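The technique JustCompileSearch relies on can be shown in miniature. In the hypothetical example below (both names are invented for illustration), every member is implemented with a throwing body; the class is never meant to run, but if the abstraction later gains a member, the stub stops compiling and the back-compat break is caught at build time.

    internal interface IExampleSearchApi
    {
        void Collect(int doc);
        bool AcceptsDocsOutOfOrder { get; }
    }

    internal sealed class JustCompileExampleSearchApi : IExampleSearchApi
    {
        // Each member throws: the class exists only so the compiler
        // verifies that the shape of IExampleSearchApi has not changed.
        public void Collect(int doc)
        {
            throw new System.NotSupportedException("back-compat stub only");
        }

        public bool AcceptsDocsOutOfOrder
        {
            get { throw new System.NotSupportedException("back-compat stub only"); }
        }
    }
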
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/MockFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/MockFilter.cs b/src/Lucene.Net.Tests/Search/MockFilter.cs
new file mode 100644
index 0000000..19daade
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/MockFilter.cs
@@ -0,0 +1,44 @@
+namespace Lucene.Net.Search
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using IBits = Lucene.Net.Util.IBits;
+    using FixedBitSet = Lucene.Net.Util.FixedBitSet;
+
+    public class MockFilter : Filter
+    {
+        private bool wasCalled;
+
+        public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+        {
+            wasCalled = true;
+            return new FixedBitSet(context.Reader.MaxDoc);
+        }
+
+        public virtual void Clear()
+        {
+            wasCalled = false;
+        }
+
+        public virtual bool WasCalled()
+        {
+            return wasCalled;
+        }
+    }
+}
\ No newline at end of file

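MockFilter is a spy: the FixedBitSet it returns has no bits set, so it matches no documents, while WasCalled() records that the search consulted the filter at all. A hedged usage sketch ('searcher' is assumed to be an IndexSearcher over some already-built test index):

    MockFilter filter = new MockFilter();
    TopDocs hits = searcher.Search(new MatchAllDocsQuery(), filter, 10);
    Assert.AreEqual(0, hits.TotalHits); // the empty bit set matches nothing...
    Assert.IsTrue(filter.WasCalled());  // ...but the filter was still consulted
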
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/MultiCollectorTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/MultiCollectorTest.cs b/src/Lucene.Net.Tests/Search/MultiCollectorTest.cs
new file mode 100644
index 0000000..5ed25ad
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/MultiCollectorTest.cs
@@ -0,0 +1,118 @@
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    [TestFixture]
+    public class MultiCollectorTest : LuceneTestCase
+    {
+        private class DummyCollector : ICollector
+        {
+            internal bool AcceptsDocsOutOfOrderCalled = false;
+            internal bool CollectCalled = false;
+            internal bool SetNextReaderCalled = false;
+            internal bool SetScorerCalled = false;
+
+            public virtual bool AcceptsDocsOutOfOrder
+            {
+                get
+                {
+                    AcceptsDocsOutOfOrderCalled = true;
+                    return true;
+                }
+            }
+
+            public virtual void Collect(int doc)
+            {
+                CollectCalled = true;
+            }
+
+            public virtual void SetNextReader(AtomicReaderContext context)
+            {
+                SetNextReaderCalled = true;
+            }
+
+            public virtual void SetScorer(Scorer scorer)
+            {
+                SetScorerCalled = true;
+            }
+        }
+
+        [Test]
+        public virtual void TestNullCollectors()
+        {
+            // Tests that the collector rejects all null collectors.
+            try
+            {
+                MultiCollector.Wrap(null, null);
+                Assert.Fail("wrapping only null collectors should not be supported");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            // Tests that the collector handles some null collectors well. If it
+            // doesn't, a NullReferenceException would be thrown.
+            ICollector c = MultiCollector.Wrap(new DummyCollector(), null, new DummyCollector());
+            Assert.IsTrue(c is MultiCollector);
+            Assert.IsTrue(c.AcceptsDocsOutOfOrder);
+            c.Collect(1);
+            c.SetNextReader(null);
+            c.SetScorer(null);
+        }
+
+        [Test]
+        public virtual void TestSingleCollector()
+        {
+            // Tests that if a single Collector is input, it is returned (and not MultiCollector).
+            DummyCollector dc = new DummyCollector();
+            Assert.AreSame(dc, MultiCollector.Wrap(dc));
+            Assert.AreSame(dc, MultiCollector.Wrap(dc, null));
+        }
+
+        [Test]
+        public virtual void TestCollector()
+        {
+            // Tests that the collector delegates calls to input collectors properly.
+            DummyCollector[] dcs = new DummyCollector[] { new DummyCollector(), new DummyCollector() };
+            ICollector c = MultiCollector.Wrap(dcs);
+            Assert.IsTrue(c.AcceptsDocsOutOfOrder);
+            c.Collect(1);
+            c.SetNextReader(null);
+            c.SetScorer(null);
+
+            foreach (DummyCollector dc in dcs)
+            {
+                Assert.IsTrue(dc.AcceptsDocsOutOfOrderCalled);
+                Assert.IsTrue(dc.CollectCalled);
+                Assert.IsTrue(dc.SetNextReaderCalled);
+                Assert.IsTrue(dc.SetScorerCalled);
+            }
+        }
+    }
+}
\ No newline at end of file

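The Wrap semantics these tests pin down are worth restating: null entries are silently dropped, a single surviving collector is returned unwrapped, and two or more surviving collectors produce a MultiCollector that fans every call out to each delegate. A hedged sketch, using TotalHitCountCollector purely as a convenient stand-in:

    ICollector single = new TotalHitCountCollector();
    // Nulls are filtered out; with one real collector left, Wrap returns it as-is.
    Assert.AreSame(single, MultiCollector.Wrap(single, null));
    // Two real collectors yield a MultiCollector that delegates to both.
    ICollector fanOut = MultiCollector.Wrap(single, new TotalHitCountCollector());
    Assert.IsTrue(fanOut is MultiCollector);
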
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Payloads/PayloadHelper.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Payloads/PayloadHelper.cs b/src/Lucene.Net.Tests/Search/Payloads/PayloadHelper.cs
new file mode 100644
index 0000000..d0bfdfe
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Payloads/PayloadHelper.cs
@@ -0,0 +1,158 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Documents;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.Search.Payloads
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Analysis;
+    using System.IO;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using English = Lucene.Net.Util.English;
+    using Field = Lucene.Net.Documents.Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using Similarity = Lucene.Net.Search.Similarities.Similarity;
+    using TextField = TextField;
+
+    /// <summary>
+    /// Builds a small index whose tokens carry payloads, for use by the payload query tests.
+    /// </summary>
+    public class PayloadHelper
+    {
+        private byte[] PayloadField = new byte[] { 1 };
+        private byte[] PayloadMultiField1 = new byte[] { 2 };
+        private byte[] PayloadMultiField2 = new byte[] { 4 };
+        public const string NO_PAYLOAD_FIELD = "noPayloadField";
+        public const string MULTI_FIELD = "multiField";
+        public const string FIELD = "field";
+
+        public IndexReader Reader;
+
+        public sealed class PayloadAnalyzer : Analyzer
+        {
+            private readonly PayloadHelper OuterInstance;
+
+            public PayloadAnalyzer(PayloadHelper outerInstance)
+                : base(PER_FIELD_REUSE_STRATEGY)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer result = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+                return new TokenStreamComponents(result, new PayloadFilter(OuterInstance, result, fieldName));
+            }
+        }
+
+        public sealed class PayloadFilter : TokenFilter
+        {
+            private readonly PayloadHelper OuterInstance;
+
+            internal readonly string FieldName;
+            internal int NumSeen = 0;
+            internal readonly IPayloadAttribute PayloadAtt;
+
+            public PayloadFilter(PayloadHelper outerInstance, TokenStream input, string fieldName)
+                : base(input)
+            {
+                this.OuterInstance = outerInstance;
+                this.FieldName = fieldName;
+                PayloadAtt = AddAttribute<IPayloadAttribute>();
+            }
+
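+            // Attaches a payload to each token: FIELD tokens always get payload {1};
+            // MULTI_FIELD tokens alternate between payloads {2} and {4}.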
+            public override bool IncrementToken()
+            {
+                if (m_input.IncrementToken())
+                {
+                    if (FieldName.Equals(FIELD))
+                    {
+                        PayloadAtt.Payload = new BytesRef(OuterInstance.PayloadField);
+                    }
+                    else if (FieldName.Equals(MULTI_FIELD))
+                    {
+                        if (NumSeen % 2 == 0)
+                        {
+                            PayloadAtt.Payload = new BytesRef(OuterInstance.PayloadMultiField1);
+                        }
+                        else
+                        {
+                            PayloadAtt.Payload = new BytesRef(OuterInstance.PayloadMultiField2);
+                        }
+                        NumSeen++;
+                    }
+                    return true;
+                }
+                return false;
+            }
+
+            public override void Reset()
+            {
+                base.Reset();
+                this.NumSeen = 0;
+            }
+        }
+
+        /// <summary>
+        /// Sets up a RAMDirectory and adds documents (using English.IntToEnglish()) with three fields:
+        /// field, multiField, and noPayloadField, analyzing them with the PayloadAnalyzer. </summary>
+        /// <param name="similarity"> The Similarity to use in the IndexSearcher </param>
+        /// <param name="numDocs"> The number of documents to add </param>
+        /// <returns> An IndexSearcher </returns>
+        // TODO: randomize
+        public virtual IndexSearcher SetUp(Random random, Similarity similarity, int numDocs)
+        {
+            Directory directory = new MockDirectoryWrapper(random, new RAMDirectory());
+            PayloadAnalyzer analyzer = new PayloadAnalyzer(this);
+
+            // TODO randomize this
+            IndexWriter writer = new IndexWriter(directory, (new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, analyzer)).SetSimilarity(similarity));
+            // writer.infoStream = System.out;
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new TextField(FIELD, English.IntToEnglish(i), Field.Store.YES));
+                doc.Add(new TextField(MULTI_FIELD, English.IntToEnglish(i) + "  " + English.IntToEnglish(i), Field.Store.YES));
+                doc.Add(new TextField(NO_PAYLOAD_FIELD, English.IntToEnglish(i), Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            Reader = DirectoryReader.Open(writer, true);
+            writer.Dispose();
+
+            IndexSearcher searcher = LuceneTestCase.NewSearcher(Reader, similarity);
+            searcher.Similarity = similarity;
+            return searcher;
+        }
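+
+        // Typical usage (a sketch): build the index and searcher, run payload queries,
+        // then let TearDown dispose the reader, e.g.:
+        //   PayloadHelper helper = new PayloadHelper();
+        //   IndexSearcher searcher = helper.SetUp(Random(), new DefaultSimilarity(), 1000);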
+
+        [TearDown]
+        public virtual void TearDown()
+        {
+            Reader.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Payloads/TestPayloadExplanations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Payloads/TestPayloadExplanations.cs b/src/Lucene.Net.Tests/Search/Payloads/TestPayloadExplanations.cs
new file mode 100644
index 0000000..e205871
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Payloads/TestPayloadExplanations.cs
@@ -0,0 +1,117 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Search.Payloads
+{
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using SpanQuery = Lucene.Net.Search.Spans.SpanQuery;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// TestExplanations subclass focusing on payload queries
+    /// </summary>
+    [TestFixture]
+    public class TestPayloadExplanations : TestExplanations
+    {
+        private PayloadFunction[] Functions = new PayloadFunction[] { new AveragePayloadFunction(), new MinPayloadFunction(), new MaxPayloadFunction() };
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper(this);
+        }
+
+        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        {
+            private readonly TestPayloadExplanations OuterInstance;
+
+            public DefaultSimilarityAnonymousInnerClassHelper(TestPayloadExplanations outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override float ScorePayload(int doc, int start, int end, BytesRef payload)
+            {
+                return 1 + (payload.GetHashCode() % 10);
+            }
+        }
+
+        /// <summary>
+        /// Shorthand for building a PayloadTermQuery over FIELD. </summary>
+        private SpanQuery Pt(string s, PayloadFunction fn, bool includeSpanScore)
+        {
+            return new PayloadTermQuery(new Term(FIELD, s), fn, includeSpanScore);
+        }
+
+        /* simple PayloadTermQueries */
+
+        [Test]
+        public virtual void TestPT1()
+        {
+            foreach (PayloadFunction fn in Functions)
+            {
+                Qtest(Pt("w1", fn, false), new int[] { 0, 1, 2, 3 });
+                Qtest(Pt("w1", fn, true), new int[] { 0, 1, 2, 3 });
+            }
+        }
+
+        [Test]
+        public virtual void TestPT2()
+        {
+            foreach (PayloadFunction fn in Functions)
+            {
+                SpanQuery q = Pt("w1", fn, false);
+                q.Boost = 1000;
+                Qtest(q, new int[] { 0, 1, 2, 3 });
+                q = Pt("w1", fn, true);
+                q.Boost = 1000;
+                Qtest(q, new int[] { 0, 1, 2, 3 });
+            }
+        }
+
+        [Test]
+        public virtual void TestPT4()
+        {
+            foreach (PayloadFunction fn in Functions)
+            {
+                Qtest(Pt("xx", fn, false), new int[] { 2, 3 });
+                Qtest(Pt("xx", fn, true), new int[] { 2, 3 });
+            }
+        }
+
+        [Test]
+        public virtual void TestPT5()
+        {
+            foreach (PayloadFunction fn in Functions)
+            {
+                SpanQuery q = Pt("xx", fn, false);
+                q.Boost = 1000;
+                Qtest(q, new int[] { 2, 3 });
+                q = Pt("xx", fn, true);
+                q.Boost = 1000;
+                Qtest(q, new int[] { 2, 3 });
+            }
+        }
+
+        // TODO: test the payloadnear query too!
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Payloads/TestPayloadNearQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Payloads/TestPayloadNearQuery.cs b/src/Lucene.Net.Tests/Search/Payloads/TestPayloadNearQuery.cs
new file mode 100644
index 0000000..d621574
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Payloads/TestPayloadNearQuery.cs
@@ -0,0 +1,392 @@
+using System.Text.RegularExpressions;
+using Lucene.Net.Analysis.TokenAttributes;
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search.Payloads
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Analysis;
+    using NUnit.Framework;
+    using System.IO;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using English = Lucene.Net.Util.English;
+    using Field = Field;
+    using FieldInvertState = Lucene.Net.Index.FieldInvertState;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using SpanNearQuery = Lucene.Net.Search.Spans.SpanNearQuery;
+    using SpanQuery = Lucene.Net.Search.Spans.SpanQuery;
+    using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
+    using Term = Lucene.Net.Index.Term;
+
+    [TestFixture]
+    public class TestPayloadNearQuery : LuceneTestCase
+    {
+        private static IndexSearcher Searcher;
+        private static IndexReader Reader;
+        private static Directory Directory;
+        private static BoostingSimilarity similarity = new BoostingSimilarity();
+        private static byte[] Payload2 = { 2 };
+        private static byte[] Payload4 = { 4 };
+        private static readonly Regex _whiteSpaceRegex = new Regex("[\\s]+", RegexOptions.Compiled);
+
+        private class PayloadAnalyzer : Analyzer
+        {
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer result = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+                return new TokenStreamComponents(result, new PayloadFilter(result, fieldName));
+            }
+        }
+
+        private class PayloadFilter : TokenFilter
+        {
+            internal readonly string FieldName;
+            internal int NumSeen = 0;
+            internal readonly IPayloadAttribute PayAtt;
+
+            public PayloadFilter(TokenStream input, string fieldName)
+                : base(input)
+            {
+                this.FieldName = fieldName;
+                PayAtt = AddAttribute<IPayloadAttribute>();
+            }
+
+            public sealed override bool IncrementToken()
+            {
+                bool result = false;
+                if (m_input.IncrementToken())
+                {
+                    if (NumSeen % 2 == 0)
+                    {
+                        PayAtt.Payload = new BytesRef(Payload2);
+                    }
+                    else
+                    {
+                        PayAtt.Payload = new BytesRef(Payload4);
+                    }
+                    NumSeen++;
+                    result = true;
+                }
+                return result;
+            }
+
+            public override void Reset()
+            {
+                base.Reset();
+                this.NumSeen = 0;
+            }
+        }
+
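+        /// <summary>
+        /// Builds an exact (slop 0) PayloadNearQuery from a whitespace-delimited phrase:
+        /// one SpanTermQuery clause per word, scored by the given PayloadFunction.
+        /// </summary>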
+        private PayloadNearQuery NewPhraseQuery(string fieldName, string phrase, bool inOrder, PayloadFunction function)
+        {
+            var words = _whiteSpaceRegex.Split(phrase);
+            var clauses = new SpanQuery[words.Length];
+            for (var i = 0; i < clauses.Length; i++)
+            {
+                clauses[i] = new SpanTermQuery(new Term(fieldName, words[i]));
+            }
+            return new PayloadNearQuery(clauses, 0, inOrder, function);
+        }
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Is non-static because NewIndexWriterConfig is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new PayloadAnalyzer()).SetSimilarity(similarity));
+            //writer.infoStream = System.out;
+            for (int i = 0; i < 1000; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewTextField("field", English.IntToEnglish(i), Field.Store.YES));
+                string txt = English.IntToEnglish(i) + ' ' + English.IntToEnglish(i + 1);
+                doc.Add(NewTextField("field2", txt, Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            Reader = writer.Reader;
+            writer.Dispose();
+
+            Searcher = NewSearcher(Reader);
+            Searcher.Similarity = similarity;
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            Searcher = null;
+            Reader.Dispose();
+            Reader = null;
+            Directory.Dispose();
+            Directory = null;
+        }
+
+        [Test]
+        public virtual void Test()
+        {
+            PayloadNearQuery query;
+            TopDocs hits;
+
+            query = NewPhraseQuery("field", "twenty two", true, new AveragePayloadFunction());
+            QueryUtils.Check(query);
+
+            // all 10 hits should have score = 3 because adjacent terms have payloads of 2,4
+            // and all the similarity factors are set to 1
+            hits = Searcher.Search(query, null, 100);
+            Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+            // 10 documents (22, 122, ..., 922) contain the phrase "twenty two"
+            Assert.AreEqual(10, hits.TotalHits, "should be 10 hits");
+            for (int j = 0; j < hits.ScoreDocs.Length; j++)
+            {
+                ScoreDoc doc = hits.ScoreDocs[j];
+                Assert.AreEqual(3, doc.Score, doc.Score + " does not equal: " + 3);
+            }
+            for (int i = 1; i < 10; i++)
+            {
+                query = NewPhraseQuery("field", English.IntToEnglish(i) + " hundred", true, new AveragePayloadFunction());
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: run query=" + query);
+                }
+                // all should have score = 3 because adjacent terms have payloads of 2,4
+                // and all the similarity factors are set to 1
+                hits = Searcher.Search(query, null, 100);
+                Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+                Assert.AreEqual(100, hits.TotalHits, "should be 100 hits");
+                for (int j = 0; j < hits.ScoreDocs.Length; j++)
+                {
+                    ScoreDoc doc = hits.ScoreDocs[j];
+                    //        System.out.println("Doc: " + doc.toString());
+                    //        System.out.println("Explain: " + searcher.Explain(query, doc.Doc));
+                    Assert.AreEqual(3, doc.Score, doc.Score + " does not equal: " + 3);
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestPayloadNear()
+        {
+            SpanNearQuery q1, q2;
+            PayloadNearQuery query;
+            //SpanNearQuery(clauses, 10000, false)
+            q1 = SpanNearQuery("field2", "twenty two");
+            q2 = SpanNearQuery("field2", "twenty three");
+            SpanQuery[] clauses = new SpanQuery[2];
+            clauses[0] = q1;
+            clauses[1] = q2;
+            query = new PayloadNearQuery(clauses, 10, false);
+            //System.out.println(query.toString());
+            Assert.AreEqual(12, Searcher.Search(query, null, 100).TotalHits);
+            /*
+            System.out.println(hits.TotalHits);
+            for (int j = 0; j < hits.ScoreDocs.Length; j++) {
+              ScoreDoc doc = hits.ScoreDocs[j];
+              System.out.println("doc: "+doc.Doc+", score: "+doc.Score);
+            }
+            */
+        }
+
+        [Test]
+        public virtual void TestAverageFunction()
+        {
+            PayloadNearQuery query;
+            TopDocs hits;
+
+            query = NewPhraseQuery("field", "twenty two", true, new AveragePayloadFunction());
+            QueryUtils.Check(query);
+            // all 10 hits should have score = 3 because adjacent terms have payloads of 2,4
+            // and all the similarity factors are set to 1
+            hits = Searcher.Search(query, null, 100);
+            Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+            Assert.AreEqual(10, hits.TotalHits, "should be 10 hits");
+            for (int j = 0; j < hits.ScoreDocs.Length; j++)
+            {
+                ScoreDoc doc = hits.ScoreDocs[j];
+                Assert.AreEqual(3, doc.Score, doc.Score + " does not equal: " + 3);
+                Explanation explain = Searcher.Explain(query, hits.ScoreDocs[j].Doc);
+                string exp = explain.ToString();
+                Assert.IsTrue(exp.IndexOf("AveragePayloadFunction") > -1, exp);
+                Assert.AreEqual(3f, explain.Value, hits.ScoreDocs[j].Score + " explain value does not equal: " + 3);
+            }
+        }
+
+        [Test]
+        public virtual void TestMaxFunction()
+        {
+            PayloadNearQuery query;
+            TopDocs hits;
+
+            query = NewPhraseQuery("field", "twenty two", true, new MaxPayloadFunction());
+            QueryUtils.Check(query);
+            // all 10 hits should have score = 4 (max payload value)
+            hits = Searcher.Search(query, null, 100);
+            Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+            Assert.AreEqual(10, hits.TotalHits, "should be 10 hits");
+            for (int j = 0; j < hits.ScoreDocs.Length; j++)
+            {
+                ScoreDoc doc = hits.ScoreDocs[j];
+                Assert.AreEqual(4, doc.Score, doc.Score + " does not equal: " + 4);
+                Explanation explain = Searcher.Explain(query, hits.ScoreDocs[j].Doc);
+                string exp = explain.ToString();
+                Assert.IsTrue(exp.IndexOf("MaxPayloadFunction") > -1, exp);
+                Assert.AreEqual(4f, explain.Value, hits.ScoreDocs[j].Score + " explain value does not equal: " + 4);
+            }
+        }
+
+        [Test]
+        public virtual void TestMinFunction()
+        {
+            PayloadNearQuery query;
+            TopDocs hits;
+
+            query = NewPhraseQuery("field", "twenty two", true, new MinPayloadFunction());
+            QueryUtils.Check(query);
+            // all 10 hits should have score = 2 (min payload value)
+            hits = Searcher.Search(query, null, 100);
+            Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+            Assert.AreEqual(10, hits.TotalHits, "should be 10 hits");
+            for (int j = 0; j < hits.ScoreDocs.Length; j++)
+            {
+                ScoreDoc doc = hits.ScoreDocs[j];
+                Assert.AreEqual(2, doc.Score, doc.Score + " does not equal: " + 2);
+                Explanation explain = Searcher.Explain(query, hits.ScoreDocs[j].Doc);
+                string exp = explain.ToString();
+                Assert.IsTrue(exp.IndexOf("MinPayloadFunction") > -1, exp);
+                Assert.AreEqual(2f, explain.Value, hits.ScoreDocs[j].Score + " explain value does not equal: " + 2);
+            }
+        }
+
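+        // Convenience pair of unordered sub-span queries over field2 ("twenty two" and "twenty three").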
+        private SpanQuery[] Clauses
+        {
+            get
+            {
+                SpanNearQuery q1, q2;
+                q1 = SpanNearQuery("field2", "twenty two");
+                q2 = SpanNearQuery("field2", "twenty three");
+                SpanQuery[] clauses = new SpanQuery[2];
+                clauses[0] = q1;
+                clauses[1] = q2;
+                return clauses;
+            }
+        }
+
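+        /// <summary>
+        /// Builds an unordered SpanNearQuery (slop 10000) of PayloadTermQuery clauses,
+        /// one per whitespace-delimited word, each using an AveragePayloadFunction.
+        /// </summary>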
+        private SpanNearQuery SpanNearQuery(string fieldName, string words)
+        {
+            var wordList = _whiteSpaceRegex.Split(words);
+            var clauses = new SpanQuery[wordList.Length];
+            for (var i = 0; i < clauses.Length; i++)
+            {
+                clauses[i] = new PayloadTermQuery(new Term(fieldName, wordList[i]), new AveragePayloadFunction());
+            }
+            return new SpanNearQuery(clauses, 10000, false);
+        }
+
+        [Test]
+        public virtual void TestLongerSpan()
+        {
+            PayloadNearQuery query;
+            TopDocs hits;
+            query = NewPhraseQuery("field", "nine hundred ninety nine", true, new AveragePayloadFunction());
+            hits = Searcher.Search(query, null, 100);
+            Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+            ScoreDoc doc = hits.ScoreDocs[0];
+            //    System.out.println("Doc: " + doc.toString());
+            //    System.out.println("Explain: " + searcher.Explain(query, doc.Doc));
+            Assert.IsTrue(hits.TotalHits == 1, "there should only be one hit");
+            // should have score = 3 because adjacent terms have payloads of 2,4
+            Assert.AreEqual(3, doc.Score, doc.Score + " does not equal: " + 3);
+        }
+
+        [Test]
+        public virtual void TestComplexNested()
+        {
+            PayloadNearQuery query;
+            TopDocs hits;
+
+            // combine ordered and unordered spans with some nesting to make sure all payloads are counted
+
+            SpanQuery q1 = NewPhraseQuery("field", "nine hundred", true, new AveragePayloadFunction());
+            SpanQuery q2 = NewPhraseQuery("field", "ninety nine", true, new AveragePayloadFunction());
+            SpanQuery q3 = NewPhraseQuery("field", "nine ninety", false, new AveragePayloadFunction());
+            SpanQuery q4 = NewPhraseQuery("field", "hundred nine", false, new AveragePayloadFunction());
+            SpanQuery[] clauses = new SpanQuery[] { new PayloadNearQuery(new SpanQuery[] { q1, q2 }, 0, true), new PayloadNearQuery(new SpanQuery[] { q3, q4 }, 0, false) };
+            query = new PayloadNearQuery(clauses, 0, false);
+            hits = Searcher.Search(query, null, 100);
+            Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+            // should be only 1 hit - doc 999
+            Assert.IsTrue(hits.ScoreDocs.Length == 1, "should only be one hit");
+            // the score should be 3 - the average of all the underlying payloads
+            ScoreDoc doc = hits.ScoreDocs[0];
+            //    System.out.println("Doc: " + doc.toString());
+            //    System.out.println("Explain: " + searcher.Explain(query, doc.Doc));
+            Assert.IsTrue(doc.Score == 3, doc.Score + " does not equal: " + 3);
+        }
+
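+        // Neutralizes every scoring factor except the payload, so expected scores equal payload values.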
+        internal class BoostingSimilarity : DefaultSimilarity
+        {
+            public override float QueryNorm(float sumOfSquaredWeights)
+            {
+                return 1.0f;
+            }
+
+            public override float Coord(int overlap, int maxOverlap)
+            {
+                return 1.0f;
+            }
+
+            public override float ScorePayload(int docId, int start, int end, BytesRef payload)
+            {
+                //the payload is a single byte here, so just read the byte at the offset
+                return payload.Bytes[payload.Offset];
+            }
+
+            //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+            //Make everything else 1 so we see the effect of the payload
+            //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+            public override float LengthNorm(FieldInvertState state)
+            {
+                return state.Boost;
+            }
+
+            public override float SloppyFreq(int distance)
+            {
+                return 1.0f;
+            }
+
+            public override float Tf(float freq)
+            {
+                return 1.0f;
+            }
+
+            // idf used for phrase queries
+            public override Explanation IdfExplain(CollectionStatistics collectionStats, TermStatistics[] termStats)
+            {
+                return new Explanation(1.0f, "Inexplicable");
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Payloads/TestPayloadTermQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Payloads/TestPayloadTermQuery.cs b/src/Lucene.Net.Tests/Search/Payloads/TestPayloadTermQuery.cs
new file mode 100644
index 0000000..a68867e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Payloads/TestPayloadTermQuery.cs
@@ -0,0 +1,367 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search.Payloads
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Analysis;
+    using NUnit.Framework;
+    using System.IO;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using English = Lucene.Net.Util.English;
+    using Field = Field;
+    using FieldInvertState = Lucene.Net.Index.FieldInvertState;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MultiSpansWrapper = Lucene.Net.Search.Spans.MultiSpansWrapper;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Similarity = Lucene.Net.Search.Similarities.Similarity;
+    using Spans = Lucene.Net.Search.Spans.Spans;
+    using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
+    using Term = Lucene.Net.Index.Term;
+
+    [TestFixture]
+    public class TestPayloadTermQuery : LuceneTestCase
+    {
+        private static IndexSearcher Searcher;
+        private static IndexReader Reader;
+        private static readonly Similarity similarity = new BoostingSimilarity();
+        private static readonly byte[] PayloadField = { 1 };
+        private static readonly byte[] PayloadMultiField1 = { 2 };
+        private static readonly byte[] PayloadMultiField2 = { 4 };
+        protected internal static Directory Directory;
+
+        private class PayloadAnalyzer : Analyzer
+        {
+            internal PayloadAnalyzer()
+                : base(PER_FIELD_REUSE_STRATEGY)
+            {
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer result = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+                return new TokenStreamComponents(result, new PayloadFilter(result, fieldName));
+            }
+        }
+
+        private class PayloadFilter : TokenFilter
+        {
+            internal readonly string FieldName;
+            internal int NumSeen = 0;
+
+            internal readonly IPayloadAttribute PayloadAtt;
+
+            public PayloadFilter(TokenStream input, string fieldName)
+                : base(input)
+            {
+                this.FieldName = fieldName;
+                PayloadAtt = AddAttribute<IPayloadAttribute>();
+            }
+
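+            // Attaches payloads: "field" tokens always get payload {1}; "multiField"
+            // tokens alternate between payloads {2} and {4}.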
+            public sealed override bool IncrementToken()
+            {
+                bool hasNext = m_input.IncrementToken();
+                if (hasNext)
+                {
+                    if (FieldName.Equals("field"))
+                    {
+                        PayloadAtt.Payload = new BytesRef(PayloadField);
+                    }
+                    else if (FieldName.Equals("multiField"))
+                    {
+                        if (NumSeen % 2 == 0)
+                        {
+                            PayloadAtt.Payload = new BytesRef(PayloadMultiField1);
+                        }
+                        else
+                        {
+                            PayloadAtt.Payload = new BytesRef(PayloadMultiField2);
+                        }
+                        NumSeen++;
+                    }
+                    return true;
+                }
+                else
+                {
+                    return false;
+                }
+            }
+
+            public override void Reset()
+            {
+                base.Reset();
+                this.NumSeen = 0;
+            }
+        }
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Is non-static because NewIndexWriterConfig is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new PayloadAnalyzer()).SetSimilarity(similarity).SetMergePolicy(NewLogMergePolicy()));
+            //writer.infoStream = System.out;
+            for (int i = 0; i < 1000; i++)
+            {
+                Document doc = new Document();
+                Field noPayloadField = NewTextField(PayloadHelper.NO_PAYLOAD_FIELD, English.IntToEnglish(i), Field.Store.YES);
+                //noPayloadField.setBoost(0);
+                doc.Add(noPayloadField);
+                doc.Add(NewTextField("field", English.IntToEnglish(i), Field.Store.YES));
+                doc.Add(NewTextField("multiField", English.IntToEnglish(i) + "  " + English.IntToEnglish(i), Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            Reader = writer.Reader;
+            writer.Dispose();
+
+            Searcher = NewSearcher(Reader);
+            Searcher.Similarity = similarity;
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            Searcher = null;
+            Reader.Dispose();
+            Reader = null;
+            Directory.Dispose();
+            Directory = null;
+        }
+
+        [Test]
+        public virtual void Test()
+        {
+            PayloadTermQuery query = new PayloadTermQuery(new Term("field", "seventy"), new MaxPayloadFunction());
+            TopDocs hits = Searcher.Search(query, null, 100);
+            Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+            Assert.IsTrue(hits.TotalHits == 100, "hits Size: " + hits.TotalHits + " is not: " + 100);
+
+            //they should all have the exact same score, because they all contain seventy once, and we set
+            //all the other similarity factors to be 1
+
+            Assert.IsTrue(hits.MaxScore == 1, hits.MaxScore + " does not equal: " + 1);
+            for (int i = 0; i < hits.ScoreDocs.Length; i++)
+            {
+                ScoreDoc doc = hits.ScoreDocs[i];
+                Assert.IsTrue(doc.Score == 1, doc.Score + " does not equal: " + 1);
+            }
+            CheckHits.CheckExplanations(query, PayloadHelper.FIELD, Searcher, true);
+            Spans spans = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, query);
+            Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
+            /*float score = hits.Score(0);
+            for (int i =1; i < hits.Length(); i++)
+            {
+              Assert.IsTrue(score == hits.Score(i), "scores are not equal and they should be");
+            }*/
+        }
+
+        [Test]
+        public virtual void TestQuery()
+        {
+            PayloadTermQuery boostingFuncTermQuery = new PayloadTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"), new MaxPayloadFunction());
+            QueryUtils.Check(boostingFuncTermQuery);
+
+            SpanTermQuery spanTermQuery = new SpanTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"));
+
+            Assert.IsTrue(boostingFuncTermQuery.Equals(spanTermQuery) == spanTermQuery.Equals(boostingFuncTermQuery));
+
+            PayloadTermQuery boostingFuncTermQuery2 = new PayloadTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"), new AveragePayloadFunction());
+
+            QueryUtils.CheckUnequal(boostingFuncTermQuery, boostingFuncTermQuery2);
+        }
+
+        [Test]
+        public virtual void TestMultipleMatchesPerDoc()
+        {
+            PayloadTermQuery query = new PayloadTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"), new MaxPayloadFunction());
+            TopDocs hits = Searcher.Search(query, null, 100);
+            Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+            Assert.IsTrue(hits.TotalHits == 100, "hits Size: " + hits.TotalHits + " is not: " + 100);
+
+            //they should all have the exact same score, because they all contain seventy once, and we set
+            //all the other similarity factors to be 1
+
+            //System.out.println("Hash: " + seventyHash + " Twice Hash: " + 2*seventyHash);
+            Assert.IsTrue(hits.MaxScore == 4.0, hits.MaxScore + " does not equal: " + 4.0);
+            //there should be exactly 10 items that score a 4, all the rest should score a 2
+            //The 10 items are: 70 + i*100 where i in [0-9]
+            int numTens = 0;
+            for (int i = 0; i < hits.ScoreDocs.Length; i++)
+            {
+                ScoreDoc doc = hits.ScoreDocs[i];
+                if (doc.Doc % 10 == 0)
+                {
+                    numTens++;
+                    Assert.IsTrue(doc.Score == 4.0, doc.Score + " does not equal: " + 4.0);
+                }
+                else
+                {
+                    Assert.IsTrue(doc.Score == 2, doc.Score + " does not equal: " + 2);
+                }
+            }
+            Assert.IsTrue(numTens == 10, numTens + " does not equal: " + 10);
+            CheckHits.CheckExplanations(query, "field", Searcher, true);
+            Spans spans = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, query);
+            Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
+            //should be two matches per document
+            int count = 0;
+            //100 hits times 2 matches per hit, we should have 200 in count
+            while (spans.Next())
+            {
+                count++;
+            }
+            Assert.IsTrue(count == 200, count + " does not equal: " + 200);
+        }
+
+        //Set includeSpanScore to false, in which case just the payload score comes through.
+        [Test]
+        public virtual void TestIgnoreSpanScorer()
+        {
+            PayloadTermQuery query = new PayloadTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"), new MaxPayloadFunction(), false);
+
+            IndexReader reader = DirectoryReader.Open(Directory);
+            IndexSearcher theSearcher = NewSearcher(reader);
+            theSearcher.Similarity = new FullSimilarity();
+            TopDocs hits = Searcher.Search(query, null, 100);
+            Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+            Assert.IsTrue(hits.TotalHits == 100, "hits Size: " + hits.TotalHits + " is not: " + 100);
+
+            //they should all have the exact same score, because they all contain seventy once, and we set
+            //all the other similarity factors to be 1
+
+            //System.out.println("Hash: " + seventyHash + " Twice Hash: " + 2*seventyHash);
+            Assert.IsTrue(hits.MaxScore == 4.0, hits.MaxScore + " does not equal: " + 4.0);
+            //there should be exactly 10 items that score a 4, all the rest should score a 2
+            //The 10 items are: 70 + i*100 where i in [0-9]
+            int numTens = 0;
+            for (int i = 0; i < hits.ScoreDocs.Length; i++)
+            {
+                ScoreDoc doc = hits.ScoreDocs[i];
+                if (doc.Doc % 10 == 0)
+                {
+                    numTens++;
+                    Assert.IsTrue(doc.Score == 4.0, doc.Score + " does not equal: " + 4.0);
+                }
+                else
+                {
+                    Assert.IsTrue(doc.Score == 2, doc.Score + " does not equal: " + 2);
+                }
+            }
+            Assert.IsTrue(numTens == 10, numTens + " does not equal: " + 10);
+            CheckHits.CheckExplanations(query, "field", Searcher, true);
+            Spans spans = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, query);
+            Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
+            //should be two matches per document
+            int count = 0;
+            //100 hits times 2 matches per hit, we should have 200 in count
+            while (spans.Next())
+            {
+                count++;
+            }
+            Assert.IsTrue(count == 200, count + " does not equal: " + 200);
+            reader.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNoMatch()
+        {
+            PayloadTermQuery query = new PayloadTermQuery(new Term(PayloadHelper.FIELD, "junk"), new MaxPayloadFunction());
+            TopDocs hits = Searcher.Search(query, null, 100);
+            Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+            Assert.IsTrue(hits.TotalHits == 0, "hits Size: " + hits.TotalHits + " is not: " + 0);
+        }
+
+        [Test]
+        public virtual void TestNoPayload()
+        {
+            PayloadTermQuery q1 = new PayloadTermQuery(new Term(PayloadHelper.NO_PAYLOAD_FIELD, "zero"), new MaxPayloadFunction());
+            PayloadTermQuery q2 = new PayloadTermQuery(new Term(PayloadHelper.NO_PAYLOAD_FIELD, "foo"), new MaxPayloadFunction());
+            BooleanClause c1 = new BooleanClause(q1, Occur.MUST);
+            BooleanClause c2 = new BooleanClause(q2, Occur.MUST_NOT);
+            BooleanQuery query = new BooleanQuery();
+            query.Add(c1);
+            query.Add(c2);
+            TopDocs hits = Searcher.Search(query, null, 100);
+            Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+            Assert.IsTrue(hits.TotalHits == 1, "hits Size: " + hits.TotalHits + " is not: " + 1);
+            int[] results = new int[1];
+            results[0] = 0; //hits.ScoreDocs[0].Doc;
+            CheckHits.CheckHitCollector(Random(), query, PayloadHelper.NO_PAYLOAD_FIELD, Searcher, results, Similarity);
+        }
+
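+        // Neutralizes every scoring factor except the payload, so expected scores equal payload values.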
+        internal class BoostingSimilarity : DefaultSimilarity
+        {
+            public override float QueryNorm(float sumOfSquaredWeights)
+            {
+                return 1;
+            }
+
+            public override float Coord(int overlap, int maxOverlap)
+            {
+                return 1;
+            }
+
+            // TODO: Remove warning after API has been finalized
+            public override float ScorePayload(int docId, int start, int end, BytesRef payload)
+            {
+                //the payload is a single byte here, so just read the byte at the offset
+                return payload.Bytes[payload.Offset];
+            }
+
+            //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+            //Make everything else 1 so we see the effect of the payload
+            //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+            public override float LengthNorm(FieldInvertState state)
+            {
+                return state.Boost;
+            }
+
+            public override float SloppyFreq(int distance)
+            {
+                return 1;
+            }
+
+            public override float Idf(long docFreq, long numDocs)
+            {
+                return 1;
+            }
+
+            public override float Tf(float freq)
+            {
+                return freq == 0 ? 0 : 1;
+            }
+        }
+
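+        // Note: this ScorePayload overload does not override anything on DefaultSimilarity,
+        // so the framework never calls it; presumably kept for parity with the original Java test.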
+        internal class FullSimilarity : DefaultSimilarity
+        {
+            public virtual float ScorePayload(int docId, string fieldName, sbyte[] payload, int offset, int length)
+            {
+                //the payload is a single byte here, so just read the byte at the offset
+                return payload[offset];
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/Similarities/TestSimilarity2.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Similarities/TestSimilarity2.cs b/src/Lucene.Net.Tests/Search/Similarities/TestSimilarity2.cs
new file mode 100644
index 0000000..5f92b87
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/Similarities/TestSimilarity2.cs
@@ -0,0 +1,275 @@
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+
+namespace Lucene.Net.Search.Similarities
+{
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using SpanOrQuery = Lucene.Net.Search.Spans.SpanOrQuery;
+    using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
+    using Term = Lucene.Net.Index.Term;
+    using TextField = TextField;
+
+    /// <summary>
+    /// Tests against all the similarities we have
+    /// </summary>
+    [TestFixture]
+    public class TestSimilarity2 : LuceneTestCase
+    {
+        internal IList<Similarity> Sims;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Sims = new List<Similarity>();
+            Sims.Add(new DefaultSimilarity());
+            Sims.Add(new BM25Similarity());
+            // TODO: not great that we dup this all with TestSimilarityBase
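+            // Cross every DFR component: one DFRSimilarity per (BasicModel, AfterEffect, Normalization) triple.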
+            foreach (BasicModel basicModel in TestSimilarityBase.BASIC_MODELS)
+            {
+                foreach (AfterEffect afterEffect in TestSimilarityBase.AFTER_EFFECTS)
+                {
+                    foreach (Normalization normalization in TestSimilarityBase.NORMALIZATIONS)
+                    {
+                        Sims.Add(new DFRSimilarity(basicModel, afterEffect, normalization));
+                    }
+                }
+            }
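+            // ... and one IBSimilarity per (Distribution, Lambda, Normalization) triple.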
+            foreach (Distribution distribution in TestSimilarityBase.DISTRIBUTIONS)
+            {
+                foreach (Lambda lambda in TestSimilarityBase.LAMBDAS)
+                {
+                    foreach (Normalization normalization in TestSimilarityBase.NORMALIZATIONS)
+                    {
+                        Sims.Add(new IBSimilarity(distribution, lambda, normalization));
+                    }
+                }
+            }
+            Sims.Add(new LMDirichletSimilarity());
+            Sims.Add(new LMJelinekMercerSimilarity(0.1f));
+            Sims.Add(new LMJelinekMercerSimilarity(0.7f));
+        }
+
+        /// <summary>
+        /// because of things like query norm, it's possible we call computeStats on a field that doesn't exist at all;
+        /// test this against a totally empty index, to make sure sims handle it
+        /// </summary>
+        [Test]
+        public virtual void TestEmptyIndex()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            IndexReader ir = iw.Reader;
+            iw.Dispose();
+            IndexSearcher @is = NewSearcher(ir);
+
+            foreach (Similarity sim in Sims)
+            {
+                @is.Similarity = sim;
+                Assert.AreEqual(0, @is.Search(new TermQuery(new Term("foo", "bar")), 10).TotalHits);
+            }
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// similar to the above, but ORs the query with a real field </summary>
+        [Test]
+        public virtual void TestEmptyField()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewTextField("foo", "bar", Field.Store.NO));
+            iw.AddDocument(doc);
+            IndexReader ir = iw.Reader;
+            iw.Dispose();
+            IndexSearcher @is = NewSearcher(ir);
+
+            foreach (Similarity sim in Sims)
+            {
+                @is.Similarity = sim;
+                BooleanQuery query = new BooleanQuery(true);
+                query.Add(new TermQuery(new Term("foo", "bar")), Occur.SHOULD);
+                query.Add(new TermQuery(new Term("bar", "baz")), Occur.SHOULD);
+                Assert.AreEqual(1, @is.Search(query, 10).TotalHits);
+            }
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// similar to the above; here the field exists, but we also query with a term that doesn't exist </summary>
+        [Test]
+        public virtual void TestEmptyTerm()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewTextField("foo", "bar", Field.Store.NO));
+            iw.AddDocument(doc);
+            IndexReader ir = iw.Reader;
+            iw.Dispose();
+            IndexSearcher @is = NewSearcher(ir);
+
+            foreach (Similarity sim in Sims)
+            {
+                @is.Similarity = sim;
+                BooleanQuery query = new BooleanQuery(true);
+                query.Add(new TermQuery(new Term("foo", "bar")), Occur.SHOULD);
+                query.Add(new TermQuery(new Term("foo", "baz")), Occur.SHOULD);
+                Assert.AreEqual(1, @is.Search(query, 10).TotalHits);
+            }
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// make sure we can retrieve when norms are disabled </summary>
+        [Test]
+        public virtual void TestNoNorms()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.OmitNorms = true;
+            ft.Freeze();
+            doc.Add(NewField("foo", "bar", ft));
+            iw.AddDocument(doc);
+            IndexReader ir = iw.Reader;
+            iw.Dispose();
+            IndexSearcher @is = NewSearcher(ir);
+
+            foreach (Similarity sim in Sims)
+            {
+                @is.Similarity = sim;
+                BooleanQuery query = new BooleanQuery(true);
+                query.Add(new TermQuery(new Term("foo", "bar")), Occur.SHOULD);
+                Assert.AreEqual(1, @is.Search(query, 10).TotalHits);
+            }
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// make sure all sims work if TF is omitted </summary>
+        [Test]
+        public virtual void TestOmitTF()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.IndexOptions = IndexOptions.DOCS_ONLY;
+            ft.Freeze();
+            Field f = NewField("foo", "bar", ft);
+            doc.Add(f);
+            iw.AddDocument(doc);
+            IndexReader ir = iw.Reader;
+            iw.Dispose();
+            IndexSearcher @is = NewSearcher(ir);
+
+            foreach (Similarity sim in Sims)
+            {
+                @is.Similarity = sim;
+                BooleanQuery query = new BooleanQuery(true);
+                query.Add(new TermQuery(new Term("foo", "bar")), Occur.SHOULD);
+                Assert.AreEqual(1, @is.Search(query, 10).TotalHits);
+            }
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// make sure all sims work if TF and norms is omitted </summary>
+        [Test]
+        public virtual void TestOmitTFAndNorms()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            ft.IndexOptions = IndexOptions.DOCS_ONLY;
+            ft.OmitNorms = true;
+            ft.Freeze();
+            Field f = NewField("foo", "bar", ft);
+            doc.Add(f);
+            iw.AddDocument(doc);
+            IndexReader ir = iw.Reader;
+            iw.Dispose();
+            IndexSearcher @is = NewSearcher(ir);
+
+            foreach (Similarity sim in Sims)
+            {
+                @is.Similarity = sim;
+                BooleanQuery query = new BooleanQuery(true);
+                query.Add(new TermQuery(new Term("foo", "bar")), Occur.SHOULD);
+                Assert.AreEqual(1, @is.Search(query, 10).TotalHits);
+            }
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// make sure all sims work with spanOR(termX, termY) where termY does not exist </summary>
+        [Test]
+        public virtual void TestCrazySpans()
+        {
+            // The problem: "normal" lucene queries create scorers, returning null if terms don't exist;
+            // this means they never score a term that does not exist.
+            // However, with spans there is only one scorer for the whole hierarchy:
+            // inner queries are not real queries, their boosts are ignored, etc.
+            Directory dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
+            doc.Add(NewField("foo", "bar", ft));
+            iw.AddDocument(doc);
+            IndexReader ir = iw.Reader;
+            iw.Dispose();
+            IndexSearcher @is = NewSearcher(ir);
+
+            foreach (Similarity sim in Sims)
+            {
+                @is.Similarity = sim;
+                SpanTermQuery s1 = new SpanTermQuery(new Term("foo", "bar"));
+                SpanTermQuery s2 = new SpanTermQuery(new Term("foo", "baz"));
+                Query query = new SpanOrQuery(s1, s2);
+                TopDocs td = @is.Search(query, 10);
+                Assert.AreEqual(1, td.TotalHits);
+                float score = td.ScoreDocs[0].Score;
+                Assert.IsTrue(score >= 0.0f);
+                Assert.IsFalse(float.IsInfinity(score), "inf score for " + sim);
+            }
+            ir.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file


[53/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Renamed Codecs\asserting\ to Codecs\Asserting\

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/77e95ccc/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingTermVectorsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingTermVectorsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingTermVectorsFormat.cs
deleted file mode 100644
index eeba7da..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/asserting/AssertingTermVectorsFormat.cs
+++ /dev/null
@@ -1,208 +0,0 @@
-using System.Collections.Generic;
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.asserting
-{
-    using AssertingAtomicReader = Lucene.Net.Index.AssertingAtomicReader;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using Directory = Lucene.Net.Store.Directory;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using Fields = Lucene.Net.Index.Fields;
-    using IOContext = Lucene.Net.Store.IOContext;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using Lucene40TermVectorsFormat = Lucene.Net.Codecs.Lucene40.Lucene40TermVectorsFormat;
-    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
-
-    /// <summary>
-    /// Just like <seealso cref="Lucene40TermVectorsFormat"/> but with additional asserts.
-    /// </summary>
-    public class AssertingTermVectorsFormat : TermVectorsFormat
-    {
-        private readonly TermVectorsFormat @in = new Lucene40TermVectorsFormat();
-
-        public override TermVectorsReader VectorsReader(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context)
-        {
-            return new AssertingTermVectorsReader(@in.VectorsReader(directory, segmentInfo, fieldInfos, context));
-        }
-
-        public override TermVectorsWriter VectorsWriter(Directory directory, SegmentInfo segmentInfo, IOContext context)
-        {
-            return new AssertingTermVectorsWriter(@in.VectorsWriter(directory, segmentInfo, context));
-        }
-
-        internal class AssertingTermVectorsReader : TermVectorsReader
-        {
-            internal readonly TermVectorsReader @in;
-
-            internal AssertingTermVectorsReader(TermVectorsReader @in)
-            {
-                this.@in = @in;
-            }
-
-            protected override void Dispose(bool disposing)
-            {
-                if (disposing)
-                    @in.Dispose();
-            }
-
-            public override Fields Get(int doc)
-            {
-                Fields fields = @in.Get(doc);
-                return fields == null ? null : new AssertingAtomicReader.AssertingFields(fields);
-            }
-
-            public override object Clone()
-            {
-                return new AssertingTermVectorsReader((TermVectorsReader)@in.Clone());
-            }
-
-            public override long RamBytesUsed()
-            {
-                return @in.RamBytesUsed();
-            }
-
-            public override void CheckIntegrity()
-            {
-                @in.CheckIntegrity();
-            }
-        }
-
-        internal enum Status
-        {
-            UNDEFINED,
-            STARTED,
-            FINISHED
-        }
-
-        internal class AssertingTermVectorsWriter : TermVectorsWriter
-        {
-            internal readonly TermVectorsWriter @in;
-            internal Status DocStatus, FieldStatus, TermStatus;
-            internal int DocCount, FieldCount, TermCount, PositionCount;
-            internal bool HasPositions;
-
-            internal AssertingTermVectorsWriter(TermVectorsWriter @in)
-            {
-                this.@in = @in;
-                DocStatus = Status.UNDEFINED;
-                FieldStatus = Status.UNDEFINED;
-                TermStatus = Status.UNDEFINED;
-                FieldCount = TermCount = PositionCount = 0;
-            }
-
-            public override void StartDocument(int numVectorFields)
-            {
-                Debug.Assert(FieldCount == 0);
-                Debug.Assert(DocStatus != Status.STARTED);
-                @in.StartDocument(numVectorFields);
-                DocStatus = Status.STARTED;
-                FieldCount = numVectorFields;
-                DocCount++;
-            }
-
-            public override void FinishDocument()
-            {
-                Debug.Assert(FieldCount == 0);
-                Debug.Assert(DocStatus == Status.STARTED);
-                @in.FinishDocument();
-                DocStatus = Status.FINISHED;
-            }
-
-            public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads)
-            {
-                Debug.Assert(TermCount == 0);
-                Debug.Assert(DocStatus == Status.STARTED);
-                Debug.Assert(FieldStatus != Status.STARTED);
-                @in.StartField(info, numTerms, positions, offsets, payloads);
-                FieldStatus = Status.STARTED;
-                TermCount = numTerms;
-                HasPositions = positions || offsets || payloads;
-            }
-
-            public override void FinishField()
-            {
-                Debug.Assert(TermCount == 0);
-                Debug.Assert(FieldStatus == Status.STARTED);
-                @in.FinishField();
-                FieldStatus = Status.FINISHED;
-                --FieldCount;
-            }
-
-            public override void StartTerm(BytesRef term, int freq)
-            {
-                Debug.Assert(DocStatus == Status.STARTED);
-                Debug.Assert(FieldStatus == Status.STARTED);
-                Debug.Assert(TermStatus != Status.STARTED);
-                @in.StartTerm(term, freq);
-                TermStatus = Status.STARTED;
-                PositionCount = HasPositions ? freq : 0;
-            }
-
-            public override void FinishTerm()
-            {
-                Debug.Assert(PositionCount == 0);
-                Debug.Assert(DocStatus == Status.STARTED);
-                Debug.Assert(FieldStatus == Status.STARTED);
-                Debug.Assert(TermStatus == Status.STARTED);
-                @in.FinishTerm();
-                TermStatus = Status.FINISHED;
-                --TermCount;
-            }
-
-            public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload)
-            {
-                Debug.Assert(DocStatus == Status.STARTED);
-                Debug.Assert(FieldStatus == Status.STARTED);
-                Debug.Assert(TermStatus == Status.STARTED);
-                @in.AddPosition(position, startOffset, endOffset, payload);
-                --PositionCount;
-            }
-
-            public override void Abort()
-            {
-                @in.Abort();
-            }
-
-            public override void Finish(FieldInfos fis, int numDocs)
-            {
-                Debug.Assert(DocCount == numDocs);
-                Debug.Assert(DocStatus == (numDocs > 0 ? Status.FINISHED : Status.UNDEFINED));
-                Debug.Assert(FieldStatus != Status.STARTED);
-                Debug.Assert(TermStatus != Status.STARTED);
-                @in.Finish(fis, numDocs);
-            }
-
-            public override IComparer<BytesRef> Comparer
-            {
-                get
-                {
-                    return @in.Comparer;
-                }
-            }
-
-            protected override void Dispose(bool disposing)
-            {
-                if (disposing)
-                    @in.Dispose();
-            }
-        }
-    }
-}
\ No newline at end of file
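
For reference, the asserting writer above encodes the expected TermVectorsWriter
lifecycle in its Debug.Asserts. A minimal sketch of one valid call sequence
(illustrative only; "info" stands for some FieldInfo, and the file now lives
under Codecs\Asserting):

    writer.StartDocument(1);                        // DocStatus -> STARTED, FieldCount = 1
    writer.StartField(info, 1, true, false, false); // FieldStatus -> STARTED, TermCount = 1
    writer.StartTerm(new BytesRef("term"), 1);      // TermStatus -> STARTED, PositionCount = 1
    writer.AddPosition(0, 0, 4, null);              // consumes the single expected position
    writer.FinishTerm();                            // asserts PositionCount == 0
    writer.FinishField();                           // asserts TermCount == 0
    writer.FinishDocument();                        // asserts FieldCount == 0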

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/77e95ccc/src/Lucene.Net.TestFramework/Index/RandomCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Index/RandomCodec.cs b/src/Lucene.Net.TestFramework/Index/RandomCodec.cs
index 3b48119..68d0098 100644
--- a/src/Lucene.Net.TestFramework/Index/RandomCodec.cs
+++ b/src/Lucene.Net.TestFramework/Index/RandomCodec.cs
@@ -7,8 +7,8 @@ using System.Diagnostics;
 namespace Lucene.Net.Index
 {
     
-    using AssertingDocValuesFormat = Lucene.Net.Codecs.asserting.AssertingDocValuesFormat;
-    using AssertingPostingsFormat = Lucene.Net.Codecs.asserting.AssertingPostingsFormat;
+    using AssertingDocValuesFormat = Lucene.Net.Codecs.Asserting.AssertingDocValuesFormat;
+    using AssertingPostingsFormat = Lucene.Net.Codecs.Asserting.AssertingPostingsFormat;
 
     /*
          * Licensed to the Apache Software Foundation (ASF) under one or more

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/77e95ccc/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
index d21e48b..f789033 100644
--- a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
+++ b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
@@ -122,22 +122,22 @@
     <Compile Include="Attributes\HasTimeoutAttribute.cs" />
     <Compile Include="Attributes\LongRunningTestAttribute.cs" />
     <Compile Include="Attributes\LuceneNetSpecificAttribute.cs" />
-    <Compile Include="Codecs\asserting\AssertingCodec.cs">
+    <Compile Include="Codecs\Asserting\AssertingCodec.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\asserting\AssertingDocValuesFormat.cs">
+    <Compile Include="Codecs\Asserting\AssertingDocValuesFormat.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\asserting\AssertingNormsFormat.cs">
+    <Compile Include="Codecs\Asserting\AssertingNormsFormat.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\asserting\AssertingPostingsFormat.cs">
+    <Compile Include="Codecs\Asserting\AssertingPostingsFormat.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\asserting\AssertingStoredFieldsFormat.cs">
+    <Compile Include="Codecs\Asserting\AssertingStoredFieldsFormat.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\asserting\AssertingTermVectorsFormat.cs">
+    <Compile Include="Codecs\Asserting\AssertingTermVectorsFormat.cs">
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Codecs\Bloom\TestBloomFilteredLucene41Postings.cs" />

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/77e95ccc/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs b/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
index 72ea385..19351fd 100644
--- a/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
+++ b/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
@@ -12,7 +12,7 @@ namespace Lucene.Net.Index
     using NUnit.Framework;
     using System.IO;
     using System.Threading;
-    using AssertingDocValuesFormat = Lucene.Net.Codecs.asserting.AssertingDocValuesFormat;
+    using AssertingDocValuesFormat = Lucene.Net.Codecs.Asserting.AssertingDocValuesFormat;
     using BinaryDocValuesField = BinaryDocValuesField;
     using IBits = Lucene.Net.Util.IBits;
     using BytesRef = Lucene.Net.Util.BytesRef;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/77e95ccc/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs b/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs
index 827433f..25b8b44 100644
--- a/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs
+++ b/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs
@@ -10,7 +10,7 @@ namespace Lucene.Net.Index
     using Lucene.Net.Support;
     using NUnit.Framework;
     using System.IO;
-    using AssertingDocValuesFormat = Lucene.Net.Codecs.asserting.AssertingDocValuesFormat;
+    using AssertingDocValuesFormat = Lucene.Net.Codecs.Asserting.AssertingDocValuesFormat;
     using BinaryDocValuesField = BinaryDocValuesField;
     using IBits = Lucene.Net.Util.IBits;
     using BytesRef = Lucene.Net.Util.BytesRef;


[02/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs
new file mode 100644
index 0000000..776286a
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs
@@ -0,0 +1,705 @@
+using System;
+using System.Diagnostics;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Index;
+    using NUnit.Framework;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using SingleField = SingleField;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using Int32Field = Int32Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MultiFields = Lucene.Net.Index.MultiFields;
+    using NumericUtils = Lucene.Net.Util.NumericUtils;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using SlowCompositeReaderWrapper = Lucene.Net.Index.SlowCompositeReaderWrapper;
+    using Terms = Lucene.Net.Index.Terms;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+    using TestNumericUtils = Lucene.Net.Util.TestNumericUtils; // NaN arrays
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestNumericRangeQuery32 : LuceneTestCase
+    {
+        // distance of entries
+        private static int Distance;
+
+        // shift the start of the values to the left, so that we also get negative values:
+        private static readonly int StartOffset = -1 << 15;
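+        // e.g. with NoDocs = 4096, Distance = (1 << 30) / 4096 = 262144, so the indexed
+        // values run from -32768 (at l = 0) up to just under 1 << 30 (at l = NoDocs - 1)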
+
+        // number of docs to generate for testing
+        private static int NoDocs;
+
+        private static Directory Directory = null;
+        private static IndexReader Reader = null;
+        private static IndexSearcher Searcher = null;
+
+        /// <summary>
+        /// LUCENENET specific:
+        /// this method is non-static because NewIndexWriterConfig is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            NoDocs = AtLeast(4096);
+            Distance = (1 << 30) / NoDocs;
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory,
+                NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                    .SetMaxBufferedDocs(TestUtil.NextInt(Random(), 100, 1000))
+                    .SetMergePolicy(NewLogMergePolicy()));
+
+            FieldType storedInt = new FieldType(Int32Field.TYPE_NOT_STORED);
+            storedInt.IsStored = true;
+            storedInt.Freeze();
+
+            FieldType storedInt8 = new FieldType(storedInt);
+            storedInt8.NumericPrecisionStep = 8;
+
+            FieldType storedInt4 = new FieldType(storedInt);
+            storedInt4.NumericPrecisionStep = 4;
+
+            FieldType storedInt2 = new FieldType(storedInt);
+            storedInt2.NumericPrecisionStep = 2;
+
+            FieldType storedIntNone = new FieldType(storedInt);
+            storedIntNone.NumericPrecisionStep = int.MaxValue;
+
+            FieldType unstoredInt = Int32Field.TYPE_NOT_STORED;
+
+            FieldType unstoredInt8 = new FieldType(unstoredInt);
+            unstoredInt8.NumericPrecisionStep = 8;
+
+            FieldType unstoredInt4 = new FieldType(unstoredInt);
+            unstoredInt4.NumericPrecisionStep = 4;
+
+            FieldType unstoredInt2 = new FieldType(unstoredInt);
+            unstoredInt2.NumericPrecisionStep = 2;
+
+            Int32Field field8 = new Int32Field("field8", 0, storedInt8),
+                field4 = new Int32Field("field4", 0, storedInt4),
+                field2 = new Int32Field("field2", 0, storedInt2),
+                fieldNoTrie = new Int32Field("field" + int.MaxValue, 0, storedIntNone),
+                ascfield8 = new Int32Field("ascfield8", 0, unstoredInt8),
+                ascfield4 = new Int32Field("ascfield4", 0, unstoredInt4),
+                ascfield2 = new Int32Field("ascfield2", 0, unstoredInt2);
+
+            Document doc = new Document();
+            // add fields that have a distance, to test general functionality
+            doc.Add(field8);
+            doc.Add(field4);
+            doc.Add(field2);
+            doc.Add(fieldNoTrie);
+            // add ascending fields with a distance of 1, beginning at -noDocs/2, to test the correct splitting of ranges and inclusive/exclusive bounds
+            doc.Add(ascfield8);
+            doc.Add(ascfield4);
+            doc.Add(ascfield2);
+
+            // Add a series of noDocs docs with increasing int values
+            for (int l = 0; l < NoDocs; l++)
+            {
+                int val = Distance * l + StartOffset;
+                field8.SetInt32Value(val);
+                field4.SetInt32Value(val);
+                field2.SetInt32Value(val);
+                fieldNoTrie.SetInt32Value(val);
+
+                val = l - (NoDocs / 2);
+                ascfield8.SetInt32Value(val);
+                ascfield4.SetInt32Value(val);
+                ascfield2.SetInt32Value(val);
+                writer.AddDocument(doc);
+            }
+
+            Reader = writer.Reader;
+            Searcher = NewSearcher(Reader);
+            writer.Dispose();
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            Searcher = null;
+            Reader.Dispose();
+            Reader = null;
+            Directory.Dispose();
+            Directory = null;
+        }
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            // set the theoretical maximum term count for 8bit (see docs for the number)
+            // super.tearDown will restore the default
+            BooleanQuery.MaxClauseCount = 3 * 255 * 2 + 255;
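+            // the number breaks down as (32/8 - 1) lower levels contributing at most
+            // 255 * 2 terms each, plus at most 255 terms for the highest level:
+            // 3 * 255 * 2 + 255 = 1785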
+        }
+
+        /// <summary>
+        /// Tests both the constant score and boolean query rewrite modes; the other tests use only the constant score mode. </summary>
+        private void TestRange(int precisionStep)
+        {
+            string field = "field" + precisionStep;
+            int count = 3000;
+            int lower = (Distance * 3 / 2) + StartOffset, upper = lower + count * Distance + (Distance / 3);
+            NumericRangeQuery<int> q = NumericRangeQuery.NewInt32Range(field, precisionStep, lower, upper, true, true);
+            NumericRangeFilter<int> f = NumericRangeFilter.NewInt32Range(field, precisionStep, lower, upper, true, true);
+            for (sbyte i = 0; i < 3; i++)
+            {
+                TopDocs topDocs;
+                string type;
+                switch (i)
+                {
+                    case 0:
+                        type = " (constant score filter rewrite)";
+                        q.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
+                        topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER);
+                        break;
+
+                    case 1:
+                        type = " (constant score boolean rewrite)";
+                        q.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE;
+                        topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER);
+                        break;
+
+                    case 2:
+                        type = " (filter)";
+                        topDocs = Searcher.Search(new MatchAllDocsQuery(), f, NoDocs, Sort.INDEXORDER);
+                        break;
+
+                    default:
+                        return;
+                }
+                ScoreDoc[] sd = topDocs.ScoreDocs;
+                Assert.IsNotNull(sd);
+                Assert.AreEqual(count, sd.Length, "Score doc count" + type);
+                Document doc = Searcher.Doc(sd[0].Doc);
+                Assert.AreEqual(2 * Distance + StartOffset, (int)doc.GetField(field).GetNumericValue(), "First doc" + type);
+                doc = Searcher.Doc(sd[sd.Length - 1].Doc);
+                Assert.AreEqual((1 + count) * Distance + StartOffset, (int)doc.GetField(field).GetNumericValue(), "Last doc" + type);
+            }
+        }
+
+        [Test]
+        public virtual void TestRange_8bit()
+        {
+            TestRange(8);
+        }
+
+        [Test]
+        public virtual void TestRange_4bit()
+        {
+            TestRange(4);
+        }
+
+        [Test]
+        public virtual void TestRange_2bit()
+        {
+            TestRange(2);
+        }
+
+        [Test]
+        public virtual void TestInverseRange()
+        {
+            AtomicReaderContext context = (AtomicReaderContext)SlowCompositeReaderWrapper.Wrap(Reader).Context;
+            NumericRangeFilter<int> f = NumericRangeFilter.NewInt32Range("field8", 8, 1000, -1000, true, true);
+            Assert.IsNull(f.GetDocIdSet(context, (context.AtomicReader).LiveDocs), "An inverse range should return the null instance");
+            f = NumericRangeFilter.NewInt32Range("field8", 8, int.MaxValue, null, false, false);
+            Assert.IsNull(f.GetDocIdSet(context, (context.AtomicReader).LiveDocs), "An exclusive range starting with Integer.MAX_VALUE should return the null instance");
+            f = NumericRangeFilter.NewInt32Range("field8", 8, null, int.MinValue, false, false);
+            Assert.IsNull(f.GetDocIdSet(context, (context.AtomicReader).LiveDocs), "An exclusive range ending with Integer.MIN_VALUE should return the null instance");
+        }
+
+        [Test]
+        public virtual void TestOneMatchQuery()
+        {
+            NumericRangeQuery<int> q = NumericRangeQuery.NewInt32Range("ascfield8", 8, 1000, 1000, true, true);
+            TopDocs topDocs = Searcher.Search(q, NoDocs);
+            ScoreDoc[] sd = topDocs.ScoreDocs;
+            Assert.IsNotNull(sd);
+            Assert.AreEqual(1, sd.Length, "Score doc count");
+        }
+
+        private void TestLeftOpenRange(int precisionStep)
+        {
+            string field = "field" + precisionStep;
+            int count = 3000;
+            int upper = (count - 1) * Distance + (Distance / 3) + StartOffset;
+            NumericRangeQuery<int> q = NumericRangeQuery.NewInt32Range(field, precisionStep, null, upper, true, true);
+            TopDocs topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER);
+            ScoreDoc[] sd = topDocs.ScoreDocs;
+            Assert.IsNotNull(sd);
+            Assert.AreEqual(count, sd.Length, "Score doc count");
+            Document doc = Searcher.Doc(sd[0].Doc);
+            Assert.AreEqual(StartOffset, (int)doc.GetField(field).GetNumericValue(), "First doc");
+            doc = Searcher.Doc(sd[sd.Length - 1].Doc);
+            Assert.AreEqual((count - 1) * Distance + StartOffset, (int)doc.GetField(field).GetNumericValue(), "Last doc");
+
+            q = NumericRangeQuery.NewInt32Range(field, precisionStep, null, upper, false, true);
+            topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER);
+            sd = topDocs.ScoreDocs;
+            Assert.IsNotNull(sd);
+            Assert.AreEqual(count, sd.Length, "Score doc count");
+            doc = Searcher.Doc(sd[0].Doc);
+            Assert.AreEqual(StartOffset, (int)doc.GetField(field).GetNumericValue(), "First doc");
+            doc = Searcher.Doc(sd[sd.Length - 1].Doc);
+            Assert.AreEqual((count - 1) * Distance + StartOffset, (int)doc.GetField(field).GetNumericValue(), "Last doc");
+        }
+
+        [Test]
+        public virtual void TestLeftOpenRange_8bit()
+        {
+            TestLeftOpenRange(8);
+        }
+
+        [Test]
+        public virtual void TestLeftOpenRange_4bit()
+        {
+            TestLeftOpenRange(4);
+        }
+
+        [Test]
+        public virtual void TestLeftOpenRange_2bit()
+        {
+            TestLeftOpenRange(2);
+        }
+
+        private void TestRightOpenRange(int precisionStep)
+        {
+            string field = "field" + precisionStep;
+            int count = 3000;
+            int lower = (count - 1) * Distance + (Distance / 3) + StartOffset;
+            NumericRangeQuery<int> q = NumericRangeQuery.NewInt32Range(field, precisionStep, lower, null, true, true);
+            TopDocs topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER);
+            ScoreDoc[] sd = topDocs.ScoreDocs;
+            Assert.IsNotNull(sd);
+            Assert.AreEqual(NoDocs - count, sd.Length, "Score doc count");
+            Document doc = Searcher.Doc(sd[0].Doc);
+            Assert.AreEqual(count * Distance + StartOffset, (int)doc.GetField(field).GetNumericValue(), "First doc");
+            doc = Searcher.Doc(sd[sd.Length - 1].Doc);
+            Assert.AreEqual((NoDocs - 1) * Distance + StartOffset, (int)doc.GetField(field).GetNumericValue(), "Last doc");
+
+            q = NumericRangeQuery.NewInt32Range(field, precisionStep, lower, null, true, false);
+            topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER);
+            sd = topDocs.ScoreDocs;
+            Assert.IsNotNull(sd);
+            Assert.AreEqual(NoDocs - count, sd.Length, "Score doc count");
+            doc = Searcher.Doc(sd[0].Doc);
+            Assert.AreEqual(count * Distance + StartOffset, (int)doc.GetField(field).GetNumericValue(), "First doc");
+            doc = Searcher.Doc(sd[sd.Length - 1].Doc);
+            Assert.AreEqual((NoDocs - 1) * Distance + StartOffset, (int)doc.GetField(field).GetNumericValue(), "Last doc");
+        }
+
+        [Test]
+        public virtual void TestRightOpenRange_8bit()
+        {
+            TestRightOpenRange(8);
+        }
+
+        [Test]
+        public virtual void TestRightOpenRange_4bit()
+        {
+            TestRightOpenRange(4);
+        }
+
+        [Test]
+        public virtual void TestRightOpenRange_2bit()
+        {
+            TestRightOpenRange(2);
+        }
+
+        [Test]
+        public virtual void TestInfiniteValues()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(new SingleField("float", float.NegativeInfinity, Field.Store.NO));
+            doc.Add(new Int32Field("int", int.MinValue, Field.Store.NO));
+            writer.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(new SingleField("float", float.PositiveInfinity, Field.Store.NO));
+            doc.Add(new Int32Field("int", int.MaxValue, Field.Store.NO));
+            writer.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(new SingleField("float", 0.0f, Field.Store.NO));
+            doc.Add(new Int32Field("int", 0, Field.Store.NO));
+            writer.AddDocument(doc);
+
+            foreach (float f in TestNumericUtils.FLOAT_NANs)
+            {
+                doc = new Document();
+                doc.Add(new SingleField("float", f, Field.Store.NO));
+                writer.AddDocument(doc);
+            }
+
+            writer.Dispose();
+
+            IndexReader r = DirectoryReader.Open(dir);
+            IndexSearcher s = NewSearcher(r);
+
+            Query q = NumericRangeQuery.NewInt32Range("int", null, null, true, true);
+            TopDocs topDocs = s.Search(q, 10);
+            Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count");
+
+            q = NumericRangeQuery.NewInt32Range("int", null, null, false, false);
+            topDocs = s.Search(q, 10);
+            Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count");
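+            // a null bound means "open-ended": the inclusive/exclusive flags do not apply
+            // to it, so the exclusive null-to-null query above still matches all 3 docs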
+
+            q = NumericRangeQuery.NewInt32Range("int", int.MinValue, int.MaxValue, true, true);
+            topDocs = s.Search(q, 10);
+            Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count");
+
+            q = NumericRangeQuery.NewInt32Range("int", int.MinValue, int.MaxValue, false, false);
+            topDocs = s.Search(q, 10);
+            Assert.AreEqual(1, topDocs.ScoreDocs.Length, "Score doc count");
+
+            q = NumericRangeQuery.NewSingleRange("float", null, null, true, true);
+            topDocs = s.Search(q, 10);
+            Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count");
+
+            q = NumericRangeQuery.NewSingleRange("float", null, null, false, false);
+            topDocs = s.Search(q, 10);
+            Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count");
+
+            q = NumericRangeQuery.NewSingleRange("float", float.NegativeInfinity, float.PositiveInfinity, true, true);
+            topDocs = s.Search(q, 10);
+            Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count");
+
+            q = NumericRangeQuery.NewSingleRange("float", float.NegativeInfinity, float.PositiveInfinity, false, false);
+            topDocs = s.Search(q, 10);
+            Assert.AreEqual(1, topDocs.ScoreDocs.Length, "Score doc count");
+
+            q = NumericRangeQuery.NewSingleRange("float", float.NaN, float.NaN, true, true);
+            topDocs = s.Search(q, 10);
+            Assert.AreEqual(TestNumericUtils.FLOAT_NANs.Length, topDocs.ScoreDocs.Length, "Score doc count");
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        private void TestRandomTrieAndClassicRangeQuery(int precisionStep)
+        {
+            string field = "field" + precisionStep;
+            int totalTermCountT = 0, totalTermCountC = 0, termCountT, termCountC;
+            int num = TestUtil.NextInt(Random(), 10, 20);
+            for (int i = 0; i < num; i++)
+            {
+                int lower = (int)(Random().NextDouble() * NoDocs * Distance) + StartOffset;
+                int upper = (int)(Random().NextDouble() * NoDocs * Distance) + StartOffset;
+                if (lower > upper)
+                {
+                    int a = lower;
+                    lower = upper;
+                    upper = a;
+                }
+                BytesRef lowerBytes = new BytesRef(NumericUtils.BUF_SIZE_INT32), upperBytes = new BytesRef(NumericUtils.BUF_SIZE_INT32);
+                NumericUtils.Int32ToPrefixCodedBytes(lower, 0, lowerBytes);
+                NumericUtils.Int32ToPrefixCodedBytes(upper, 0, upperBytes);
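+                // at shift 0 the prefix-coded bytes sort in numeric order, so a classic
+                // TermRangeQuery over these bounds should match exactly the same docs
+                // as the trie-based NumericRangeQuery; the pairs below verify that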
+
+                // test inclusive range
+                NumericRangeQuery<int> tq = NumericRangeQuery.NewInt32Range(field, precisionStep, lower, upper, true, true);
+                TermRangeQuery cq = new TermRangeQuery(field, lowerBytes, upperBytes, true, true);
+                TopDocs tTopDocs = Searcher.Search(tq, 1);
+                TopDocs cTopDocs = Searcher.Search(cq, 1);
+                Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
+                totalTermCountT += termCountT = CountTerms(tq);
+                totalTermCountC += termCountC = CountTerms(cq);
+                CheckTermCounts(precisionStep, termCountT, termCountC);
+                // test exclusive range
+                tq = NumericRangeQuery.NewInt32Range(field, precisionStep, lower, upper, false, false);
+                cq = new TermRangeQuery(field, lowerBytes, upperBytes, false, false);
+                tTopDocs = Searcher.Search(tq, 1);
+                cTopDocs = Searcher.Search(cq, 1);
+                Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
+                totalTermCountT += termCountT = CountTerms(tq);
+                totalTermCountC += termCountC = CountTerms(cq);
+                CheckTermCounts(precisionStep, termCountT, termCountC);
+                // test left exclusive range
+                tq = NumericRangeQuery.NewInt32Range(field, precisionStep, lower, upper, false, true);
+                cq = new TermRangeQuery(field, lowerBytes, upperBytes, false, true);
+                tTopDocs = Searcher.Search(tq, 1);
+                cTopDocs = Searcher.Search(cq, 1);
+                Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
+                totalTermCountT += termCountT = CountTerms(tq);
+                totalTermCountC += termCountC = CountTerms(cq);
+                CheckTermCounts(precisionStep, termCountT, termCountC);
+                // test right exclusive range
+                tq = NumericRangeQuery.NewInt32Range(field, precisionStep, lower, upper, true, false);
+                cq = new TermRangeQuery(field, lowerBytes, upperBytes, true, false);
+                tTopDocs = Searcher.Search(tq, 1);
+                cTopDocs = Searcher.Search(cq, 1);
+                Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
+                totalTermCountT += termCountT = CountTerms(tq);
+                totalTermCountC += termCountC = CountTerms(cq);
+                CheckTermCounts(precisionStep, termCountT, termCountC);
+            }
+
+            CheckTermCounts(precisionStep, totalTermCountT, totalTermCountC);
+            if (VERBOSE && precisionStep != int.MaxValue)
+            {
+                Console.WriteLine("Average number of terms during random search on '" + field + "':");
+                Console.WriteLine(" Numeric query: " + (((double)totalTermCountT) / (num * 4)));
+                Console.WriteLine(" Classical query: " + (((double)totalTermCountC) / (num * 4)));
+            }
+        }
+
+        [Test]
+        public virtual void TestEmptyEnums()
+        {
+            int count = 3000;
+            int lower = (Distance * 3 / 2) + StartOffset, upper = lower + count * Distance + (Distance / 3);
+            // test empty enum
+            Debug.Assert(lower < upper);
+            Assert.IsTrue(0 < CountTerms(NumericRangeQuery.NewInt32Range("field4", 4, lower, upper, true, true)));
+            Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewInt32Range("field4", 4, upper, lower, true, true)));
+            // test empty enum outside of bounds
+            lower = Distance * NoDocs + StartOffset;
+            upper = 2 * lower;
+            Debug.Assert(lower < upper);
+            Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewInt32Range("field4", 4, lower, upper, true, true)));
+        }
+
+        private int CountTerms(MultiTermQuery q)
+        {
+            Terms terms = MultiFields.GetTerms(Reader, q.Field);
+            if (terms == null)
+            {
+                return 0;
+            }
+            TermsEnum termEnum = q.GetTermsEnum(terms);
+            Assert.IsNotNull(termEnum);
+            int count = 0;
+            BytesRef cur, last = null;
+            while ((cur = termEnum.Next()) != null)
+            {
+                count++;
+                if (last != null)
+                {
+                    Assert.IsTrue(last.CompareTo(cur) < 0);
+                }
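+                // Next() may reuse its BytesRef instance, so keep a deep copy of the
+                // current term for the ordering check on the next iteration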
+                last = BytesRef.DeepCopyOf(cur);
+            }
+            // LUCENE-3314: the results of Next() after it has already returned null are undefined, so we skip:
+            // Assert.IsNull(termEnum.Next());
+            return count;
+        }
+
+        private void CheckTermCounts(int precisionStep, int termCountT, int termCountC)
+        {
+            if (precisionStep == int.MaxValue)
+            {
+                Assert.AreEqual(termCountC, termCountT, "Number of terms should be equal for unlimited precStep");
+            }
+            else
+            {
+                Assert.IsTrue(termCountT <= termCountC, "Number of terms for NRQ should be <= compared to classical TRQ");
+            }
+        }
+
+        [Test]
+        public virtual void TestRandomTrieAndClassicRangeQuery_8bit()
+        {
+            TestRandomTrieAndClassicRangeQuery(8);
+        }
+
+        [Test]
+        public virtual void TestRandomTrieAndClassicRangeQuery_4bit()
+        {
+            TestRandomTrieAndClassicRangeQuery(4);
+        }
+
+        [Test]
+        public virtual void TestRandomTrieAndClassicRangeQuery_2bit()
+        {
+            TestRandomTrieAndClassicRangeQuery(2);
+        }
+
+        [Test]
+        public virtual void TestRandomTrieAndClassicRangeQuery_NoTrie()
+        {
+            TestRandomTrieAndClassicRangeQuery(int.MaxValue);
+        }
+
+        private void TestRangeSplit(int precisionStep)
+        {
+            string field = "ascfield" + precisionStep;
+            // 10-20 random tests
+            int num = TestUtil.NextInt(Random(), 10, 20);
+            for (int i = 0; i < num; i++)
+            {
+                int lower = (int)(Random().NextDouble() * NoDocs - NoDocs / 2);
+                int upper = (int)(Random().NextDouble() * NoDocs - NoDocs / 2);
+                if (lower > upper)
+                {
+                    int a = lower;
+                    lower = upper;
+                    upper = a;
+                }
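+                // the ascending fields hold consecutive values (distance 1), so each hit
+                // count below must equal the exact number of integers within the bounds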
+                // test inclusive range
+                Query tq = NumericRangeQuery.NewInt32Range(field, precisionStep, lower, upper, true, true);
+                TopDocs tTopDocs = Searcher.Search(tq, 1);
+                Assert.AreEqual(upper - lower + 1, tTopDocs.TotalHits, "Returned count of range query must be equal to inclusive range length");
+                // test exclusive range
+                tq = NumericRangeQuery.NewInt32Range(field, precisionStep, lower, upper, false, false);
+                tTopDocs = Searcher.Search(tq, 1);
+                Assert.AreEqual(Math.Max(upper - lower - 1, 0), tTopDocs.TotalHits, "Returned count of range query must be equal to exclusive range length");
+                // test left exclusive range
+                tq = NumericRangeQuery.NewInt32Range(field, precisionStep, lower, upper, false, true);
+                tTopDocs = Searcher.Search(tq, 1);
+                Assert.AreEqual(upper - lower, tTopDocs.TotalHits, "Returned count of range query must be equal to half exclusive range length");
+                // test right exclusive range
+                tq = NumericRangeQuery.NewInt32Range(field, precisionStep, lower, upper, true, false);
+                tTopDocs = Searcher.Search(tq, 1);
+                Assert.AreEqual(upper - lower, tTopDocs.TotalHits, "Returned count of range query must be equal to half exclusive range length");
+            }
+        }
+
+        [Test]
+        public virtual void TestRangeSplit_8bit()
+        {
+            TestRangeSplit(8);
+        }
+
+        [Test]
+        public virtual void TestRangeSplit_4bit()
+        {
+            TestRangeSplit(4);
+        }
+
+        [Test]
+        public virtual void TestRangeSplit_2bit()
+        {
+            TestRangeSplit(2);
+        }
+
+        /// <summary>
+        /// We fake a float test using the int-to-float conversion in NumericUtils. </summary>
+        private void TestFloatRange(int precisionStep)
+        {
+            string field = "ascfield" + precisionStep;
+            const int lower = -1000, upper = +2000;
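+            // SortableInt32ToSingle maps sortable ints to floats while preserving order,
+            // so the int-valued ascfield can stand in for a real float field here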
+
+            Query tq = NumericRangeQuery.NewSingleRange(field, precisionStep, NumericUtils.SortableInt32ToSingle(lower), NumericUtils.SortableInt32ToSingle(upper), true, true);
+            TopDocs tTopDocs = Searcher.Search(tq, 1);
+            Assert.AreEqual(upper - lower + 1, tTopDocs.TotalHits, "Returned count of range query must be equal to inclusive range length");
+
+            Filter tf = NumericRangeFilter.NewSingleRange(field, precisionStep, NumericUtils.SortableInt32ToSingle(lower), NumericUtils.SortableInt32ToSingle(upper), true, true);
+            tTopDocs = Searcher.Search(new MatchAllDocsQuery(), tf, 1);
+            Assert.AreEqual(upper - lower + 1, tTopDocs.TotalHits, "Returned count of range filter must be equal to inclusive range length");
+        }
+
+        [Test]
+        public virtual void TestFloatRange_8bit()
+        {
+            TestFloatRange(8);
+        }
+
+        [Test]
+        public virtual void TestFloatRange_4bit()
+        {
+            TestFloatRange(4);
+        }
+
+        [Test]
+        public virtual void TestFloatRange_2bit()
+        {
+            TestFloatRange(2);
+        }
+
+        private void TestSorting(int precisionStep)
+        {
+            string field = "field" + precisionStep;
+            // 10-20 random tests; the index order is ascending,
+            // so using a reverse sort field should return descending documents
+            int num = TestUtil.NextInt(Random(), 10, 20);
+            for (int i = 0; i < num; i++)
+            {
+                int lower = (int)(Random().NextDouble() * NoDocs * Distance) + StartOffset;
+                int upper = (int)(Random().NextDouble() * NoDocs * Distance) + StartOffset;
+                if (lower > upper)
+                {
+                    int a = lower;
+                    lower = upper;
+                    upper = a;
+                }
+                Query tq = NumericRangeQuery.NewInt32Range(field, precisionStep, lower, upper, true, true);
+                TopDocs topDocs = Searcher.Search(tq, null, NoDocs, new Sort(new SortField(field, SortFieldType.INT32, true)));
+                if (topDocs.TotalHits == 0)
+                {
+                    continue;
+                }
+                ScoreDoc[] sd = topDocs.ScoreDocs;
+                Assert.IsNotNull(sd);
+                int last = (int)Searcher.Doc(sd[0].Doc).GetField(field).GetNumericValue();
+                for (int j = 1; j < sd.Length; j++)
+                {
+                    int act = (int)Searcher.Doc(sd[j].Doc).GetField(field).GetNumericValue();
+                    Assert.IsTrue(last > act, "Docs should be sorted backwards");
+                    last = act;
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestSorting_8bit()
+        {
+            TestSorting(8);
+        }
+
+        [Test]
+        public virtual void TestSorting_4bit()
+        {
+            TestSorting(4);
+        }
+
+        [Test]
+        public virtual void TestSorting_2bit()
+        {
+            TestSorting(2);
+        }
+
+        [Test]
+        public virtual void TestEqualsAndHash()
+        {
+            QueryUtils.CheckHashEquals(NumericRangeQuery.NewInt32Range("test1", 4, 10, 20, true, true));
+            QueryUtils.CheckHashEquals(NumericRangeQuery.NewInt32Range("test2", 4, 10, 20, false, true));
+            QueryUtils.CheckHashEquals(NumericRangeQuery.NewInt32Range("test3", 4, 10, 20, true, false));
+            QueryUtils.CheckHashEquals(NumericRangeQuery.NewInt32Range("test4", 4, 10, 20, false, false));
+            QueryUtils.CheckHashEquals(NumericRangeQuery.NewInt32Range("test5", 4, 10, null, true, true));
+            QueryUtils.CheckHashEquals(NumericRangeQuery.NewInt32Range("test6", 4, null, 20, true, true));
+            QueryUtils.CheckHashEquals(NumericRangeQuery.NewInt32Range("test7", 4, null, null, true, true));
+            QueryUtils.CheckEqual(NumericRangeQuery.NewInt32Range("test8", 4, 10, 20, true, true), NumericRangeQuery.NewInt32Range("test8", 4, 10, 20, true, true));
+            QueryUtils.CheckUnequal(NumericRangeQuery.NewInt32Range("test9", 4, 10, 20, true, true), NumericRangeQuery.NewInt32Range("test9", 8, 10, 20, true, true));
+            QueryUtils.CheckUnequal(NumericRangeQuery.NewInt32Range("test10a", 4, 10, 20, true, true), NumericRangeQuery.NewInt32Range("test10b", 4, 10, 20, true, true));
+            QueryUtils.CheckUnequal(NumericRangeQuery.NewInt32Range("test11", 4, 10, 20, true, true), NumericRangeQuery.NewInt32Range("test11", 4, 20, 10, true, true));
+            QueryUtils.CheckUnequal(NumericRangeQuery.NewInt32Range("test12", 4, 10, 20, true, true), NumericRangeQuery.NewInt32Range("test12", 4, 10, 20, false, true));
+            QueryUtils.CheckUnequal(NumericRangeQuery.NewInt32Range("test13", 4, 10, 20, true, true), NumericRangeQuery.NewSingleRange("test13", 4, 10f, 20f, true, true));
+            // the following produces a hash collision, because a long and an int with the same value share a hash code, so we only test equality:
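+            // e.g. in .NET, 10L.GetHashCode() == 10 == ((int)10).GetHashCode()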
+            Query q1 = NumericRangeQuery.NewInt32Range("test14", 4, 10, 20, true, true);
+            Query q2 = NumericRangeQuery.NewInt64Range("test14", 4, 10L, 20L, true, true);
+            Assert.IsFalse(q1.Equals(q2));
+            Assert.IsFalse(q2.Equals(q1));
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs
new file mode 100644
index 0000000..c5c39c9
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs
@@ -0,0 +1,752 @@
+using System;
+using System.Diagnostics;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Index;
+    using NUnit.Framework;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using DoubleField = DoubleField;
+    using Field = Field;
+    using FieldType = FieldType;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using Int64Field = Int64Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MultiFields = Lucene.Net.Index.MultiFields;
+    using NumericUtils = Lucene.Net.Util.NumericUtils;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using SlowCompositeReaderWrapper = Lucene.Net.Index.SlowCompositeReaderWrapper;
+    using Terms = Lucene.Net.Index.Terms;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+    using TestNumericUtils = Lucene.Net.Util.TestNumericUtils; // NaN arrays
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestNumericRangeQuery64 : LuceneTestCase
+    {
+        // distance of entries
+        private static long Distance;
+
+        // shift the start of the values to the left, so that we also get negative values:
+        private static readonly long StartOffset = -1L << 31;
+
+        // number of docs to generate for testing
+        private static int NoDocs;
+
+        private static Directory Directory = null;
+        private static IndexReader Reader = null;
+        private static IndexSearcher Searcher = null;
+
+        /// <summary>
+        /// LUCENENET specific:
+        /// this method is non-static because NewIndexWriterConfig is no longer static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            NoDocs = AtLeast(4096);
+            Distance = (1L << 60) / NoDocs;
+            Directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory,
+                NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
+                    .SetMaxBufferedDocs(TestUtil.NextInt(Random(), 100, 1000))
+                    .SetMergePolicy(NewLogMergePolicy()));
+
+            FieldType storedLong = new FieldType(Int64Field.TYPE_NOT_STORED);
+            storedLong.IsStored = true;
+            storedLong.Freeze();
+
+            FieldType storedLong8 = new FieldType(storedLong);
+            storedLong8.NumericPrecisionStep = 8;
+
+            FieldType storedLong4 = new FieldType(storedLong);
+            storedLong4.NumericPrecisionStep = 4;
+
+            FieldType storedLong6 = new FieldType(storedLong);
+            storedLong6.NumericPrecisionStep = 6;
+
+            FieldType storedLong2 = new FieldType(storedLong);
+            storedLong2.NumericPrecisionStep = 2;
+
+            FieldType storedLongNone = new FieldType(storedLong);
+            storedLongNone.NumericPrecisionStep = int.MaxValue;
+
+            FieldType unstoredLong = Int64Field.TYPE_NOT_STORED;
+
+            FieldType unstoredLong8 = new FieldType(unstoredLong);
+            unstoredLong8.NumericPrecisionStep = 8;
+
+            FieldType unstoredLong6 = new FieldType(unstoredLong);
+            unstoredLong6.NumericPrecisionStep = 6;
+
+            FieldType unstoredLong4 = new FieldType(unstoredLong);
+            unstoredLong4.NumericPrecisionStep = 4;
+
+            FieldType unstoredLong2 = new FieldType(unstoredLong);
+            unstoredLong2.NumericPrecisionStep = 2;
+
+            Int64Field field8 = new Int64Field("field8", 0L, storedLong8),
+                field6 = new Int64Field("field6", 0L, storedLong6),
+                field4 = new Int64Field("field4", 0L, storedLong4),
+                field2 = new Int64Field("field2", 0L, storedLong2),
+                fieldNoTrie = new Int64Field("field" + int.MaxValue, 0L, storedLongNone),
+                ascfield8 = new Int64Field("ascfield8", 0L, unstoredLong8),
+                ascfield6 = new Int64Field("ascfield6", 0L, unstoredLong6),
+                ascfield4 = new Int64Field("ascfield4", 0L, unstoredLong4),
+                ascfield2 = new Int64Field("ascfield2", 0L, unstoredLong2);
+
+            Document doc = new Document();
+            // add fields that have a distance, to test general functionality
+            doc.Add(field8);
+            doc.Add(field6);
+            doc.Add(field4);
+            doc.Add(field2);
+            doc.Add(fieldNoTrie);
+            // add ascending fields with a distance of 1, beginning at -noDocs/2, to test the correct splitting of ranges and inclusive/exclusive bounds
+            doc.Add(ascfield8);
+            doc.Add(ascfield6);
+            doc.Add(ascfield4);
+            doc.Add(ascfield2);
+
+            // Add a series of noDocs docs with increasing long values, updating the fields in place
+            for (int l = 0; l < NoDocs; l++)
+            {
+                long val = Distance * l + StartOffset;
+                field8.SetInt64Value(val);
+                field6.SetInt64Value(val);
+                field4.SetInt64Value(val);
+                field2.SetInt64Value(val);
+                fieldNoTrie.SetInt64Value(val);
+
+                val = l - (NoDocs / 2);
+                ascfield8.SetInt64Value(val);
+                ascfield6.SetInt64Value(val);
+                ascfield4.SetInt64Value(val);
+                ascfield2.SetInt64Value(val);
+                writer.AddDocument(doc);
+            }
+            Reader = writer.Reader;
+            Searcher = NewSearcher(Reader);
+            writer.Dispose();
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            Searcher = null;
+            Reader.Dispose();
+            Reader = null;
+            Directory.Dispose();
+            Directory = null;
+        }
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            // set the theoretical maximum term count for 8bit (see docs for the number)
+            // super.tearDown will restore the default
+            BooleanQuery.MaxClauseCount = 7 * 255 * 2 + 255;
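+            // here (64/8 - 1) lower levels contribute at most 255 * 2 terms each, plus
+            // at most 255 terms for the highest level: 7 * 255 * 2 + 255 = 3825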
+        }
+
+        /// <summary>
+        /// Tests the constant score, boolean query, and filter modes; the other tests use only the constant score mode. </summary>
+        private void TestRange(int precisionStep)
+        {
+            string field = "field" + precisionStep;
+            int count = 3000;
+            long lower = (Distance * 3 / 2) + StartOffset, upper = lower + count * Distance + (Distance / 3);
+            NumericRangeQuery<long> q = NumericRangeQuery.NewInt64Range(field, precisionStep, lower, upper, true, true);
+            NumericRangeFilter<long> f = NumericRangeFilter.NewInt64Range(field, precisionStep, lower, upper, true, true);
+            for (sbyte i = 0; i < 3; i++)
+            {
+                TopDocs topDocs;
+                string type;
+                switch (i)
+                {
+                    case 0:
+                        type = " (constant score filter rewrite)";
+                        q.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
+                        topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER);
+                        break;
+
+                    case 1:
+                        type = " (constant score boolean rewrite)";
+                        q.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE;
+                        topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER);
+                        break;
+
+                    case 2:
+                        type = " (filter)";
+                        topDocs = Searcher.Search(new MatchAllDocsQuery(), f, NoDocs, Sort.INDEXORDER);
+                        break;
+
+                    default:
+                        return;
+                }
+                ScoreDoc[] sd = topDocs.ScoreDocs;
+                Assert.IsNotNull(sd);
+                Assert.AreEqual(count, sd.Length, "Score doc count" + type);
+                Document doc = Searcher.Doc(sd[0].Doc);
+                Assert.AreEqual(2 * Distance + StartOffset, (long)doc.GetField(field).GetNumericValue(), "First doc" + type);
+                doc = Searcher.Doc(sd[sd.Length - 1].Doc);
+                Assert.AreEqual((1 + count) * Distance + StartOffset, (long)doc.GetField(field).GetNumericValue(), "Last doc" + type);
+            }
+        }
+
+        [Test]
+        public virtual void TestRange_8bit()
+        {
+            TestRange(8);
+        }
+
+        [Test]
+        public virtual void TestRange_6bit()
+        {
+            TestRange(6);
+        }
+
+        [Test]
+        public virtual void TestRange_4bit()
+        {
+            TestRange(4);
+        }
+
+        [Test]
+        public virtual void TestRange_2bit()
+        {
+            TestRange(2);
+        }
+
+        [Test]
+        public virtual void TestInverseRange()
+        {
+            AtomicReaderContext context = (AtomicReaderContext)SlowCompositeReaderWrapper.Wrap(Searcher.IndexReader).Context;
+            NumericRangeFilter<long> f = NumericRangeFilter.NewInt64Range("field8", 8, 1000L, -1000L, true, true);
+            Assert.IsNull(f.GetDocIdSet(context, (context.AtomicReader).LiveDocs), "An inverse range should return the null instance");
+            f = NumericRangeFilter.NewInt64Range("field8", 8, long.MaxValue, null, false, false);
+            Assert.IsNull(f.GetDocIdSet(context, (context.AtomicReader).LiveDocs), "An exclusive range starting with long.MaxValue should return the null instance");
+            f = NumericRangeFilter.NewInt64Range("field8", 8, null, long.MinValue, false, false);
+            Assert.IsNull(f.GetDocIdSet(context, (context.AtomicReader).LiveDocs), "An exclusive range ending with long.MinValue should return the null instance");
+        }
+
+        [Test]
+        public virtual void TestOneMatchQuery()
+        {
+            NumericRangeQuery<long> q = NumericRangeQuery.NewInt64Range("ascfield8", 8, 1000L, 1000L, true, true);
+            TopDocs topDocs = Searcher.Search(q, NoDocs);
+            ScoreDoc[] sd = topDocs.ScoreDocs;
+            Assert.IsNotNull(sd);
+            Assert.AreEqual(1, sd.Length, "Score doc count");
+        }
+
+        private void TestLeftOpenRange(int precisionStep)
+        {
+            string field = "field" + precisionStep;
+            int count = 3000;
+            long upper = (count - 1) * Distance + (Distance / 3) + StartOffset;
+            NumericRangeQuery<long> q = NumericRangeQuery.NewInt64Range(field, precisionStep, null, upper, true, true);
+            TopDocs topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER);
+            ScoreDoc[] sd = topDocs.ScoreDocs;
+            Assert.IsNotNull(sd);
+            Assert.AreEqual(count, sd.Length, "Score doc count");
+            Document doc = Searcher.Doc(sd[0].Doc);
+            Assert.AreEqual(StartOffset, (long)doc.GetField(field).GetNumericValue(), "First doc");
+            doc = Searcher.Doc(sd[sd.Length - 1].Doc);
+            Assert.AreEqual((count - 1) * Distance + StartOffset, (long)doc.GetField(field).GetNumericValue(), "Last doc");
+
+            q = NumericRangeQuery.NewInt64Range(field, precisionStep, null, upper, false, true);
+            topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER);
+            sd = topDocs.ScoreDocs;
+            Assert.IsNotNull(sd);
+            Assert.AreEqual(count, sd.Length, "Score doc count");
+            doc = Searcher.Doc(sd[0].Doc);
+            Assert.AreEqual(StartOffset, (long)doc.GetField(field).GetNumericValue(), "First doc");
+            doc = Searcher.Doc(sd[sd.Length - 1].Doc);
+            Assert.AreEqual((count - 1) * Distance + StartOffset, (long)doc.GetField(field).GetNumericValue(), "Last doc");
+        }
+
+        [Test]
+        public virtual void TestLeftOpenRange_8bit()
+        {
+            TestLeftOpenRange(8);
+        }
+
+        [Test]
+        public virtual void TestLeftOpenRange_6bit()
+        {
+            TestLeftOpenRange(6);
+        }
+
+        [Test]
+        public virtual void TestLeftOpenRange_4bit()
+        {
+            TestLeftOpenRange(4);
+        }
+
+        [Test]
+        public virtual void TestLeftOpenRange_2bit()
+        {
+            TestLeftOpenRange(2);
+        }
+
+        private void TestRightOpenRange(int precisionStep)
+        {
+            string field = "field" + precisionStep;
+            int count = 3000;
+            long lower = (count - 1) * Distance + (Distance / 3) + StartOffset;
+            NumericRangeQuery<long> q = NumericRangeQuery.NewInt64Range(field, precisionStep, lower, null, true, true);
+            TopDocs topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER);
+            ScoreDoc[] sd = topDocs.ScoreDocs;
+            Assert.IsNotNull(sd);
+            Assert.AreEqual(NoDocs - count, sd.Length, "Score doc count");
+            Document doc = Searcher.Doc(sd[0].Doc);
+            Assert.AreEqual(count * Distance + StartOffset, (long)doc.GetField(field).GetNumericValue(), "First doc");
+            doc = Searcher.Doc(sd[sd.Length - 1].Doc);
+            Assert.AreEqual((NoDocs - 1) * Distance + StartOffset, (long)doc.GetField(field).GetNumericValue(), "Last doc");
+
+            q = NumericRangeQuery.NewInt64Range(field, precisionStep, lower, null, true, false);
+            topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER);
+            sd = topDocs.ScoreDocs;
+            Assert.IsNotNull(sd);
+            Assert.AreEqual(NoDocs - count, sd.Length, "Score doc count");
+            doc = Searcher.Doc(sd[0].Doc);
+            Assert.AreEqual(count * Distance + StartOffset, (long)doc.GetField(field).GetNumericValue(), "First doc");
+            doc = Searcher.Doc(sd[sd.Length - 1].Doc);
+            Assert.AreEqual((NoDocs - 1) * Distance + StartOffset, (long)doc.GetField(field).GetNumericValue(), "Last doc");
+        }
+
+        [Test]
+        public virtual void TestRightOpenRange_8bit()
+        {
+            TestRightOpenRange(8);
+        }
+
+        [Test]
+        public virtual void TestRightOpenRange_6bit()
+        {
+            TestRightOpenRange(6);
+        }
+
+        [Test]
+        public virtual void TestRightOpenRange_4bit()
+        {
+            TestRightOpenRange(4);
+        }
+
+        [Test]
+        public virtual void TestRightOpenRange_2bit()
+        {
+            TestRightOpenRange(2);
+        }
+
+        [Test]
+        public virtual void TestInfiniteValues()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(new DoubleField("double", double.NegativeInfinity, Field.Store.NO));
+            doc.Add(new Int64Field("long", long.MinValue, Field.Store.NO));
+            writer.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(new DoubleField("double", double.PositiveInfinity, Field.Store.NO));
+            doc.Add(new Int64Field("long", long.MaxValue, Field.Store.NO));
+            writer.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(new DoubleField("double", 0.0, Field.Store.NO));
+            doc.Add(new Int64Field("long", 0L, Field.Store.NO));
+            writer.AddDocument(doc);
+
+            foreach (double d in TestNumericUtils.DOUBLE_NANs)
+            {
+                doc = new Document();
+                doc.Add(new DoubleField("double", d, Field.Store.NO));
+                writer.AddDocument(doc);
+            }
+
+            writer.Dispose();
+
+            IndexReader r = DirectoryReader.Open(dir);
+            IndexSearcher s = NewSearcher(r);
+
+            Query q = NumericRangeQuery.NewInt64Range("long", null, null, true, true);
+            TopDocs topDocs = s.Search(q, 10);
+            Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count");
+
+            q = NumericRangeQuery.NewInt64Range("long", null, null, false, false);
+            topDocs = s.Search(q, 10);
+            Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count");
+
+            q = NumericRangeQuery.NewInt64Range("long", long.MinValue, long.MaxValue, true, true);
+            topDocs = s.Search(q, 10);
+            Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count");
+
+            q = NumericRangeQuery.NewInt64Range("long", long.MinValue, long.MaxValue, false, false);
+            topDocs = s.Search(q, 10);
+            Assert.AreEqual(1, topDocs.ScoreDocs.Length, "Score doc count");
+
+            q = NumericRangeQuery.NewDoubleRange("double", null, null, true, true);
+            topDocs = s.Search(q, 10);
+            Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count");
+
+            q = NumericRangeQuery.NewDoubleRange("double", null, null, false, false);
+            topDocs = s.Search(q, 10);
+            Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count");
+
+            q = NumericRangeQuery.NewDoubleRange("double", double.NegativeInfinity, double.PositiveInfinity, true, true);
+            topDocs = s.Search(q, 10);
+            Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count");
+
+            q = NumericRangeQuery.NewDoubleRange("double", double.NegativeInfinity, double.PositiveInfinity, false, false);
+            topDocs = s.Search(q, 10);
+            Assert.AreEqual(1, topDocs.ScoreDocs.Length, "Score doc count");
+
+            q = NumericRangeQuery.NewDoubleRange("double", double.NaN, double.NaN, true, true);
+            topDocs = s.Search(q, 10);
+            Assert.AreEqual(TestNumericUtils.DOUBLE_NANs.Length, topDocs.ScoreDocs.Length, "Score doc count");
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        private void TestRandomTrieAndClassicRangeQuery(int precisionStep)
+        {
+            string field = "field" + precisionStep;
+            int totalTermCountT = 0, totalTermCountC = 0, termCountT, termCountC;
+            int num = TestUtil.NextInt(Random(), 10, 20);
+            for (int i = 0; i < num; i++)
+            {
+                long lower = (long)(Random().NextDouble() * NoDocs * Distance) + StartOffset;
+                long upper = (long)(Random().NextDouble() * NoDocs * Distance) + StartOffset;
+                if (lower > upper)
+                {
+                    long a = lower;
+                    lower = upper;
+                    upper = a;
+                }
+                BytesRef lowerBytes = new BytesRef(NumericUtils.BUF_SIZE_INT64), upperBytes = new BytesRef(NumericUtils.BUF_SIZE_INT64);
+                NumericUtils.Int64ToPrefixCodedBytes(lower, 0, lowerBytes);
+                NumericUtils.Int64ToPrefixCodedBytes(upper, 0, upperBytes);
+
+                // test inclusive range
+                NumericRangeQuery<long> tq = NumericRangeQuery.NewInt64Range(field, precisionStep, lower, upper, true, true);
+                TermRangeQuery cq = new TermRangeQuery(field, lowerBytes, upperBytes, true, true);
+                TopDocs tTopDocs = Searcher.Search(tq, 1);
+                TopDocs cTopDocs = Searcher.Search(cq, 1);
+                Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
+                totalTermCountT += termCountT = CountTerms(tq);
+                totalTermCountC += termCountC = CountTerms(cq);
+                CheckTermCounts(precisionStep, termCountT, termCountC);
+                // test exclusive range
+                tq = NumericRangeQuery.NewInt64Range(field, precisionStep, lower, upper, false, false);
+                cq = new TermRangeQuery(field, lowerBytes, upperBytes, false, false);
+                tTopDocs = Searcher.Search(tq, 1);
+                cTopDocs = Searcher.Search(cq, 1);
+                Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
+                totalTermCountT += termCountT = CountTerms(tq);
+                totalTermCountC += termCountC = CountTerms(cq);
+                CheckTermCounts(precisionStep, termCountT, termCountC);
+                // test left exclusive range
+                tq = NumericRangeQuery.NewInt64Range(field, precisionStep, lower, upper, false, true);
+                cq = new TermRangeQuery(field, lowerBytes, upperBytes, false, true);
+                tTopDocs = Searcher.Search(tq, 1);
+                cTopDocs = Searcher.Search(cq, 1);
+                Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
+                totalTermCountT += termCountT = CountTerms(tq);
+                totalTermCountC += termCountC = CountTerms(cq);
+                CheckTermCounts(precisionStep, termCountT, termCountC);
+                // test right exclusive range
+                tq = NumericRangeQuery.NewInt64Range(field, precisionStep, lower, upper, true, false);
+                cq = new TermRangeQuery(field, lowerBytes, upperBytes, true, false);
+                tTopDocs = Searcher.Search(tq, 1);
+                cTopDocs = Searcher.Search(cq, 1);
+                Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
+                totalTermCountT += termCountT = CountTerms(tq);
+                totalTermCountC += termCountC = CountTerms(cq);
+                CheckTermCounts(precisionStep, termCountT, termCountC);
+            }
+
+            CheckTermCounts(precisionStep, totalTermCountT, totalTermCountC);
+            if (VERBOSE && precisionStep != int.MaxValue)
+            {
+                Console.WriteLine("Average number of terms during random search on '" + field + "':");
+                Console.WriteLine(" Numeric query: " + (((double)totalTermCountT) / (num * 4)));
+                Console.WriteLine(" Classical query: " + (((double)totalTermCountC) / (num * 4)));
+            }
+        }
+
+        [Test]
+        public virtual void TestEmptyEnums()
+        {
+            int count = 3000;
+            long lower = (Distance * 3 / 2) + StartOffset, upper = lower + count * Distance + (Distance / 3);
+            // test empty enum
+            Debug.Assert(lower < upper);
+            Assert.IsTrue(0 < CountTerms(NumericRangeQuery.NewInt64Range("field4", 4, lower, upper, true, true)));
+            Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewInt64Range("field4", 4, upper, lower, true, true)));
+            // test empty enum outside of bounds
+            lower = Distance * NoDocs + StartOffset;
+            upper = 2L * lower;
+            Debug.Assert(lower < upper);
+            Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewInt64Range("field4", 4, lower, upper, true, true)));
+        }
+
+        private int CountTerms(MultiTermQuery q)
+        {
+            Terms terms = MultiFields.GetTerms(Reader, q.Field);
+            if (terms == null)
+            {
+                return 0;
+            }
+            TermsEnum termEnum = q.GetTermsEnum(terms);
+            Assert.IsNotNull(termEnum);
+            int count = 0;
+            BytesRef cur, last = null;
+            while ((cur = termEnum.Next()) != null)
+            {
+                count++;
+                if (last != null)
+                {
+                    Assert.IsTrue(last.CompareTo(cur) < 0);
+                }
+                last = BytesRef.DeepCopyOf(cur);
+            }
+            // LUCENE-3314: the results after next() already returned null are undefined,
+            // Assert.IsNull(termEnum.Next());
+            return count;
+        }
+
+        private void CheckTermCounts(int precisionStep, int termCountT, int termCountC)
+        {
+            if (precisionStep == int.MaxValue)
+            {
+                Assert.AreEqual(termCountC, termCountT, "Number of terms should be equal for unlimited precStep");
+            }
+            else
+            {
+                Assert.IsTrue(termCountT <= termCountC, "Number of terms for NRQ should be <= compared to classical TRQ");
+            }
+        }
+
+        [Test]
+        public virtual void TestRandomTrieAndClassicRangeQuery_8bit()
+        {
+            TestRandomTrieAndClassicRangeQuery(8);
+        }
+
+        [Test]
+        public virtual void TestRandomTrieAndClassicRangeQuery_6bit()
+        {
+            TestRandomTrieAndClassicRangeQuery(6);
+        }
+
+        [Test]
+        public virtual void TestRandomTrieAndClassicRangeQuery_4bit()
+        {
+            TestRandomTrieAndClassicRangeQuery(4);
+        }
+
+        [Test]
+        public virtual void TestRandomTrieAndClassicRangeQuery_2bit()
+        {
+            TestRandomTrieAndClassicRangeQuery(2);
+        }
+
+        [Test]
+        public virtual void TestRandomTrieAndClassicRangeQuery_NoTrie()
+        {
+            TestRandomTrieAndClassicRangeQuery(int.MaxValue);
+        }
+
+        private void TestRangeSplit(int precisionStep)
+        {
+            string field = "ascfield" + precisionStep;
+            // between 10 and 20 random tests
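+            // (The assertions below rely on "ascfield" holding consecutive values,
+            // one per document, so each hit count equals the matching range length.)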
+            int num = TestUtil.NextInt(Random(), 10, 20);
+            for (int i = 0; i < num; i++)
+            {
+                long lower = (long)(Random().NextDouble() * NoDocs - NoDocs / 2);
+                long upper = (long)(Random().NextDouble() * NoDocs - NoDocs / 2);
+                if (lower > upper)
+                {
+                    long a = lower;
+                    lower = upper;
+                    upper = a;
+                }
+                // test inclusive range
+                Query tq = NumericRangeQuery.NewInt64Range(field, precisionStep, lower, upper, true, true);
+                TopDocs tTopDocs = Searcher.Search(tq, 1);
+                Assert.AreEqual(upper - lower + 1, tTopDocs.TotalHits, "Returned count of range query must be equal to inclusive range length");
+                // test exclusive range
+                tq = NumericRangeQuery.NewInt64Range(field, precisionStep, lower, upper, false, false);
+                tTopDocs = Searcher.Search(tq, 1);
+                Assert.AreEqual(Math.Max(upper - lower - 1, 0), tTopDocs.TotalHits, "Returned count of range query must be equal to exclusive range length");
+                // test left exclusive range
+                tq = NumericRangeQuery.NewInt64Range(field, precisionStep, lower, upper, false, true);
+                tTopDocs = Searcher.Search(tq, 1);
+                Assert.AreEqual(upper - lower, tTopDocs.TotalHits, "Returned count of range query must be equal to half exclusive range length");
+                // test right exclusive range
+                tq = NumericRangeQuery.NewInt64Range(field, precisionStep, lower, upper, true, false);
+                tTopDocs = Searcher.Search(tq, 1);
+                Assert.AreEqual(upper - lower, tTopDocs.TotalHits, "Returned count of range query must be equal to half exclusive range length");
+            }
+        }
+
+        [Test]
+        public virtual void TestRangeSplit_8bit()
+        {
+            TestRangeSplit(8);
+        }
+
+        [Test]
+        public virtual void TestRangeSplit_6bit()
+        {
+            TestRangeSplit(6);
+        }
+
+        [Test]
+        public virtual void TestRangeSplit_4bit()
+        {
+            TestRangeSplit(4);
+        }
+
+        [Test]
+        public virtual void TestRangeSplit_2bit()
+        {
+            TestRangeSplit(2);
+        }
+
+        /// <summary>
+        /// we fake a double test using the long-to-double conversion of NumericUtils </summary>
+        private void TestDoubleRange(int precisionStep)
+        {
+            string field = "ascfield" + precisionStep;
+            const long lower = -1000L, upper = +2000L;
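+            // SortableInt64ToDouble is the order-preserving inverse of
+            // DoubleToSortableInt64, so the double range below selects exactly the
+            // same documents as the long range [-1000, +2000] would on this field.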
+
+            Query tq = NumericRangeQuery.NewDoubleRange(field, precisionStep, NumericUtils.SortableInt64ToDouble(lower), NumericUtils.SortableInt64ToDouble(upper), true, true);
+            TopDocs tTopDocs = Searcher.Search(tq, 1);
+            Assert.AreEqual(upper - lower + 1, tTopDocs.TotalHits, "Returned count of range query must be equal to inclusive range length");
+
+            Filter tf = NumericRangeFilter.NewDoubleRange(field, precisionStep, NumericUtils.SortableInt64ToDouble(lower), NumericUtils.SortableInt64ToDouble(upper), true, true);
+            tTopDocs = Searcher.Search(new MatchAllDocsQuery(), tf, 1);
+            Assert.AreEqual(upper - lower + 1, tTopDocs.TotalHits, "Returned count of range filter must be equal to inclusive range length");
+        }
+
+        [Test]
+        public virtual void TestDoubleRange_8bit()
+        {
+            TestDoubleRange(8);
+        }
+
+        [Test]
+        public virtual void TestDoubleRange_6bit()
+        {
+            TestDoubleRange(6);
+        }
+
+        [Test]
+        public virtual void TestDoubleRange_4bit()
+        {
+            TestDoubleRange(4);
+        }
+
+        [Test]
+        public virtual void TestDoubleRange_2bit()
+        {
+            TestDoubleRange(2);
+        }
+
+        private void TestSorting(int precisionStep)
+        {
+            string field = "field" + precisionStep;
+            // between 10 and 20 random tests; the index order is ascending,
+            // so using a reverse sort field should return documents in descending order
+            int num = TestUtil.NextInt(Random(), 10, 20);
+            for (int i = 0; i < num; i++)
+            {
+                long lower = (long)(Random().NextDouble() * NoDocs * Distance) + StartOffset;
+                long upper = (long)(Random().NextDouble() * NoDocs * Distance) + StartOffset;
+                if (lower > upper)
+                {
+                    long a = lower;
+                    lower = upper;
+                    upper = a;
+                }
+                Query tq = NumericRangeQuery.NewInt64Range(field, precisionStep, lower, upper, true, true);
+                TopDocs topDocs = Searcher.Search(tq, null, NoDocs, new Sort(new SortField(field, SortFieldType.INT64, true)));
+                if (topDocs.TotalHits == 0)
+                {
+                    continue;
+                }
+                ScoreDoc[] sd = topDocs.ScoreDocs;
+                Assert.IsNotNull(sd);
+                long last = (long)Searcher.Doc(sd[0].Doc).GetField(field).GetNumericValue();
+                for (int j = 1; j < sd.Length; j++)
+                {
+                    long act = (long)Searcher.Doc(sd[j].Doc).GetField(field).GetNumericValue();
+                    Assert.IsTrue(last > act, "Docs should be sorted backwards");
+                    last = act;
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestSorting_8bit()
+        {
+            TestSorting(8);
+        }
+
+        [Test]
+        public virtual void TestSorting_6bit()
+        {
+            TestSorting(6);
+        }
+
+        [Test]
+        public virtual void TestSorting_4bit()
+        {
+            TestSorting(4);
+        }
+
+        [Test]
+        public virtual void TestSorting_2bit()
+        {
+            TestSorting(2);
+        }
+
+        [Test]
+        public virtual void TestEqualsAndHash()
+        {
+            QueryUtils.CheckHashEquals(NumericRangeQuery.NewInt64Range("test1", 4, 10L, 20L, true, true));
+            QueryUtils.CheckHashEquals(NumericRangeQuery.NewInt64Range("test2", 4, 10L, 20L, false, true));
+            QueryUtils.CheckHashEquals(NumericRangeQuery.NewInt64Range("test3", 4, 10L, 20L, true, false));
+            QueryUtils.CheckHashEquals(NumericRangeQuery.NewInt64Range("test4", 4, 10L, 20L, false, false));
+            QueryUtils.CheckHashEquals(NumericRangeQuery.NewInt64Range("test5", 4, 10L, null, true, true));
+            QueryUtils.CheckHashEquals(NumericRangeQuery.NewInt64Range("test6", 4, null, 20L, true, true));
+            QueryUtils.CheckHashEquals(NumericRangeQuery.NewInt64Range("test7", 4, null, null, true, true));
+            QueryUtils.CheckEqual(NumericRangeQuery.NewInt64Range("test8", 4, 10L, 20L, true, true), NumericRangeQuery.NewInt64Range("test8", 4, 10L, 20L, true, true));
+            QueryUtils.CheckUnequal(NumericRangeQuery.NewInt64Range("test9", 4, 10L, 20L, true, true), NumericRangeQuery.NewInt64Range("test9", 8, 10L, 20L, true, true));
+            QueryUtils.CheckUnequal(NumericRangeQuery.NewInt64Range("test10a", 4, 10L, 20L, true, true), NumericRangeQuery.NewInt64Range("test10b", 4, 10L, 20L, true, true));
+            QueryUtils.CheckUnequal(NumericRangeQuery.NewInt64Range("test11", 4, 10L, 20L, true, true), NumericRangeQuery.NewInt64Range("test11", 4, 20L, 10L, true, true));
+            QueryUtils.CheckUnequal(NumericRangeQuery.NewInt64Range("test12", 4, 10L, 20L, true, true), NumericRangeQuery.NewInt64Range("test12", 4, 10L, 20L, false, true));
+            QueryUtils.CheckUnequal(NumericRangeQuery.NewInt64Range("test13", 4, 10L, 20L, true, true), NumericRangeQuery.NewSingleRange("test13", 4, 10f, 20f, true, true));
+            // the difference to the 32-bit int range is tested in TestNumericRangeQuery32
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestPhrasePrefixQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestPhrasePrefixQuery.cs b/src/Lucene.Net.Tests/Search/TestPhrasePrefixQuery.cs
new file mode 100644
index 0000000..d143f18
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestPhrasePrefixQuery.cs
@@ -0,0 +1,108 @@
+using System.Collections.Generic;
+using System.Linq;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MultiFields = Lucene.Net.Index.MultiFields;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+
+    /// <summary>
+    /// This class tests the PhrasePrefixQuery class.
+    /// </summary>
+    [TestFixture]
+    public class TestPhrasePrefixQuery : LuceneTestCase
+    {
+        ///
+        [Test]
+        public virtual void TestPhrasePrefix()
+        {
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Document doc1 = new Document();
+            Document doc2 = new Document();
+            Document doc3 = new Document();
+            Document doc4 = new Document();
+            Document doc5 = new Document();
+            doc1.Add(NewTextField("body", "blueberry pie", Field.Store.YES));
+            doc2.Add(NewTextField("body", "blueberry strudel", Field.Store.YES));
+            doc3.Add(NewTextField("body", "blueberry pizza", Field.Store.YES));
+            doc4.Add(NewTextField("body", "blueberry chewing gum", Field.Store.YES));
+            doc5.Add(NewTextField("body", "piccadilly circus", Field.Store.YES));
+            writer.AddDocument(doc1);
+            writer.AddDocument(doc2);
+            writer.AddDocument(doc3);
+            writer.AddDocument(doc4);
+            writer.AddDocument(doc5);
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            IndexSearcher searcher = NewSearcher(reader);
+
+            // PhrasePrefixQuery query1 = new PhrasePrefixQuery();
+            MultiPhraseQuery query1 = new MultiPhraseQuery();
+            // PhrasePrefixQuery query2 = new PhrasePrefixQuery();
+            MultiPhraseQuery query2 = new MultiPhraseQuery();
+            query1.Add(new Term("body", "blueberry"));
+            query2.Add(new Term("body", "strawberry"));
+
+            LinkedList<Term> termsWithPrefix = new LinkedList<Term>();
+
+            // this TermsEnum gives "piccadilly", "pie" and "pizza".
+            string prefix = "pi";
+            TermsEnum te = MultiFields.GetFields(reader).GetTerms("body").GetIterator(null);
+            te.SeekCeil(new BytesRef(prefix));
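+            // SeekCeil positions the enum on the smallest term >= the prefix, so we
+            // can walk forward in term order until a term no longer starts with "pi".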
+            do
+            {
+                string s = te.Term.Utf8ToString();
+                if (s.StartsWith(prefix))
+                {
+                    termsWithPrefix.AddLast(new Term("body", s));
+                }
+                else
+                {
+                    break;
+                }
+            } while (te.Next() != null);
+
+            query1.Add(termsWithPrefix.ToArray(/*new Term[0]*/));
+            query2.Add(termsWithPrefix.ToArray(/*new Term[0]*/));
+
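+            // query1 ("blueberry" followed by any pi* term) matches "blueberry pie"
+            // and "blueberry pizza"; query2 requires "strawberry", which no document
+            // contains, so it matches nothing.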
+            ScoreDoc[] result;
+            result = searcher.Search(query1, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, result.Length);
+
+            result = searcher.Search(query2, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, result.Length);
+            reader.Dispose();
+            indexStore.Dispose();
+        }
+    }
+}
\ No newline at end of file


[68/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Removed IllegalStateException and replaced references with System.InvalidOperationException.

Posted by ni...@apache.org.
Lucene.Net.TestFramework: Removed IllegalStateException and replaced references with System.InvalidOperationException.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/ab626cef
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/ab626cef
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/ab626cef

Branch: refs/heads/api-work
Commit: ab626cef7a0185ff0ea303cb582639116baa6cb6
Parents: 971b438
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Feb 26 03:58:47 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Mon Feb 27 06:18:01 2017 +0700

----------------------------------------------------------------------
 .../Lucene.Net.TestFramework.csproj             |  3 --
 .../Randomized/IllegalStateException.cs         | 35 --------------------
 .../Randomized/RandomizedContext.cs             |  2 +-
 .../Randomized/SingleThreadedRandom.cs          |  2 +-
 4 files changed, 2 insertions(+), 40 deletions(-)
----------------------------------------------------------------------
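
The replacement follows the usual Java-to-.NET exception mapping: code that
previously threw the ported IllegalStateException type now throws the BCL's
System.InvalidOperationException directly. A minimal sketch of the pattern
(the Connection/Send names are hypothetical, not taken from this commit):

    using System;

    public class Connection
    {
        private bool isOpen;

        public void Open() => isOpen = true;

        public void Send(byte[] payload)
        {
            // Where Java code would throw IllegalStateException, idiomatic
            // .NET code throws InvalidOperationException instead.
            if (!isOpen)
                throw new InvalidOperationException("Connection is not open.");
            // ... transmit payload ...
        }
    }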


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ab626cef/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
index c7b9446..c6ad07b 100644
--- a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
+++ b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
@@ -332,9 +332,6 @@
     <Compile Include="Randomized\Generators\RandomInts.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Randomized\IllegalStateException.cs">
-      <SubType>Code</SubType>
-    </Compile>
     <Compile Include="Randomized\InternalAssumptionViolatedException.cs">
       <SubType>Code</SubType>
     </Compile>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ab626cef/src/Lucene.Net.TestFramework/Randomized/IllegalStateException.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Randomized/IllegalStateException.cs b/src/Lucene.Net.TestFramework/Randomized/IllegalStateException.cs
deleted file mode 100644
index a942d28..0000000
--- a/src/Lucene.Net.TestFramework/Randomized/IllegalStateException.cs
+++ /dev/null
@@ -1,35 +0,0 @@
-\ufeff/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-using System;
-using System.Diagnostics;
-
-
-namespace Lucene.Net.Randomized
-{
-#if FEATURE_SERIALIZABLE
-    [Serializable]
-#endif
-    public class IllegalStateException : Exception
-    {
-        public IllegalStateException() { }
-
-        public IllegalStateException(string message) : base(message) { }
-
-        public IllegalStateException(string message, Exception inner) : base(message, inner) { }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ab626cef/src/Lucene.Net.TestFramework/Randomized/RandomizedContext.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Randomized/RandomizedContext.cs b/src/Lucene.Net.TestFramework/Randomized/RandomizedContext.cs
index 0f3f6b8..752ef32 100644
--- a/src/Lucene.Net.TestFramework/Randomized/RandomizedContext.cs
+++ b/src/Lucene.Net.TestFramework/Randomized/RandomizedContext.cs
@@ -131,7 +131,7 @@ namespace Lucene.Net.Randomized
                 var message = "No context information for thread," + thread.Name + ". " +
                             "Is this thread running under a " + typeof(RandomizedRunner).Name + " context? ";
 
-                throw new IllegalStateException(message);
+                throw new InvalidOperationException(message);
             }
 
             lock (context.contextLock)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ab626cef/src/Lucene.Net.TestFramework/Randomized/SingleThreadedRandom.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Randomized/SingleThreadedRandom.cs b/src/Lucene.Net.TestFramework/Randomized/SingleThreadedRandom.cs
index 3f75ffe..c3c6cb3 100644
--- a/src/Lucene.Net.TestFramework/Randomized/SingleThreadedRandom.cs
+++ b/src/Lucene.Net.TestFramework/Randomized/SingleThreadedRandom.cs
@@ -102,7 +102,7 @@ namespace Lucene.Net.Randomized
                                 " and must not be shared.  The current thread is " + Thread.CurrentThread.Name + ".";
 
                 throw new InvalidOperationException(message,
-                    new IllegalStateException("The instance was illegally accessed\n" + this.trace));
+                    new Exception("The instance was illegally accessed\n" + this.trace));
             }
         }
 


[32/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
new file mode 100644
index 0000000..1c3a56a
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs
@@ -0,0 +1,2888 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.Threading;
+using Lucene.Net.Documents;
+using Lucene.Net.Search;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Analysis;
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using Lucene.Net.Util;
+    using NUnit.Framework;
+    using System.Diagnostics;
+    using System.IO;
+    using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
+    using Automaton = Lucene.Net.Util.Automaton.Automaton;
+    using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper;
+    using BasicAutomata = Lucene.Net.Util.Automaton.BasicAutomata;
+
+    //using SimpleTextCodec = Lucene.Net.Codecs.simpletext.SimpleTextCodec;
+    using BinaryDocValuesField = BinaryDocValuesField;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CharacterRunAutomaton = Lucene.Net.Util.Automaton.CharacterRunAutomaton;
+    using CharTermAttribute = Lucene.Net.Analysis.TokenAttributes.CharTermAttribute;
+    using Constants = Lucene.Net.Util.Constants;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using Lock = Lucene.Net.Store.Lock;
+    using LockFactory = Lucene.Net.Store.LockFactory;
+    using LockObtainFailedException = Lucene.Net.Store.LockObtainFailedException;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MatchAllDocsQuery = Lucene.Net.Search.MatchAllDocsQuery;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using NoLockFactory = Lucene.Net.Store.NoLockFactory;
+    using NumericDocValuesField = NumericDocValuesField;
+    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
+    using PhraseQuery = Lucene.Net.Search.PhraseQuery;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using ScoreDoc = Lucene.Net.Search.ScoreDoc;
+    using SimpleFSLockFactory = Lucene.Net.Store.SimpleFSLockFactory;
+    using SingleInstanceLockFactory = Lucene.Net.Store.SingleInstanceLockFactory;
+    using SortedDocValuesField = SortedDocValuesField;
+    using SortedSetDocValuesField = SortedSetDocValuesField;
+    using StoredField = StoredField;
+    using StringField = StringField;
+    using TermQuery = Lucene.Net.Search.TermQuery;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestIndexWriter : LuceneTestCase
+    {
+        private static readonly FieldType StoredTextType = new FieldType(TextField.TYPE_NOT_STORED);
+
+        [Test]
+        public virtual void TestDocCount()
+        {
+            Directory dir = NewDirectory();
+
+            IndexWriter writer = null;
+            IndexReader reader = null;
+            int i;
+
+            long savedWriteLockTimeout = IndexWriterConfig.DefaultWriteLockTimeout;
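+            // Temporarily raise the static default write-lock timeout; the finally
+            // block restores the saved value so other tests are unaffected.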
+            try
+            {
+                IndexWriterConfig.DefaultWriteLockTimeout = 2000;
+                Assert.AreEqual(2000, IndexWriterConfig.DefaultWriteLockTimeout);
+                writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            }
+            finally
+            {
+                IndexWriterConfig.DefaultWriteLockTimeout = savedWriteLockTimeout;
+            }
+
+            // add 100 documents
+            for (i = 0; i < 100; i++)
+            {
+                AddDocWithIndex(writer, i);
+            }
+            Assert.AreEqual(100, writer.MaxDoc);
+            writer.Dispose();
+
+            // delete 40 documents
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
+            for (i = 0; i < 40; i++)
+            {
+                writer.DeleteDocuments(new Term("id", "" + i));
+            }
+            writer.Dispose();
+
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(60, reader.NumDocs);
+            reader.Dispose();
+
+            // merge the index down and check that the new doc count is correct
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Assert.AreEqual(60, writer.NumDocs);
+            writer.ForceMerge(1);
+            Assert.AreEqual(60, writer.MaxDoc);
+            Assert.AreEqual(60, writer.NumDocs);
+            writer.Dispose();
+
+            // check that the index reader gives the same numbers.
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(60, reader.MaxDoc);
+            Assert.AreEqual(60, reader.NumDocs);
+            reader.Dispose();
+
+            // make sure opening a new index for create over
+            // this existing one works correctly:
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE));
+            Assert.AreEqual(0, writer.MaxDoc);
+            Assert.AreEqual(0, writer.NumDocs);
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// LUCENENET specific:
+        /// Changed from an internal static method to private to remove
+        /// inter-dependencies between the TestIndexWriter*.cs, TestAddIndexes.cs,
+        /// and TestDeletionPolicy.cs tests
+        /// </summary>
+        private void AddDoc(IndexWriter writer)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("content", "aaa", Field.Store.NO));
+            writer.AddDocument(doc);
+        }
+
+        /// <summary>
+        /// LUCENENET specific:
+        /// Changed from an internal static method to private to remove
+        /// inter-dependencies between the TestIndexWriter*.cs, TestAddIndexes.cs,
+        /// and TestDeletionPolicy.cs tests
+        /// </summary>
+        private void AddDocWithIndex(IndexWriter writer, int index)
+        {
+            Document doc = new Document();
+            doc.Add(NewField("content", "aaa " + index, StoredTextType));
+            doc.Add(NewField("id", "" + index, StoredTextType));
+            writer.AddDocument(doc);
+        }
+
+        public static void AssertNoUnreferencedFiles(Directory dir, string message)
+        {
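+            // Opening a writer on the directory and rolling back must not add or
+            // remove files; compare sorted directory listings taken before and after.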
+            string[] startFiles = dir.ListAll();
+            (new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())))).Rollback();
+            string[] endFiles = dir.ListAll();
+
+            Array.Sort(startFiles);
+            Array.Sort(endFiles);
+
+            if (!Arrays.Equals(startFiles, endFiles))
+            {
+                Assert.Fail(message + ": before delete:\n    " + ArrayToString(startFiles) + "\n  after delete:\n    " + ArrayToString(endFiles));
+            }
+        }
+
+        internal static string ArrayToString(string[] l)
+        {
+            string s = "";
+            for (int i = 0; i < l.Length; i++)
+            {
+                if (i > 0)
+                {
+                    s += "\n    ";
+                }
+                s += l[i];
+            }
+            return s;
+        }
+
+        // Make sure we can open an index for create even when a
+        // reader holds it open (this fails with pre-lock-less
+        // commits on Windows):
+        [Test]
+        public virtual void TestCreateWithReader()
+        {
+            Directory dir = NewDirectory();
+
+            // add one document & close writer
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            AddDoc(writer);
+            writer.Dispose();
+
+            // now open reader:
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(reader.NumDocs, 1, "should be one document");
+
+            // now open index for create:
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE));
+            Assert.AreEqual(writer.MaxDoc, 0, "should be zero documents");
+            AddDoc(writer);
+            writer.Dispose();
+
+            Assert.AreEqual(reader.NumDocs, 1, "should be one document");
+            IndexReader reader2 = DirectoryReader.Open(dir);
+            Assert.AreEqual(reader2.NumDocs, 1, "should be one document");
+            reader.Dispose();
+            reader2.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestChangesAfterClose([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
+        {
+            Directory dir = NewDirectory();
+
+            IndexWriter writer = null;
+
+            var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(scheduler);
+            writer = new IndexWriter(dir, config);
+            AddDoc(writer);
+
+            // close
+            writer.Dispose();
+            try
+            {
+                AddDoc(writer);
+                Assert.Fail("did not hit AlreadyClosedException");
+            }
+#pragma warning disable 168
+            catch (AlreadyClosedException e)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIndexNoDocuments()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer.Commit();
+            writer.Dispose();
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(0, reader.MaxDoc);
+            Assert.AreEqual(0, reader.NumDocs);
+            reader.Dispose();
+
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.APPEND));
+            writer.Commit();
+            writer.Dispose();
+
+            reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(0, reader.MaxDoc);
+            Assert.AreEqual(0, reader.NumDocs);
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestManyFields()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(10));
+            for (int j = 0; j < 100; j++)
+            {
+                Document doc = new Document();
+                doc.Add(NewField("a" + j, "aaa" + j, StoredTextType));
+                doc.Add(NewField("b" + j, "aaa" + j, StoredTextType));
+                doc.Add(NewField("c" + j, "aaa" + j, StoredTextType));
+                doc.Add(NewField("d" + j, "aaa", StoredTextType));
+                doc.Add(NewField("e" + j, "aaa", StoredTextType));
+                doc.Add(NewField("f" + j, "aaa", StoredTextType));
+                writer.AddDocument(doc);
+            }
+            writer.Dispose();
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(100, reader.MaxDoc);
+            Assert.AreEqual(100, reader.NumDocs);
+            for (int j = 0; j < 100; j++)
+            {
+                Assert.AreEqual(1, reader.DocFreq(new Term("a" + j, "aaa" + j)));
+                Assert.AreEqual(1, reader.DocFreq(new Term("b" + j, "aaa" + j)));
+                Assert.AreEqual(1, reader.DocFreq(new Term("c" + j, "aaa" + j)));
+                Assert.AreEqual(1, reader.DocFreq(new Term("d" + j, "aaa")));
+                Assert.AreEqual(1, reader.DocFreq(new Term("e" + j, "aaa")));
+                Assert.AreEqual(1, reader.DocFreq(new Term("f" + j, "aaa")));
+            }
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSmallRAMBuffer()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetRAMBufferSizeMB(0.000001).SetMergePolicy(NewLogMergePolicy(10)));
+            int lastNumFile = dir.ListAll().Length;
+            for (int j = 0; j < 9; j++)
+            {
+                Document doc = new Document();
+                doc.Add(NewField("field", "aaa" + j, StoredTextType));
+                writer.AddDocument(doc);
+                int numFile = dir.ListAll().Length;
+                // Verify that with a tiny RAM buffer we see new
+                // segment after every doc
+                Assert.IsTrue(numFile > lastNumFile);
+                lastNumFile = numFile;
+            }
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        // Make sure it's OK to change RAM buffer size and
+        // maxBufferedDocs in a write session
+        [Test]
+        public virtual void TestChangingRAMBuffer()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer.Config.SetMaxBufferedDocs(10);
+            writer.Config.SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+
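+            // Alternate the flush trigger between a doc-count limit and a tiny RAM
+            // buffer; after each added document, check that the flush counter advanced
+            // only when the currently active trigger should have fired.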
+            int lastFlushCount = -1;
+            for (int j = 1; j < 52; j++)
+            {
+                Document doc = new Document();
+                doc.Add(new Field("field", "aaa" + j, StoredTextType));
+                writer.AddDocument(doc);
+                TestUtil.SyncConcurrentMerges(writer);
+                int flushCount = writer.FlushCount;
+                if (j == 1)
+                {
+                    lastFlushCount = flushCount;
+                }
+                else if (j < 10)
+                // No new files should be created
+                {
+                    Assert.AreEqual(flushCount, lastFlushCount);
+                }
+                else if (10 == j)
+                {
+                    Assert.IsTrue(flushCount > lastFlushCount);
+                    lastFlushCount = flushCount;
+                    writer.Config.SetRAMBufferSizeMB(0.000001);
+                    writer.Config.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+                }
+                else if (j < 20)
+                {
+                    Assert.IsTrue(flushCount > lastFlushCount);
+                    lastFlushCount = flushCount;
+                }
+                else if (20 == j)
+                {
+                    writer.Config.SetRAMBufferSizeMB(16);
+                    writer.Config.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+                    lastFlushCount = flushCount;
+                }
+                else if (j < 30)
+                {
+                    Assert.AreEqual(flushCount, lastFlushCount);
+                }
+                else if (30 == j)
+                {
+                    writer.Config.SetRAMBufferSizeMB(0.000001);
+                    writer.Config.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+                }
+                else if (j < 40)
+                {
+                    Assert.IsTrue(flushCount > lastFlushCount);
+                    lastFlushCount = flushCount;
+                }
+                else if (40 == j)
+                {
+                    writer.Config.SetMaxBufferedDocs(10);
+                    writer.Config.SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+                    lastFlushCount = flushCount;
+                }
+                else if (j < 50)
+                {
+                    Assert.AreEqual(flushCount, lastFlushCount);
+                    writer.Config.SetMaxBufferedDocs(10);
+                    writer.Config.SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+                }
+                else if (50 == j)
+                {
+                    Assert.IsTrue(flushCount > lastFlushCount);
+                }
+            }
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestChangingRAMBuffer2()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer.Config.SetMaxBufferedDocs(10);
+            writer.Config.SetMaxBufferedDeleteTerms(10);
+            writer.Config.SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+
+            for (int j = 1; j < 52; j++)
+            {
+                Document doc = new Document();
+                doc.Add(new Field("field", "aaa" + j, StoredTextType));
+                writer.AddDocument(doc);
+            }
+
+            int lastFlushCount = -1;
+            for (int j = 1; j < 52; j++)
+            {
+                writer.DeleteDocuments(new Term("field", "aaa" + j));
+                TestUtil.SyncConcurrentMerges(writer);
+                int flushCount = writer.FlushCount;
+
+                if (j == 1)
+                {
+                    lastFlushCount = flushCount;
+                }
+                else if (j < 10)
+                {
+                    // No new files should be created
+                    Assert.AreEqual(flushCount, lastFlushCount);
+                }
+                else if (10 == j)
+                {
+                    Assert.IsTrue(flushCount > lastFlushCount, "" + j);
+                    lastFlushCount = flushCount;
+                    writer.Config.SetRAMBufferSizeMB(0.000001);
+                    writer.Config.SetMaxBufferedDeleteTerms(1);
+                }
+                else if (j < 20)
+                {
+                    Assert.IsTrue(flushCount > lastFlushCount);
+                    lastFlushCount = flushCount;
+                }
+                else if (20 == j)
+                {
+                    writer.Config.SetRAMBufferSizeMB(16);
+                    writer.Config.SetMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+                    lastFlushCount = flushCount;
+                }
+                else if (j < 30)
+                {
+                    Assert.AreEqual(flushCount, lastFlushCount);
+                }
+                else if (30 == j)
+                {
+                    writer.Config.SetRAMBufferSizeMB(0.000001);
+                    writer.Config.SetMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+                    writer.Config.SetMaxBufferedDeleteTerms(1);
+                }
+                else if (j < 40)
+                {
+                    Assert.IsTrue(flushCount > lastFlushCount);
+                    lastFlushCount = flushCount;
+                }
+                else if (40 == j)
+                {
+                    writer.Config.SetMaxBufferedDeleteTerms(10);
+                    writer.Config.SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+                    lastFlushCount = flushCount;
+                }
+                else if (j < 50)
+                {
+                    Assert.AreEqual(flushCount, lastFlushCount);
+                    writer.Config.SetMaxBufferedDeleteTerms(10);
+                    writer.Config.SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+                }
+                else if (50 == j)
+                {
+                    Assert.IsTrue(flushCount > lastFlushCount);
+                }
+            }
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDiverseDocs()
+        {
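+            // Stress in-memory indexing with three document shapes: all-unique
+            // terms, one heavily repeated term, and very long terms, all under
+            // a small 0.5 MB RAM buffer.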
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetRAMBufferSizeMB(0.5));
+            int n = AtLeast(1);
+            for (int i = 0; i < n; i++)
+            {
+                // First, docs where every term is unique (heavy on
+                // Posting instances)
+                for (int j = 0; j < 100; j++)
+                {
+                    Document doc = new Document();
+                    for (int k = 0; k < 100; k++)
+                    {
+                        doc.Add(NewField("field", Convert.ToString(Random().Next()), StoredTextType));
+                    }
+                    writer.AddDocument(doc);
+                }
+
+                // Next, many single term docs where only one term
+                // occurs (heavy on byte blocks)
+                for (int j = 0; j < 100; j++)
+                {
+                    Document doc = new Document();
+                    doc.Add(NewField("field", "aaa aaa aaa aaa aaa aaa aaa aaa aaa aaa", StoredTextType));
+                    writer.AddDocument(doc);
+                }
+
+                // Next, many single term docs where only one term
+                // occurs but the terms are very long (heavy on
+                // char[] arrays)
+                for (int j = 0; j < 100; j++)
+                {
+                    StringBuilder b = new StringBuilder();
+                    string x = Convert.ToString(j) + ".";
+                    for (int k = 0; k < 1000; k++)
+                    {
+                        b.Append(x);
+                    }
+                    string longTerm = b.ToString();
+
+                    Document doc = new Document();
+                    doc.Add(NewField("field", longTerm, StoredTextType));
+                    writer.AddDocument(doc);
+                }
+            }
+            writer.Dispose();
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            IndexSearcher searcher = NewSearcher(reader);
+            int totalHits = searcher.Search(new TermQuery(new Term("field", "aaa")), null, 1).TotalHits;
+            Assert.AreEqual(n * 100, totalHits);
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestEnablingNorms()
+        {
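+            // Norms are omitted on every doc except one, both before and after
+            // a flush; searches must still hit every doc.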
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(10));
+            // Enable norms for only 1 doc, pre flush
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.OmitNorms = true;
+            for (int j = 0; j < 10; j++)
+            {
+                Document doc = new Document();
+                Field f = null;
+                if (j != 8)
+                {
+                    f = NewField("field", "aaa", customType);
+                }
+                else
+                {
+                    f = NewField("field", "aaa", StoredTextType);
+                }
+                doc.Add(f);
+                writer.AddDocument(doc);
+            }
+            writer.Dispose();
+
+            Term searchTerm = new Term("field", "aaa");
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            IndexSearcher searcher = NewSearcher(reader);
+            ScoreDoc[] hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
+            Assert.AreEqual(10, hits.Length);
+            reader.Dispose();
+
+            writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(10));
+            // Enable norms for only 1 doc, post flush
+            for (int j = 0; j < 27; j++)
+            {
+                Document doc = new Document();
+                Field f = null;
+                if (j != 26)
+                {
+                    f = NewField("field", "aaa", customType);
+                }
+                else
+                {
+                    f = NewField("field", "aaa", StoredTextType);
+                }
+                doc.Add(f);
+                writer.AddDocument(doc);
+            }
+            writer.Dispose();
+            reader = DirectoryReader.Open(dir);
+            searcher = NewSearcher(reader);
+            hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
+            Assert.AreEqual(27, hits.Length);
+            reader.Dispose();
+
+            reader = DirectoryReader.Open(dir);
+            reader.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestHighFreqTerm()
+        {
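+            // One massive doc containing 128K occurrences of a single term,
+            // indexed under a tiny RAM buffer; the term's frequency must
+            // round-trip intact.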
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetRAMBufferSizeMB(0.01));
+            // Massive doc that has 128 K a's
+            StringBuilder b = new StringBuilder(1024 * 1024);
+            for (int i = 0; i < 4096; i++)
+            {
+                b.Append(" a a a a a a a a");
+                b.Append(" a a a a a a a a");
+                b.Append(" a a a a a a a a");
+                b.Append(" a a a a a a a a");
+            }
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+            doc.Add(NewField("field", b.ToString(), customType));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(1, reader.MaxDoc);
+            Assert.AreEqual(1, reader.NumDocs);
+            Term t = new Term("field", "a");
+            Assert.AreEqual(1, reader.DocFreq(t));
+            DocsEnum td = TestUtil.Docs(Random(), reader, "field", new BytesRef("a"), MultiFields.GetLiveDocs(reader), null, DocsEnum.FLAG_FREQS);
+            td.NextDoc();
+            Assert.AreEqual(128 * 1024, td.Freq);
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        // Helper class for TestNullLockFactory
+        public class MyRAMDirectory : MockDirectoryWrapper
+        {
+            private LockFactory myLockFactory;
+
+            public MyRAMDirectory(Directory @delegate)
+                : base(Random(), @delegate)
+            {
+                LockFactory_Renamed = null;
+                myLockFactory = new SingleInstanceLockFactory();
+            }
+
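+            // Route every lock request through the private lock factory,
+            // bypassing the base Directory's LockFactory entirely.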
+            public override Lock MakeLock(string name)
+            {
+                return myLockFactory.MakeLock(name);
+            }
+        }
+
+        // Make sure that a Directory implementation that does
+        // not use LockFactory at all (i.e. overrides MakeLock and
+        // implements its own private locking) works OK.  This
+        // was raised on java-dev as a loss of backwards
+        // compatibility.
+        [Test]
+        public virtual void TestNullLockFactory()
+        {
+            Directory dir = new MyRAMDirectory(new RAMDirectory());
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            for (int i = 0; i < 100; i++)
+            {
+                AddDoc(writer);
+            }
+            writer.Dispose();
+            Term searchTerm = new Term("content", "aaa");
+            IndexReader reader = DirectoryReader.Open(dir);
+            IndexSearcher searcher = NewSearcher(reader);
+            ScoreDoc[] hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
+            Assert.AreEqual(100, hits.Length, "did not get right number of hits");
+            reader.Dispose();
+
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE));
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestFlushWithNoMerging()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(10)));
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+            doc.Add(NewField("field", "aaa", customType));
+            for (int i = 0; i < 19; i++)
+            {
+                writer.AddDocument(doc);
+            }
+            writer.Flush(false, true);
+            writer.Dispose();
+            SegmentInfos sis = new SegmentInfos();
+            sis.Read(dir);
+            // Since we flushed without allowing merging we should now have
+            // 10 segments: 9 auto-flushes at maxBufferedDocs=2 plus the final
+            // explicit flush of the one remaining buffered doc
+            Assert.AreEqual(10, sis.Count);
+            dir.Dispose();
+        }
+
+        // Make sure we can flush segment w/ norms, then add
+        // empty doc (no norms) and flush
+        [Test]
+        public virtual void TestEmptyDocAfterFlushingRealDoc()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+            doc.Add(NewField("field", "aaa", customType));
+            writer.AddDocument(doc);
+            writer.Commit();
+            if (VERBOSE)
+            {
+                Console.WriteLine("\nTEST: now add empty doc");
+            }
+            writer.AddDocument(new Document());
+            writer.Dispose();
+            IndexReader reader = DirectoryReader.Open(dir);
+            Assert.AreEqual(2, reader.NumDocs);
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// Test that no NullReferenceException will be raised
+        /// when adding one document with a single, empty field
+        /// and term vectors enabled.
+        /// </summary>
+        [Test]
+        public virtual void TestBadSegment()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter iw = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            Document document = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            document.Add(NewField("tvtest", "", customType));
+            iw.AddDocument(document);
+            iw.Dispose();
+            dir.Dispose();
+        }
+
+#if !NETSTANDARD //NOTE: Cannot set ThreadPriority in .NET Core.
+        // LUCENE-1036
+        [Test]
+        public virtual void TestMaxThreadPriority()
+        {
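+            // Index and merge while the current thread runs at Highest
+            // priority, restoring the original priority afterwards.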
+            ThreadPriority pri = ThreadClass.Current().Priority;
+            try
+            {
+                Directory dir = NewDirectory();
+                IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy());
+                ((LogMergePolicy)conf.MergePolicy).MergeFactor = 2;
+                IndexWriter iw = new IndexWriter(dir, conf);
+                Document document = new Document();
+                FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+                customType.StoreTermVectors = true;
+                document.Add(NewField("tvtest", "a b c", customType));
+                Thread.CurrentThread.Priority = ThreadPriority.Highest;
+                for (int i = 0; i < 4; i++)
+                {
+                    iw.AddDocument(document);
+                }
+                iw.Dispose();
+                dir.Dispose();
+            }
+            finally
+            {
+                Thread.CurrentThread.Priority = pri;
+            }
+        }
+#endif
+
+        [Test]
+        public virtual void TestVariableSchema()
+        {
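+            // Each iteration indexes docs with a different mix of fields and
+            // field types, periodically forcing a merge so mixed per-segment
+            // schemas must reconcile cleanly.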
+            Directory dir = NewDirectory();
+            for (int i = 0; i < 20; i++)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: iter=" + i);
+                }
+                IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy()));
+                //LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
+                //lmp.setMergeFactor(2);
+                //lmp.setNoCFSRatio(0.0);
+                Document doc = new Document();
+                string contents = "aa bb cc dd ee ff gg hh ii jj kk";
+
+                FieldType customType = new FieldType(TextField.TYPE_STORED);
+                FieldType type = null;
+                if (i == 7)
+                {
+                    // Add empty docs here
+                    doc.Add(NewTextField("content3", "", Field.Store.NO));
+                }
+                else
+                {
+                    if (i % 2 == 0)
+                    {
+                        doc.Add(NewField("content4", contents, customType));
+                        type = customType;
+                    }
+                    else
+                    {
+                        type = TextField.TYPE_NOT_STORED;
+                    }
+                    doc.Add(NewTextField("content1", contents, Field.Store.NO));
+                    doc.Add(NewField("content3", "", customType));
+                    doc.Add(NewField("content5", "", type));
+                }
+
+                for (int j = 0; j < 4; j++)
+                {
+                    writer.AddDocument(doc);
+                }
+
+                writer.Dispose();
+
+                if (0 == i % 4)
+                {
+                    writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+                    //LogMergePolicy lmp2 = (LogMergePolicy) writer.getConfig().getMergePolicy();
+                    //lmp2.setNoCFSRatio(0.0);
+                    writer.ForceMerge(1);
+                    writer.Dispose();
+                }
+            }
+            dir.Dispose();
+        }
+
+        // LUCENE-1084: test unlimited field length
+        [Test]
+        public virtual void TestUnlimitedMaxFieldLength()
+        {
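+            // Index a 10,001-token field ending in "x"; if field length were
+            // limited, the trailing "x" term would be lost.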
+            Directory dir = NewDirectory();
+
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            Document doc = new Document();
+            StringBuilder b = new StringBuilder();
+            for (int i = 0; i < 10000; i++)
+            {
+                b.Append(" a");
+            }
+            b.Append(" x");
+            doc.Add(NewTextField("field", b.ToString(), Field.Store.NO));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            IndexReader reader = DirectoryReader.Open(dir);
+            Term t = new Term("field", "x");
+            Assert.AreEqual(1, reader.DocFreq(t));
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-1179
+        [Test]
+        public virtual void TestEmptyFieldName()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(NewTextField("", "a b c", Field.Store.NO));
+            writer.AddDocument(doc);
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestEmptyFieldNameTerms()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(NewTextField("", "a b c", Field.Store.NO));
+            writer.AddDocument(doc);
+            writer.Dispose();
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader subreader = GetOnlySegmentReader(reader);
+            TermsEnum te = subreader.Fields.GetTerms("").GetIterator(null);
+            Assert.AreEqual(new BytesRef("a"), te.Next());
+            Assert.AreEqual(new BytesRef("b"), te.Next());
+            Assert.AreEqual(new BytesRef("c"), te.Next());
+            Assert.IsNull(te.Next());
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestEmptyFieldNameWithEmptyTerm()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(NewStringField("", "", Field.Store.NO));
+            doc.Add(NewStringField("", "a", Field.Store.NO));
+            doc.Add(NewStringField("", "b", Field.Store.NO));
+            doc.Add(NewStringField("", "c", Field.Store.NO));
+            writer.AddDocument(doc);
+            writer.Dispose();
+            DirectoryReader reader = DirectoryReader.Open(dir);
+            AtomicReader subreader = GetOnlySegmentReader(reader);
+            TermsEnum te = subreader.Fields.GetTerms("").GetIterator(null);
+            Assert.AreEqual(new BytesRef(""), te.Next());
+            Assert.AreEqual(new BytesRef("a"), te.Next());
+            Assert.AreEqual(new BytesRef("b"), te.Next());
+            Assert.AreEqual(new BytesRef("c"), te.Next());
+            Assert.IsNull(te.Next());
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        private sealed class MockIndexWriter : IndexWriter
+        {
+            public MockIndexWriter(Directory dir, IndexWriterConfig conf)
+                : base(dir, conf)
+            {
+            }
+
+            internal bool AfterWasCalled;
+            internal bool BeforeWasCalled;
+
+            protected override void DoAfterFlush()
+            {
+                AfterWasCalled = true;
+            }
+
+            protected override void DoBeforeFlush()
+            {
+                BeforeWasCalled = true;
+            }
+        }
+
+        // LUCENE-1222
+        [Test]
+        public virtual void TestDoBeforeAfterFlush()
+        {
+            Directory dir = NewDirectory();
+            MockIndexWriter w = new MockIndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            doc.Add(NewField("field", "a field", customType));
+            w.AddDocument(doc);
+            w.Commit();
+            Assert.IsTrue(w.BeforeWasCalled);
+            Assert.IsTrue(w.AfterWasCalled);
+            w.BeforeWasCalled = false;
+            w.AfterWasCalled = false;
+            w.DeleteDocuments(new Term("field", "field"));
+            w.Commit();
+            Assert.IsTrue(w.BeforeWasCalled);
+            Assert.IsTrue(w.AfterWasCalled);
+            w.Dispose();
+
+            IndexReader ir = DirectoryReader.Open(dir);
+            Assert.AreEqual(0, ir.NumDocs);
+            ir.Dispose();
+
+            dir.Dispose();
+        }
+
+        // LUCENE-1255
+        [Test]
+        public virtual void TestNegativePositions()
+        {
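+            // This stream's first token has a position increment of 0, which
+            // yields a negative position (-1) that AddDocument must reject.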
+            TokenStream tokens = new TokenStreamAnonymousInnerClassHelper(this);
+
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(new TextField("field", tokens));
+            try
+            {
+                w.AddDocument(doc);
+                Assert.Fail("did not hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iea)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        private class TokenStreamAnonymousInnerClassHelper : TokenStream
+        {
+            private readonly TestIndexWriter OuterInstance;
+
+            public TokenStreamAnonymousInnerClassHelper(TestIndexWriter outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+                termAtt = AddAttribute<ICharTermAttribute>();
+                posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
+                terms = Arrays.AsList("a", "b", "c").GetEnumerator();
+                first = true;
+            }
+
+            internal readonly ICharTermAttribute termAtt;
+            internal readonly IPositionIncrementAttribute posIncrAtt;
+
+            internal readonly IEnumerator<string> terms;
+            internal bool first;
+
+            public sealed override bool IncrementToken()
+            {
+                if (!terms.MoveNext())
+                {
+                    return false;
+                }
+                ClearAttributes();
+                termAtt.Append(terms.Current);
+                posIncrAtt.PositionIncrement = first ? 0 : 1;
+                first = false;
+                return true;
+            }
+        }
+
+        // LUCENE-2529
+        [Test]
+        public virtual void TestPositionIncrementGapEmptyField()
+        {
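+            // The analyzer's position increment gap (100) must be applied for
+            // the empty first field instance, so "crunch" lands at position 100.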
+            Directory dir = NewDirectory();
+            MockAnalyzer analyzer = new MockAnalyzer(Random());
+            analyzer.PositionIncrementGap = 100;
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            Field f = NewField("field", "", customType);
+            Field f2 = NewField("field", "crunch man", customType);
+            doc.Add(f);
+            doc.Add(f2);
+            w.AddDocument(doc);
+            w.Dispose();
+
+            IndexReader r = DirectoryReader.Open(dir);
+            Terms tpv = r.GetTermVectors(0).GetTerms("field");
+            TermsEnum termsEnum = tpv.GetIterator(null);
+            Assert.IsNotNull(termsEnum.Next());
+            DocsAndPositionsEnum dpEnum = termsEnum.DocsAndPositions(null, null);
+            Assert.IsNotNull(dpEnum);
+            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(1, dpEnum.Freq);
+            Assert.AreEqual(100, dpEnum.NextPosition());
+
+            Assert.IsNotNull(termsEnum.Next());
+            dpEnum = termsEnum.DocsAndPositions(null, dpEnum);
+            Assert.IsNotNull(dpEnum);
+            Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(1, dpEnum.Freq);
+            Assert.AreEqual(101, dpEnum.NextPosition());
+            Assert.IsNull(termsEnum.Next());
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDeadlock()
+        {
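+            // Add the same external reader twice via AddIndexes into a writer
+            // holding 3 committed docs; the merged index must contain 5 docs.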
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
+            Document doc = new Document();
+
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+
+            doc.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType));
+            writer.AddDocument(doc);
+            writer.AddDocument(doc);
+            writer.AddDocument(doc);
+            writer.Commit();
+            // index has 2 segments
+
+            Directory dir2 = NewDirectory();
+            IndexWriter writer2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer2.AddDocument(doc);
+            writer2.Dispose();
+
+            IndexReader r1 = DirectoryReader.Open(dir2);
+            writer.AddIndexes(r1, r1);
+            writer.Dispose();
+
+            IndexReader r3 = DirectoryReader.Open(dir);
+            Assert.AreEqual(5, r3.NumDocs);
+            r3.Dispose();
+
+            r1.Dispose();
+
+            dir2.Dispose();
+            dir.Dispose();
+        }
+
+        private class IndexerThreadInterrupt : ThreadClass
+        {
+            private readonly TestIndexWriter OuterInstance;
+
+            internal volatile bool Failed;
+            internal volatile bool Finish;
+
+            internal volatile bool AllowInterrupt = false;
+            internal readonly Random Random;
+            internal readonly Directory Adder;
+
+            internal IndexerThreadInterrupt(TestIndexWriter outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+                this.Random = new Random(Random().Next());
+                // make a little directory for addIndexes
+                // LUCENE-2239: won't work with NIOFS/MMAP
+                Adder = new MockDirectoryWrapper(this.Random, new RAMDirectory());
+                IndexWriterConfig conf = OuterInstance.NewIndexWriterConfig(this.Random, TEST_VERSION_CURRENT, new MockAnalyzer(this.Random));
+                IndexWriter w = new IndexWriter(Adder, conf);
+                Document doc = new Document();
+                doc.Add(OuterInstance.NewStringField(this.Random, "id", "500", Field.Store.NO));
+                doc.Add(OuterInstance.NewField(this.Random, "field", "some prepackaged text contents", StoredTextType));
+                if (DefaultCodecSupportsDocValues())
+                {
+                    doc.Add(new BinaryDocValuesField("binarydv", new BytesRef("500")));
+                    doc.Add(new NumericDocValuesField("numericdv", 500));
+                    doc.Add(new SortedDocValuesField("sorteddv", new BytesRef("500")));
+                }
+                if (DefaultCodecSupportsSortedSet())
+                {
+                    doc.Add(new SortedSetDocValuesField("sortedsetdv", new BytesRef("one")));
+                    doc.Add(new SortedSetDocValuesField("sortedsetdv", new BytesRef("two")));
+                }
+                w.AddDocument(doc);
+                doc = new Document();
+                doc.Add(OuterInstance.NewStringField(this.Random, "id", "501", Field.Store.NO));
+                doc.Add(OuterInstance.NewField(this.Random, "field", "some more contents", StoredTextType));
+                if (DefaultCodecSupportsDocValues())
+                {
+                    doc.Add(new BinaryDocValuesField("binarydv", new BytesRef("501")));
+                    doc.Add(new NumericDocValuesField("numericdv", 501));
+                    doc.Add(new SortedDocValuesField("sorteddv", new BytesRef("501")));
+                }
+                if (DefaultCodecSupportsSortedSet())
+                {
+                    doc.Add(new SortedSetDocValuesField("sortedsetdv", new BytesRef("two")));
+                    doc.Add(new SortedSetDocValuesField("sortedsetdv", new BytesRef("three")));
+                }
+                w.AddDocument(doc);
+                w.DeleteDocuments(new Term("id", "500"));
+                w.Dispose();
+            }
+
+            public override void Run()
+            {
+                // LUCENE-2239: won't work with NIOFS/MMAP
+                MockDirectoryWrapper dir = new MockDirectoryWrapper(Random, new RAMDirectory());
+
+                // When interrupt arrives in w.Dispose(), when it's
+                // writing liveDocs, this can lead to double-write of
+                // _X_N.del:
+                //dir.setPreventDoubleWrite(false);
+                IndexWriter w = null;
+                while (!Finish)
+                {
+                    try
+                    {
+                        while (!Finish)
+                        {
+                            if (w != null)
+                            {
+                                // If interrupt arrives inside here, it's
+                                // fine: we will cycle back and the first
+                                // thing we do is try to close again,
+                                // i.e. we'll never try to open a new writer
+                                // until this one successfully closes:
+                                w.Dispose();
+                                w = null;
+                            }
+                            IndexWriterConfig conf = OuterInstance.NewIndexWriterConfig(Random, TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2);
+                            w = new IndexWriter(dir, conf);
+
+                            Document doc = new Document();
+                            Field idField = OuterInstance.NewStringField(Random, "id", "", Field.Store.NO);
+                            Field binaryDVField = null;
+                            Field numericDVField = null;
+                            Field sortedDVField = null;
+                            Field sortedSetDVField = new SortedSetDocValuesField("sortedsetdv", new BytesRef());
+                            doc.Add(idField);
+                            doc.Add(OuterInstance.NewField(Random, "field", "some text contents", StoredTextType));
+                            if (DefaultCodecSupportsDocValues())
+                            {
+                                binaryDVField = new BinaryDocValuesField("binarydv", new BytesRef());
+                                numericDVField = new NumericDocValuesField("numericdv", 0);
+                                sortedDVField = new SortedDocValuesField("sorteddv", new BytesRef());
+                                doc.Add(binaryDVField);
+                                doc.Add(numericDVField);
+                                doc.Add(sortedDVField);
+                            }
+                            if (DefaultCodecSupportsSortedSet())
+                            {
+                                doc.Add(sortedSetDVField);
+                            }
+                            for (int i = 0; i < 100; i++)
+                            {
+                                idField.SetStringValue(Convert.ToString(i));
+                                if (DefaultCodecSupportsDocValues())
+                                {
+                                    binaryDVField.SetBytesValue(new BytesRef(idField.GetStringValue()));
+                                    numericDVField.SetInt64Value(i);
+                                    sortedDVField.SetBytesValue(new BytesRef(idField.GetStringValue()));
+                                }
+                                sortedSetDVField.SetBytesValue(new BytesRef(idField.GetStringValue()));
+                                int action = Random.Next(100);
+                                if (action == 17)
+                                {
+                                    w.AddIndexes(Adder);
+                                }
+                                else if (action % 30 == 0)
+                                {
+                                    w.DeleteAll();
+                                }
+                                else if (action % 2 == 0)
+                                {
+                                    w.UpdateDocument(new Term("id", idField.GetStringValue()), doc);
+                                }
+                                else
+                                {
+                                    w.AddDocument(doc);
+                                }
+                                if (Random.Next(3) == 0)
+                                {
+                                    IndexReader r = null;
+                                    try
+                                    {
+                                        r = DirectoryReader.Open(w, Random.NextBoolean());
+                                        if (Random.NextBoolean() && r.MaxDoc > 0)
+                                        {
+                                            int docid = Random.Next(r.MaxDoc);
+                                            w.TryDeleteDocument(r, docid);
+                                        }
+                                    }
+                                    finally
+                                    {
+                                        IOUtils.CloseWhileHandlingException(r);
+                                    }
+                                }
+                                if (i % 10 == 0)
+                                {
+                                    w.Commit();
+                                }
+                                if (Random.Next(50) == 0)
+                                {
+                                    w.ForceMerge(1);
+                                }
+                            }
+                            w.Dispose();
+                            w = null;
+                            DirectoryReader.Open(dir).Dispose();
+
+                            // Strangely, if we interrupt a thread before
+                            // all classes are loaded, the class loader
+                            // seems to do scary things with the interrupt
+                            // status.  In java 1.5, it'll throw an
+                            // incorrect ClassNotFoundException.  In java
+                            // 1.6, it'll silently clear the interrupt.
+                            // So, on first iteration through here we
+                            // don't open ourselves up for interrupts
+                            // until we've done the above loop.
+                            AllowInterrupt = true;
+                        }
+                    }
+#if !NETSTANDARD
+                    catch (ThreadInterruptedException re)
+                    {
+                        // NOTE: important to leave this verbosity/noise
+                        // on!!  this test doesn't repro easily so when
+                        // Jenkins hits a fail we need to study where the
+                        // interrupts struck!
+                        Console.WriteLine("TEST: got interrupt");
+                        Console.WriteLine(re.StackTrace);
+                        Exception e = re.InnerException;
+                        Assert.IsTrue(e is ThreadInterruptedException);
+                        if (Finish)
+                        {
+                            break;
+                        }
+                    }
+#endif
+                    catch (Exception t)
+                    {
+                        Console.WriteLine("FAILED; unexpected exception");
+                        Console.WriteLine(t.StackTrace);
+                        Failed = true;
+                        break;
+                    }
+                }
+
+                if (!Failed)
+                {
+                    if (VERBOSE)
+                    {
+                        Console.WriteLine("TEST: now rollback");
+                    }
+                    // clear interrupt state:
+                    //Thread.interrupted();
+                    if (w != null)
+                    {
+                        try
+                        {
+                            w.Rollback();
+                        }
+                        catch (IOException ioe)
+                        {
+                            throw new Exception(ioe.Message, ioe);
+                        }
+                    }
+
+                    try
+                    {
+                        TestUtil.CheckIndex(dir);
+                    }
+                    catch (Exception e)
+                    {
+                        Failed = true;
+                        Console.WriteLine("CheckIndex FAILED: unexpected exception");
+                        Console.WriteLine(e.StackTrace);
+                    }
+                    try
+                    {
+                        IndexReader r = DirectoryReader.Open(dir);
+                        //System.out.println("doc count=" + r.NumDocs);
+                        r.Dispose();
+                    }
+                    catch (Exception e)
+                    {
+                        Failed = true;
+                        Console.WriteLine("DirectoryReader.open FAILED: unexpected exception");
+                        Console.WriteLine(e.StackTrace);
+                    }
+                }
+                try
+                {
+                    IOUtils.Close(dir);
+                }
+                catch (IOException e)
+                {
+                    throw new Exception(e.Message, e);
+                }
+                try
+                {
+                    IOUtils.Close(Adder);
+                }
+                catch (IOException e)
+                {
+                    throw new Exception(e.Message, e);
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestThreadInterruptDeadlock()
+        {
+            IndexerThreadInterrupt t = new IndexerThreadInterrupt(this);
+            t.SetDaemon(true);
+            t.Start();
+
+            // Force class loader to load ThreadInterruptedException
+            // up front... else we can see a false failure if 2nd
+            // interrupt arrives while class loader is trying to
+            // init this class (in servicing a first interrupt):
+            // C# does not have the late load problem.
+            //Assert.IsTrue((new ThreadInterruptedException(new Exception("Thread interrupted"))).InnerException is ThreadInterruptedException);
+
+            // issue at least 300 interrupts to the child thread
+            int numInterrupts = AtLeast(300);
+            int i = 0;
+            while (i < numInterrupts)
+            {
+                // TODO: would be nice to also sometimes interrupt the
+                // CMS merge threads too ...
+                Thread.Sleep(10);
+                if (t.AllowInterrupt)
+                {
+                    i++;
+                    t.Interrupt();
+                }
+                if (!t.IsAlive)
+                {
+                    break;
+                }
+            }
+            t.Finish = true;
+            t.Join();
+            Assert.IsFalse(t.Failed);
+        }
+
+        /// <summary>
+        /// testThreadInterruptDeadlock but with 2 indexer threads </summary>
+        [Test]
+        public virtual void TestTwoThreadsInterruptDeadlock()
+        {
+            IndexerThreadInterrupt t1 = new IndexerThreadInterrupt(this);
+            t1.SetDaemon(true);
+            t1.Start();
+
+            IndexerThreadInterrupt t2 = new IndexerThreadInterrupt(this);
+            t2.SetDaemon(true);
+            t2.Start();
+
+            // Force class loader to load ThreadInterruptedException
+            // up front... else we can see a false failure if 2nd
+            // interrupt arrives while class loader is trying to
+            // init this class (in servicing a first interrupt):
+            // C# does not have the late load problem.
+            //Assert.IsTrue((new ThreadInterruptedException(new Exception("Thread interrupted"))).InnerException is ThreadInterruptedException);
+
+            // issue at least 300 interrupts to the child threads
+            int numInterrupts = AtLeast(300);
+            int i = 0;
+            while (i < numInterrupts)
+            {
+                // TODO: would be nice to also sometimes interrupt the
+                // CMS merge threads too ...
+                Thread.Sleep(10);
+                IndexerThreadInterrupt t = Random().NextBoolean() ? t1 : t2;
+                if (t.AllowInterrupt)
+                {
+                    i++;
+                    t.Interrupt();
+                }
+                if (!t1.IsAlive && !t2.IsAlive)
+                {
+                    break;
+                }
+            }
+            t1.Finish = true;
+            t2.Finish = true;
+            t1.Join();
+            t2.Join();
+            Assert.IsFalse(t1.Failed);
+            Assert.IsFalse(t2.Failed);
+        }
+
+        [Test]
+        public virtual void TestIndexStoreCombos()
+        {
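+            // Mix a binary stored-but-indexed field (via explicit token
+            // streams) with a regular text field and verify stored values
+            // and indexed terms survive flushes and a forced merge.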
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            var b = new byte[50];
+            for (int i = 0; i < 50; i++)
+            {
+                b[i] = (byte)(i + 77);
+            }
+
+            Document doc = new Document();
+
+            FieldType customType = new FieldType(StoredField.TYPE);
+            customType.IsTokenized = true;
+
+            Field f = new Field("binary", b, 10, 17, customType);
+            customType.IsIndexed = true;
+            f.SetTokenStream(new MockTokenizer(new StringReader("doc1field1"), MockTokenizer.WHITESPACE, false));
+
+            FieldType customType2 = new FieldType(TextField.TYPE_STORED);
+
+            Field f2 = NewField("string", "value", customType2);
+            f2.SetTokenStream(new MockTokenizer(new StringReader("doc1field2"), MockTokenizer.WHITESPACE, false));
+            doc.Add(f);
+            doc.Add(f2);
+            w.AddDocument(doc);
+
+            // add 2 docs to test in-memory merging
+            f.SetTokenStream(new MockTokenizer(new StringReader("doc2field1"), MockTokenizer.WHITESPACE, false));
+            f2.SetTokenStream(new MockTokenizer(new StringReader("doc2field2"), MockTokenizer.WHITESPACE, false));
+            w.AddDocument(doc);
+
+            // force segment flush so we can force a segment merge with doc3 later.
+            w.Commit();
+
+            f.SetTokenStream(new MockTokenizer(new StringReader("doc3field1"), MockTokenizer.WHITESPACE, false));
+            f2.SetTokenStream(new MockTokenizer(new StringReader("doc3field2"), MockTokenizer.WHITESPACE, false));
+
+            w.AddDocument(doc);
+            w.Commit();
+            w.ForceMerge(1); // force segment merge.
+            w.Dispose();
+
+            IndexReader ir = DirectoryReader.Open(dir);
+            Document doc2 = ir.Document(0);
+            IIndexableField f3 = doc2.GetField("binary");
+            b = f3.GetBinaryValue().Bytes;
+            Assert.IsNotNull(b);
+            Assert.AreEqual(17, b.Length);
+            Assert.AreEqual(87, b[0]);
+
+            Assert.IsTrue(ir.Document(0).GetField("binary").GetBinaryValue() != null);
+            Assert.IsTrue(ir.Document(1).GetField("binary").GetBinaryValue() != null);
+            Assert.IsTrue(ir.Document(2).GetField("binary").GetBinaryValue() != null);
+
+            Assert.AreEqual("value", ir.Document(0).Get("string"));
+            Assert.AreEqual("value", ir.Document(1).Get("string"));
+            Assert.AreEqual("value", ir.Document(2).Get("string"));
+
+            // test that the terms were indexed.
+            Assert.IsTrue(TestUtil.Docs(Random(), ir, "binary", new BytesRef("doc1field1"), null, null, DocsEnum.FLAG_NONE).NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.IsTrue(TestUtil.Docs(Random(), ir, "binary", new BytesRef("doc2field1"), null, null, DocsEnum.FLAG_NONE).NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.IsTrue(TestUtil.Docs(Random(), ir, "binary", new BytesRef("doc3field1"), null, null, DocsEnum.FLAG_NONE).NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.IsTrue(TestUtil.Docs(Random(), ir, "string", new BytesRef("doc1field2"), null, null, DocsEnum.FLAG_NONE).NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.IsTrue(TestUtil.Docs(Random(), ir, "string", new BytesRef("doc2field2"), null, null, DocsEnum.FLAG_NONE).NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.IsTrue(TestUtil.Docs(Random(), ir, "string", new BytesRef("doc3field2"), null, null, DocsEnum.FLAG_NONE).NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNoDocsIndex()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer.AddDocument(new Document());
+            writer.Dispose();
+
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestIndexDivisor()
+        {
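+            // 300 unique terms (more than 256) with termIndexInterval=2:
+            // every term must remain enumerable and match exactly one doc.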
+            Directory dir = NewDirectory();
+            IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            config.SetTermIndexInterval(2);
+            IndexWriter w = new IndexWriter(dir, config);
+            StringBuilder s = new StringBuilder();
+            // must be > 256
+            for (int i = 0; i < 300; i++)
+            {
+                s.Append(' ').Append(i);
+            }
+            Document d = new Document();
+            Field f = NewTextField("field", s.ToString(), Field.Store.NO);
+            d.Add(f);
+            w.AddDocument(d);
+
+            AtomicReader r = GetOnlySegmentReader(w.Reader);
+            TermsEnum t = r.Fields.GetTerms("field").GetIterator(null);
+            int count = 0;
+            while (t.Next() != null)
+            {
+                DocsEnum docs = TestUtil.Docs(Random(), t, null, null, DocsEnum.FLAG_NONE);
+                Assert.AreEqual(0, docs.NextDoc());
+                Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docs.NextDoc());
+                count++;
+            }
+            Assert.AreEqual(300, count);
+            r.Dispose();
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDeleteUnusedFiles()
+        {
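+            // iter 0 uses an NRT reader, iter 1 a reader over a real commit;
+            // in both cases the open reader must keep _0.cfs alive until it
+            // is disposed.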
+            for (int iter = 0; iter < 2; iter++)
+            {
+                Directory dir = NewMockDirectory(); // relies on windows semantics
+
+                MergePolicy mergePolicy = NewLogMergePolicy(true);
+
+                // this test expects all of its segments to be in CFS
+                mergePolicy.NoCFSRatio = 1.0;
+                mergePolicy.MaxCFSSegmentSizeMB = double.PositiveInfinity;
+
+                IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(mergePolicy).SetUseCompoundFile(true));
+                Document doc = new Document();
+                doc.Add(NewTextField("field", "go", Field.Store.NO));
+                w.AddDocument(doc);
+                DirectoryReader r;
+                if (iter == 0)
+                {
+                    // use NRT
+                    r = w.Reader;
+                }
+                else
+                {
+                    // don't use NRT
+                    w.Commit();
+                    r = DirectoryReader.Open(dir);
+                }
+
+                IList<string> files = new List<string>(Arrays.AsList(dir.ListAll()));
+
+                // RAMDir won't have a write.lock, but fs dirs will:
+                files.Remove("write.lock");
+
+                Assert.IsTrue(files.Contains("_0.cfs"));
+                Assert.IsTrue(files.Contains("_0.cfe"));
+                Assert.IsTrue(files.Contains("_0.si"));
+                if (iter == 1)
+                {
+                    // we run a full commit so there should be a segments file etc.
+                    Assert.IsTrue(files.Contains("segments_1"));
+                    Assert.IsTrue(files.Contains("segments.gen"));
+                    Assert.AreEqual(5, files.Count, string.Join(", ", files));
+                }
+                else
+                {
+                    // this is an NRT reopen - no segments files yet
+
+                    Assert.AreEqual(3, files.Count, string.Join(", ", files));
+                }
+                w.AddDocument(doc);
+                w.ForceMerge(1);
+                if (iter == 1)
+                {
+                    w.Commit();
+                }
+                IndexReader r2 = DirectoryReader.OpenIfChanged(r);
+                Assert.IsNotNull(r2);
+                Assert.IsTrue(r != r2);
+                files = Arrays.AsList(dir.ListAll());
+
+                // NOTE: here we rely on "Windows" behavior, ie, even
+                // though IW wanted to delete _0.cfs since it was
+                // merged away, because we have a reader open
+                // against this file, it should still be here:
+                Assert.IsTrue(files.Contains("_0.cfs"));
+                // forceMerge created this
+                //Assert.IsTrue(files.Contains("_2.cfs"));
+                w.DeleteUnusedFiles();
+
+                files = Arrays.AsList(dir.ListAll());
+                // r still holds this file open
+                Assert.IsTrue(files.Contains("_0.cfs"));
+                //Assert.IsTrue(files.Contains("_2.cfs"));
+
+                r.Dispose();
+                if (iter == 0)
+                {
+                    // on closing NRT reader, it calls writer.deleteUnusedFiles
+                    files = Arrays.AsList(dir.ListAll());
+                    Assert.IsFalse(files.Contains("_0.cfs"));
+                }
+                else
+                {
+                    // now writer can remove it
+                    w.DeleteUnusedFiles();
+                    files = Arrays.AsList(dir.ListAll());
+                    Assert.IsFalse(files.Contains("_0.cfs"));
+                }
+                //Assert.IsTrue(files.Contains("_2.cfs"));
+
+                w.Dispose();
+                r2.Dispose();
+
+                dir.Dispose();
+            }
+        }
+
+        [Test]
+        public virtual void TestDeleteUnusedFiles2()
+        {
+            // Validates that iw.DeleteUnusedFiles() also deletes unused index commits
+            // in case a deletion policy which holds onto commits is used.
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy())));
+            SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+
+            // First commit
+            Document doc = new Document();
+
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+
+            doc.Add(NewField("c", "val", customType));
+            writer.AddDocument(doc);
+            writer.Commit();
+            Assert.AreEqual(1, DirectoryReader.ListCommits(dir).Count);
+
+            // Keep that commit
+            IndexCommit id = sdp.Snapshot();
+
+            // Second commit - now KeepOnlyLastCommit cannot delete the prev commit.
+            doc = new Document();
+            doc.Add(NewField("c", "val", customType));
+            writer.AddDocument(doc);
+            writer.Commit();
+            Assert.AreEqual(2, DirectoryReader.ListCommits(dir).Count);
+
+            // Should delete the unreferenced commit
+            sdp.Release(id);
+            writer.DeleteUnusedFiles();
+            Assert.AreEqual(1, DirectoryReader.ListCommits(dir).Count);
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestEmptyFSDirWithNoLock()
+        {
+            // Tests that if FSDir is opened w/ a NoLockFactory (or SingleInstanceLF),
+            // then IndexWriter ctor succeeds. Previously (LUCENE-2386) it failed
+            // when listAll() was called in IndexFileDeleter.
+            Directory dir = NewFSDirectory(CreateTempDir("emptyFSDirNoLock"), NoLockFactory.GetNoLockFactory());
+            (new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())))).Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestEmptyDirRollback()
+        {
+            // TODO: generalize this test
+            //AssumeFalse("test makes assumptions about file counts", Codec.Default is SimpleTextCodec);
+
+            // Tests that if IW is created over an empty Directory, some documents are
+            // indexed, flushed (but not committed) and then IW rolls back, then no
+            // files are left in the Directory.
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy()).SetUseCompoundFile(false));
+            string[] files = dir.ListAll();
+
+            // Creating over empty dir should not create any files,
+            // or, at most the write.lock file
+            int extraFileCount;
+            if (files.Length == 1)
+            {
+                Assert.IsTrue(files[0].EndsWith("write.lock"));
+                extraFileCount = 1;
+            }
+            else
+            {
+                Assert.AreEqual(0, files.Length);
+                extraFileCount = 0;
+            }
+
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+            // create as many files as possible
+            doc.Add(NewField("c", "val", customType));
+            writer.AddDocument(doc);
+            // Adding just one document does not call flush yet.
+            int computedExtraFileCount = 0;
+            foreach (string file in dir.ListAll())
+            {
+                // don't count stored fields and term vector files
+                if (file.LastIndexOf('.') < 0 || !Arrays.AsList("fdx", "fdt", "tvx", "tvd", "tvf").Contains(file.Substring(file.LastIndexOf('.') + 1)))
+                {
+                    ++computedExtraFileCount;
+                }
+            }
+            Assert.AreEqual(extraFileCount, computedExtraFileCount, "only the stored and term vector files should exist in the directory");
+
+            doc = new Document();
+            doc.Add(NewField("c", "val", customType));
+            writer.AddDocument(doc);
+
+            // The second document should cause a flush.
+            Assert.IsTrue(dir.ListAll().Length > 5 + extraFileCount, "flush should have occurred and files should have been created");
+
+            // After rollback, IW should remove all files
+            writer.Rollback();
+            string[] allFiles = dir.ListAll();
+            Assert.IsTrue(allFiles.Length == 0 || Arrays.Equals(allFiles, new string[] { IndexWriter.WRITE_LOCK_NAME }), "no files should exist in the directory after rollback");
+
+            // Since we rolled-back above, that close should be a no-op
+            writer.Dispose();
+            allFiles = dir.ListAll();
+            Assert.IsTrue(allFiles.Length == 0 || Arrays.Equals(allFiles, new string[] { IndexWriter.WRITE_LOCK_NAME }), "expected a no-op close after IW.Rollback()");
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNoSegmentFile()
+        {
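+            // With locking disabled, a second writer opened with
+            // OpenMode.CREATE can coexist with the first, which is then
+            // rolled back without ever committing a segments file.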
+            BaseDirectoryWrapper dir = NewDirectory();
+            dir.SetLockFactory(NoLockFactory.GetNoLockFactory());
+            IndexWriter w = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
+
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorOffsets = true;
+            doc.Add(NewField("c", "val", customType));
+            w.AddDocument(doc);
+            w.AddDocument(doc);
+            IndexWriter w2 = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetOpenMode(OpenMode.CREATE));
+
+            w2.Dispose();
+            // If we don't roll back here, the test fails on Windows
+            w.Rollback();
+
+            // this test leaves only segments.gen, which causes
+            // DirectoryReader.indexExists to return true:
+            dir.CheckIndexOnClose = false;
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNoUnwantedTVFiles()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter indexWriter = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetRAMBufferSizeMB(0.01).SetMergePolicy(NewLogMergePolicy()));
+            indexWriter.Config.MergePolicy.NoCFSRatio = 0.0;
+
+            string BIG = "alskjhlaksjghlaksjfhalksvjepgjioefgjnsdfjgefgjhelkgjhqewlrkhgwlekgrhwelkgjhwelkgrhwlkejg";
+            BIG = BIG + BIG + BIG + BIG;
+
+            FieldType customType = new FieldType(TextField.TYPE_STORED);
+            customType.OmitNorms = true;
+            FieldType customType2 = new FieldType(TextField.TYPE_STORED);
+            customType2.IsTokenized = false;
+            FieldType customType3 = new FieldType(TextField.TYPE_STORED);
+            customType3.IsTokenized = false;
+            customType3.OmitNorms = true;
+
+            for (int i = 0; i < 2; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new Field("id", Convert.ToString(i) + BIG, customType3));
+                doc.Add(new Field("str", Convert.ToString(i) + BIG, customType2));
+                doc.Add(new Field("str2", Convert.ToString(i) + BIG, StoredTextType));
+                doc.Add(new Field("str3", Convert.ToString(i) + BIG, customType));
+                indexWriter.AddDocument(doc);
+            }
+
+            indexWriter.Dispose();
+
+            TestUtil.CheckIndex(dir);
+
+            AssertNoUnreferencedFiles(dir, "no tv files");
+            DirectoryReader r0 = DirectoryReader.Open(dir);
+            foreach (AtomicReaderContext ctx in r0.Leaves)
+            {
+                SegmentReader sr = (SegmentReader)ctx.Reader;
+                Assert.IsFalse(sr.FieldInfos.HasVectors);
+            }
+
+            r0.Dispose();
+            dir.Dispose();
+        }
+
+        internal sealed class StringSplitAnalyzer : Analyzer
+        {
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                return new TokenStreamComponents(new StringSplitTokenizer(reader));
+            }
+        }
+
+        private class StringSplitTokenizer : Tokenizer
+        {
+            private string[] Tokens;
+            private int Upto;
+            private readonly ICharTermAttribute TermAtt;
+
+            public StringSplitTokenizer(TextReader r)
+                : base(r)
+            {
+                TermAtt = AddAttribute<ICharTermAttribute>();
+                try
+                {
+                    SetReader(r);
+                }
+                catch (IOException e)
+                {
+                    throw new Exception(e.Message, e);
+                }
+            }
+
+            public sealed override bool IncrementToken()
+            {
+                ClearAttributes();
+                if (Upto < Tokens.Length)
+                {
+                    TermAtt.SetEmpty();
+                    TermAtt.Append(Tokens[Upto]);
+                    Upto++;
+                    return true;
+                }
+                else
+                {
+                    return false;
+                }
+            }
+
+            public override void Reset()
+            {
+                base.Reset();
+                this.Upto = 0;
+                StringBuilder b = new StringBuilder();
+                char[] buffer = new char[1024];
+                int n;
+                while ((n = m_input.Read(buffer, 0, buffer.Length)) > 0)
+                {
+                    b.Append(buffer, 0, n);
+                }
+                this.Tokens = b.ToString().Split(' ');
+            }
+        }
+
+        /// <summary>
+        /// Make sure we skip wicked long terms.
+        /// </summary>
+        [Test]
+        public virtual void TestWickedLongTerm()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, new StringSplitAnalyzer(), Similarity, TimeZone);
+
+            char[] chars = new char[DocumentsWriterPerThread.MAX_TERM_LENGTH_UTF8];
+            Arrays.Fill(chars, 'x');
+            Document doc = new Document();
+            string bigTerm = new string(chars);
+            BytesRef bigTermBytesRef = new BytesRef(bigTerm);
+
+            // these contents produce a too-long term:
+            string contents = "abc xyz x" + bigTerm + " another term";
+            doc.Add(new TextField("content", contents, Field.Store.NO));
+            try
+            {
+                w.AddDocument(doc);
+                Assert.Fail("should have hit exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            // Make sure we can add another normal document
+            doc = new Document();
+            doc.Add(new TextField("content", "abc bbb ccc", Field.Store.NO));
+            w.AddDocument(doc);
+
+            // So we remove the deleted doc:
+            w.ForceMerge(1);
+
+            IndexReader reader = w.Reader;
+            w.Dispose();
+
+            // Make sure all terms < max size were indexed
+            Assert.AreEqual(1, reader.DocFreq(new Term("content", "abc")));
+            Assert.AreEqual(1, reader.DocFreq(new Term("content", "bbb")));
+            Assert.AreEqual(0, reader.DocFreq(new Term("content", "term")));
+
+            // Make sure the doc that has the massive term is NOT in
+            // the index:
+            Assert.AreEqual(1, reader.NumDocs, "document with wicked long term is in the index!");
+
+            reader.Dispose();
+            dir.Dispose();
+            dir = NewDirectory();
+
+            // Make sure we can add a document with exactly the
+            // maximum length term, and search on that term:
+            doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.IsTokenized = false;
+            Field contentField = new Field("content", "", customType);
+            doc.Add(contentField);
+
+            w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+
+            contentField.SetStringValue("other");
+            w.AddDocument(doc);
+
+            contentField.SetStringValue("term");
+            w.AddDocument(doc);
+
+            contentField.SetStringValue(bigTerm);
+            w.AddDocument(doc);
+
+            contentField.SetStringValue("zzz");
+            w.AddDocument(doc);
+
+            reader = w.Reader;
+            w.Dispose();
+            Assert.AreEqual(1, reader.DocFreq(new Term("content", bigTerm)));
+
+            SortedDocValues dti = FieldCache.DEFAULT.GetTermsIndex(SlowCompositeReaderWrapper.Wrap(reader), "content", (float)Random().NextDouble() * PackedInt32s.FAST);
+            Assert.AreEqual(4, dti.ValueCount);
+            BytesRef br = new BytesRef();
+            dti.LookupOrd(2, br);
+            Assert.AreEqual(bigTermBytesRef, br);
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        // LUCENE-3183
+        [Test]
+        public virtual void TestEmptyFieldNameTIIOne()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetTermIndexInterval(1);
+            iwc.SetReaderTermsIndexDivisor(1);
+            IndexWriter writer = new IndexWriter(dir, iwc);
+            Document doc = new Document();
+            doc.Add(NewTextField("", "a b c", Field.Store.NO));
+            writer.AddDocument(doc);
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDeleteAllNRTLeftoverFiles()
+        {
+            Directory d = new MockDirectoryWrapper(Random(), new RAMDirectory());
+            IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            for (int i = 0; i < 20; i++)
+            {
+                for (int j = 0; j < 100; ++j)
+                {
+                    w.AddDocument(doc);
+                }
+                w.Commit();
+                DirectoryReader.Open(w, true).Dispose();
+
+                w.DeleteAll();
+                w.Commit();
+                // Make sure we accumulate no files except for empty
+                // segments_N and segments.gen:
+                Assert.IsTrue(d.ListAll().Length <= 2);
+            }
+
+            w.Dispose();
+            d.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNRTReaderVersion()
+        {
+            Directory d = new MockDirectoryWrapper(Random(), new RAMDirectory());
+            IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(NewStringField("id", "0", Field.Store.YES));
+            w.AddDocument(doc);
+            DirectoryReader r = w.Reader;
+            long version = r.Version;
+            r.Dispose();
+
+            w.AddDocument(doc);
+            r = w.Reader;
+            long version2 = r.Version;
+            r.Dispose();
+            Debug.Assert(version2 > version);
+
+            w.DeleteDocuments(new Term("id", "0"));
+            r = w.Reader;
+            w.Dispose();
+            long version3 = r.Version;
+            r.Dispose();
+            Debug.Assert(version3 > version2);
+            d.Dispose();
+        }
+
+        [Test]
+        public virtual void TestWhetherDeleteAllDeletesWriteLock()
+        {
+            Directory d = NewFSDirectory(CreateTempDir("TestIndexWriter.testWhetherDeleteAllDeletesWriteLock"

<TRUNCATED>
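
The rollback tests in this file all exercise the same contract: IndexWriter.Rollback() discards every uncommitted change and closes the writer, leaving behind at most the write.lock file, after which Dispose() is a no-op. A minimal sketch of that contract outside the test framework (the RAMDirectory/StandardAnalyzer wiring is illustrative, not taken from the commit):

    using Lucene.Net.Analysis.Standard;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Store;
    using Lucene.Net.Util;

    static class RollbackSketch
    {
        static void Main()
        {
            using (Directory dir = new RAMDirectory())
            using (var analyzer = new StandardAnalyzer(LuceneVersion.LUCENE_48))
            {
                var iwc = new IndexWriterConfig(LuceneVersion.LUCENE_48, analyzer);
                using (var writer = new IndexWriter(dir, iwc))
                {
                    var doc = new Document();
                    doc.Add(new TextField("c", "val", Field.Store.YES));
                    writer.AddDocument(doc);  // buffered in RAM, not yet committed

                    writer.Rollback();        // discards the document and closes the writer
                }                             // Dispose() is a no-op after Rollback()

                // dir now holds no index files (at most write.lock).
            }
        }
    }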

[55/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Renamed Codecs\lucene3x\ to Codecs\Lucene3x\

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWTermVectorsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWTermVectorsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWTermVectorsWriter.cs
deleted file mode 100644
index db3e4c3..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWTermVectorsWriter.cs
+++ /dev/null
@@ -1,243 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Lucene3x
-{
-    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using Directory = Lucene.Net.Store.Directory;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using StringHelper = Lucene.Net.Util.StringHelper;
-
-#pragma warning disable 612, 618
-    internal sealed class PreFlexRWTermVectorsWriter : TermVectorsWriter
-    {
-        private readonly Directory Directory;
-        private readonly string Segment;
-        private IndexOutput Tvx = null, Tvd = null, Tvf = null;
-
-        public PreFlexRWTermVectorsWriter(Directory directory, string segment, IOContext context)
-        {
-            this.Directory = directory;
-            this.Segment = segment;
-            bool success = false;
-            try
-            {
-                // Open files for TermVector storage
-                Tvx = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION), context);
-                Tvx.WriteInt32(Lucene3xTermVectorsReader.FORMAT_CURRENT);
-                Tvd = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), context);
-                Tvd.WriteInt32(Lucene3xTermVectorsReader.FORMAT_CURRENT);
-                Tvf = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION), context);
-                Tvf.WriteInt32(Lucene3xTermVectorsReader.FORMAT_CURRENT);
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    Abort();
-                }
-            }
-        }
-
-        public override void StartDocument(int numVectorFields)
-        {
-            LastFieldName = null;
-            this.NumVectorFields = numVectorFields;
-            Tvx.WriteInt64(Tvd.FilePointer);
-            Tvx.WriteInt64(Tvf.FilePointer);
-            Tvd.WriteVInt32(numVectorFields);
-            FieldCount = 0;
-            Fps = ArrayUtil.Grow(Fps, numVectorFields);
-        }
-
-        private long[] Fps = new long[10]; // pointers to the tvf before writing each field
-        private int FieldCount = 0; // number of fields we have written so far for this document
-        private int NumVectorFields = 0; // total number of fields we will write for this document
-        private string LastFieldName;
-
-        public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads)
-        {
-            Debug.Assert(LastFieldName == null || info.Name.CompareTo(LastFieldName) > 0, "fieldName=" + info.Name + " lastFieldName=" + LastFieldName);
-            LastFieldName = info.Name;
-            if (payloads)
-            {
-                throw new System.NotSupportedException("3.x codec does not support payloads on vectors!");
-            }
-            this.Positions = positions;
-            this.Offsets = offsets;
-            LastTerm.Length = 0;
-            Fps[FieldCount++] = Tvf.FilePointer;
-            Tvd.WriteVInt32(info.Number);
-            Tvf.WriteVInt32(numTerms);
-            sbyte bits = 0x0;
-            if (positions)
-            {
-                bits |= Lucene3xTermVectorsReader.STORE_POSITIONS_WITH_TERMVECTOR;
-            }
-            if (offsets)
-            {
-                bits |= Lucene3xTermVectorsReader.STORE_OFFSET_WITH_TERMVECTOR;
-            }
-            Tvf.WriteByte((byte)bits);
-
-            Debug.Assert(FieldCount <= NumVectorFields);
-            if (FieldCount == NumVectorFields)
-            {
-                // last field of the document
-                // this is crazy because the file format is crazy!
-                for (int i = 1; i < FieldCount; i++)
-                {
-                    Tvd.WriteVInt64(Fps[i] - Fps[i - 1]);
-                }
-            }
-        }
-
-        private readonly BytesRef LastTerm = new BytesRef(10);
-
-        // NOTE: we override addProx, so we don't need to buffer when indexing.
-        // we also don't buffer during bulk merges.
-        private int[] OffsetStartBuffer = new int[10];
-
-        private int[] OffsetEndBuffer = new int[10];
-        private int OffsetIndex = 0;
-        private int OffsetFreq = 0;
-        private bool Positions = false;
-        private bool Offsets = false;
-
-        public override void StartTerm(BytesRef term, int freq)
-        {
-            int prefix = StringHelper.BytesDifference(LastTerm, term);
-            int suffix = term.Length - prefix;
-            Tvf.WriteVInt32(prefix);
-            Tvf.WriteVInt32(suffix);
-            Tvf.WriteBytes(term.Bytes, term.Offset + prefix, suffix);
-            Tvf.WriteVInt32(freq);
-            LastTerm.CopyBytes(term);
-            LastPosition = LastOffset = 0;
-
-            if (Offsets && Positions)
-            {
-                // we might need to buffer if its a non-bulk merge
-                OffsetStartBuffer = ArrayUtil.Grow(OffsetStartBuffer, freq);
-                OffsetEndBuffer = ArrayUtil.Grow(OffsetEndBuffer, freq);
-                OffsetIndex = 0;
-                OffsetFreq = freq;
-            }
-        }
-
-        internal int LastPosition = 0;
-        internal int LastOffset = 0;
-
-        public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload)
-        {
-            Debug.Assert(payload == null);
-            if (Positions && Offsets)
-            {
-                // write position delta
-                Tvf.WriteVInt32(position - LastPosition);
-                LastPosition = position;
-
-                // buffer offsets
-                OffsetStartBuffer[OffsetIndex] = startOffset;
-                OffsetEndBuffer[OffsetIndex] = endOffset;
-                OffsetIndex++;
-
-                // dump buffer if we are done
-                if (OffsetIndex == OffsetFreq)
-                {
-                    for (int i = 0; i < OffsetIndex; i++)
-                    {
-                        Tvf.WriteVInt32(OffsetStartBuffer[i] - LastOffset);
-                        Tvf.WriteVInt32(OffsetEndBuffer[i] - OffsetStartBuffer[i]);
-                        LastOffset = OffsetEndBuffer[i];
-                    }
-                }
-            }
-            else if (Positions)
-            {
-                // write position delta
-                Tvf.WriteVInt32(position - LastPosition);
-                LastPosition = position;
-            }
-            else if (Offsets)
-            {
-                // write offset deltas
-                Tvf.WriteVInt32(startOffset - LastOffset);
-                Tvf.WriteVInt32(endOffset - startOffset);
-                LastOffset = endOffset;
-            }
-        }
-
-        public override void Abort()
-        {
-            try
-            {
-                Dispose();
-            }
-#pragma warning disable 168
-            catch (Exception ignored)
-#pragma warning restore 168
-            {
-            }
-            IOUtils.DeleteFilesIgnoringExceptions(Directory, IndexFileNames.SegmentFileName(Segment, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION), IndexFileNames.SegmentFileName(Segment, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), IndexFileNames.SegmentFileName(Segment, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION));
-        }
-
-        public override void Finish(FieldInfos fis, int numDocs)
-        {
-            if (4 + ((long)numDocs) * 16 != Tvx.FilePointer)
-            // this is most likely a bug in Sun JRE 1.6.0_04/_05;
-            // we detect that the bug has struck, here, and
-            // throw an exception to prevent the corruption from
-            // entering the index.  See LUCENE-1282 for
-            // details.
-            {
-                throw new Exception("tvx size mismatch: mergedDocs is " + numDocs + " but tvx size is " + Tvx.FilePointer + " file=" + Tvx.ToString() + "; now aborting this merge to prevent index corruption");
-            }
-        }
-
-        /// <summary>
-        /// Close all streams. </summary>
-        protected override void Dispose(bool disposing)
-        {
-            // make an effort to close all streams we can but remember and re-throw
-            // the first exception encountered in this process
-            IOUtils.Close(Tvx, Tvd, Tvf);
-            Tvx = Tvd = Tvf = null;
-        }
-
-        public override IComparer<BytesRef> Comparer
-        {
-            get
-            {
-                return BytesRef.UTF8SortedAsUTF16Comparer;
-            }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file
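
For orientation on the format above: StartTerm() front-codes each term against the previous one, writing the length of the shared byte prefix (obtained via StringHelper.BytesDifference) followed by only the differing suffix bytes. The core of that computation as a standalone sketch, with illustrative names:

    using System;

    static class FrontCodingSketch
    {
        // Length of the common byte prefix of two terms; this is the
        // "prefix" value StartTerm() writes before the suffix bytes.
        static int SharedPrefixLength(byte[] last, byte[] current)
        {
            int limit = Math.Min(last.Length, current.Length);
            int i = 0;
            while (i < limit && last[i] == current[i])
            {
                i++;
            }
            return i;
        }

        static void Main()
        {
            byte[] apple = System.Text.Encoding.UTF8.GetBytes("apple");
            byte[] apply = System.Text.Encoding.UTF8.GetBytes("apply");
            // Prints 4: only the 1-byte suffix "y" would be written after the prefix length.
            Console.WriteLine(SharedPrefixLength(apple, apply));
        }
    }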

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/lucene3x/TermInfosWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene3x/TermInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/lucene3x/TermInfosWriter.cs
deleted file mode 100644
index fd7c05d..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene3x/TermInfosWriter.cs
+++ /dev/null
@@ -1,334 +0,0 @@
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Lucene3x
-{
-    using System;
-    using System.IO;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using CharsRef = Lucene.Net.Util.CharsRef;
-    using Directory = Lucene.Net.Store.Directory;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using UnicodeUtil = Lucene.Net.Util.UnicodeUtil;
-
-    /// <summary>
-    /// this stores a monotonically increasing set of <Term, TermInfo> pairs in a
-    ///  Directory.  A TermInfos can be written once, in order.
-    /// </summary>
-#pragma warning disable 612, 618
-    internal sealed class TermInfosWriter : IDisposable
-    {
-        /// <summary>
-        /// The file format version, a negative number. </summary>
-        public const int FORMAT = -3;
-
-        // Changed strings to true utf8 with length-in-bytes not
-        // length-in-chars
-        public const int FORMAT_VERSION_UTF8_LENGTH_IN_BYTES = -4;
-
-        // NOTE: always change this if you switch to a new format!
-        public const int FORMAT_CURRENT = FORMAT_VERSION_UTF8_LENGTH_IN_BYTES;
-
-        private FieldInfos FieldInfos;
-        private IndexOutput Output;
-        private TermInfo LastTi = new TermInfo();
-        private long Size;
-
-        // TODO: the default values for these two parameters should be settable from
-        // IndexWriter.  However, once that's done, folks will start setting them to
-        // ridiculous values and complaining that things don't work well, as with
-        // mergeFactor.  So, let's wait until a number of folks find that alternate
-        // values work better.  Note that both of these values are stored in the
-        // segment, so that it's safe to change these w/o rebuilding all indexes.
-
-        /// <summary>
-        /// Expert: The fraction of terms in the "dictionary" which should be stored
-        /// in RAM.  Smaller values use more memory, but make searching slightly
-        /// faster, while larger values use less memory and make searching slightly
-        /// slower.  Searching is typically not dominated by dictionary lookup, so
-        /// tweaking this is rarely useful.
-        /// </summary>
-        internal int IndexInterval = 128;
-
-        /// <summary>
-        /// Expert: The fraction of term entries stored in skip tables,
-        /// used to accelerate skipping.  Larger values result in
-        /// smaller indexes, greater acceleration, but fewer accelerable cases, while
-        /// smaller values result in bigger indexes, less acceleration and more
-        /// accelerable cases. More detailed experiments would be useful here.
-        /// </summary>
-        internal int SkipInterval = 16;
-
-        /// <summary>
-        /// Expert: The maximum number of skip levels. Smaller values result in
-        /// slightly smaller indexes, but slower skipping in big posting lists.
-        /// </summary>
-        internal int MaxSkipLevels = 10;
-
-        private long LastIndexPointer;
-        private bool IsIndex;
-        private readonly BytesRef LastTerm = new BytesRef();
-        private int LastFieldNumber = -1;
-
-        private TermInfosWriter Other;
-
-        internal TermInfosWriter(Directory directory, string segment, FieldInfos fis, int interval)
-        {
-            Initialize(directory, segment, fis, interval, false);
-            bool success = false;
-            try
-            {
-                Other = new TermInfosWriter(directory, segment, fis, interval, true);
-                Other.Other = this;
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    IOUtils.CloseWhileHandlingException(Output);
-
-                    try
-                    {
-                        directory.DeleteFile(IndexFileNames.SegmentFileName(segment, "", (IsIndex ? Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION : Lucene3xPostingsFormat.TERMS_EXTENSION)));
-                    }
-#pragma warning disable 168
-                    catch (IOException ignored)
-#pragma warning restore 168
-                    {
-                    }
-                }
-            }
-        }
-
-        private TermInfosWriter(Directory directory, string segment, FieldInfos fis, int interval, bool isIndex)
-        {
-            Initialize(directory, segment, fis, interval, isIndex);
-        }
-
-        private void Initialize(Directory directory, string segment, FieldInfos fis, int interval, bool isi)
-        {
-            IndexInterval = interval;
-            FieldInfos = fis;
-            IsIndex = isi;
-            Output = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", (IsIndex ? Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION : Lucene3xPostingsFormat.TERMS_EXTENSION)), IOContext.DEFAULT);
-            bool success = false;
-            try
-            {
-                Output.WriteInt32(FORMAT_CURRENT); // write format
-                Output.WriteInt64(0); // leave space for size
-                Output.WriteInt32(IndexInterval); // write indexInterval
-                Output.WriteInt32(SkipInterval); // write skipInterval
-                Output.WriteInt32(MaxSkipLevels); // write maxSkipLevels
-                Debug.Assert(InitUTF16Results());
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    IOUtils.CloseWhileHandlingException(Output);
-
-                    try
-                    {
-                        directory.DeleteFile(IndexFileNames.SegmentFileName(segment, "", (IsIndex ? Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION : Lucene3xPostingsFormat.TERMS_EXTENSION)));
-                    }
-#pragma warning disable 168
-                    catch (IOException ignored)
-#pragma warning restore 168
-                    {
-                    }
-                }
-            }
-        }
-
-        // Currently used only by assert statements
-        internal CharsRef Utf16Result1;
-
-        internal CharsRef Utf16Result2;
-        private readonly BytesRef ScratchBytes = new BytesRef();
-
-        // Currently used only by assert statements
-        private bool InitUTF16Results()
-        {
-            Utf16Result1 = new CharsRef(10);
-            Utf16Result2 = new CharsRef(10);
-            return true;
-        }
-
-        /// <summary>
-        /// note: -1 is the empty field: "" !!!! </summary>
-        internal static string FieldName(FieldInfos infos, int fieldNumber)
-        {
-            if (fieldNumber == -1)
-            {
-                return "";
-            }
-            else
-            {
-                return infos.FieldInfo(fieldNumber).Name;
-            }
-        }
-
-        // Currently used only by assert statement
-        private int CompareToLastTerm(int fieldNumber, BytesRef term)
-        {
-            if (LastFieldNumber != fieldNumber)
-            {
-                int cmp = FieldName(FieldInfos, LastFieldNumber).CompareTo(FieldName(FieldInfos, fieldNumber));
-                // If there is a field named "" (empty string) then we
-                // will get 0 on this comparison, yet, it's "OK".  But
-                // it's not OK if two different field numbers map to
-                // the same name.
-                if (cmp != 0 || LastFieldNumber != -1)
-                {
-                    return cmp;
-                }
-            }
-
-            ScratchBytes.CopyBytes(term);
-            Debug.Assert(LastTerm.Offset == 0);
-            UnicodeUtil.UTF8toUTF16(LastTerm.Bytes, 0, LastTerm.Length, Utf16Result1);
-
-            Debug.Assert(ScratchBytes.Offset == 0);
-            UnicodeUtil.UTF8toUTF16(ScratchBytes.Bytes, 0, ScratchBytes.Length, Utf16Result2);
-
-            int len;
-            if (Utf16Result1.Length < Utf16Result2.Length)
-            {
-                len = Utf16Result1.Length;
-            }
-            else
-            {
-                len = Utf16Result2.Length;
-            }
-
-            for (int i = 0; i < len; i++)
-            {
-                char ch1 = Utf16Result1.Chars[i];
-                char ch2 = Utf16Result2.Chars[i];
-                if (ch1 != ch2)
-                {
-                    return ch1 - ch2;
-                }
-            }
-            if (Utf16Result1.Length == 0 && LastFieldNumber == -1)
-            {
-                // If there is a field named "" (empty string) with a term text of "" (empty string) then we
-                // will get 0 on this comparison, yet, it's "OK".
-                return -1;
-            }
-            return Utf16Result1.Length - Utf16Result2.Length;
-        }
-
-        /// <summary>
-        /// Adds a new <<fieldNumber, termBytes>, TermInfo> pair to the set.
-        ///  Term must be lexicographically greater than all previous Terms added.
-        ///  TermInfo pointers must be positive and greater than all previous.
-        /// </summary>
-        public void Add(int fieldNumber, BytesRef term, TermInfo ti)
-        {
-            Debug.Assert(CompareToLastTerm(fieldNumber, term) < 0 || (IsIndex && term.Length == 0 && LastTerm.Length == 0), "Terms are out of order: field=" + FieldName(FieldInfos, fieldNumber) + " (number " + fieldNumber + ")" + " lastField=" + FieldName(FieldInfos, LastFieldNumber) + " (number " + LastFieldNumber + ")" + " text=" + term.Utf8ToString() + " lastText=" + LastTerm.Utf8ToString());
-
-            Debug.Assert(ti.FreqPointer >= LastTi.FreqPointer, "freqPointer out of order (" + ti.FreqPointer + " < " + LastTi.FreqPointer + ")");
-            Debug.Assert(ti.ProxPointer >= LastTi.ProxPointer, "proxPointer out of order (" + ti.ProxPointer + " < " + LastTi.ProxPointer + ")");
-
-            if (!IsIndex && Size % IndexInterval == 0)
-            {
-                Other.Add(LastFieldNumber, LastTerm, LastTi); // add an index term
-            }
-            WriteTerm(fieldNumber, term); // write term
-
-            Output.WriteVInt32(ti.DocFreq); // write doc freq
-            Output.WriteVInt64(ti.FreqPointer - LastTi.FreqPointer); // write pointers
-            Output.WriteVInt64(ti.ProxPointer - LastTi.ProxPointer);
-
-            if (ti.DocFreq >= SkipInterval)
-            {
-                Output.WriteVInt32(ti.SkipOffset);
-            }
-
-            if (IsIndex)
-            {
-                Output.WriteVInt64(Other.Output.FilePointer - LastIndexPointer);
-                LastIndexPointer = Other.Output.FilePointer; // write pointer
-            }
-
-            LastFieldNumber = fieldNumber;
-            LastTi.Set(ti);
-            Size++;
-        }
-
-        private void WriteTerm(int fieldNumber, BytesRef term)
-        {
-            //System.out.println("  tiw.write field=" + fieldNumber + " term=" + term.utf8ToString());
-
-            // TODO: UTF16toUTF8 could tell us this prefix
-            // Compute prefix in common with last term:
-            int start = 0;
-            int limit = term.Length < LastTerm.Length ? term.Length : LastTerm.Length;
-            while (start < limit)
-            {
-                if (term.Bytes[start + term.Offset] != LastTerm.Bytes[start + LastTerm.Offset])
-                {
-                    break;
-                }
-                start++;
-            }
-
-            int length = term.Length - start;
-            Output.WriteVInt32(start); // write shared prefix length
-            Output.WriteVInt32(length); // write delta length
-            Output.WriteBytes(term.Bytes, start + term.Offset, length); // write delta bytes
-            Output.WriteVInt32(fieldNumber); // write field num
-            LastTerm.CopyBytes(term);
-        }
-
-        /// <summary>
-        /// Called to complete TermInfos creation. </summary>
-        public void Dispose()
-        {
-            try
-            {
-                Output.Seek(4); // write size after format
-                Output.WriteInt64(Size);
-            }
-            finally
-            {
-                try
-                {
-                    Output.Dispose();
-                }
-                finally
-                {
-                    if (!IsIndex)
-                    {
-                        Other.Dispose();
-                    }
-                }
-            }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file
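
A note on the structure just removed (and re-added under Codecs\Lucene3x\ below): TermInfosWriter holds a second instance of itself in Other. The main instance writes the .tis term dictionary while Other writes the .tii index, and Add() mirrors every IndexInterval-th entry into the index. The real code samples the previous term and records file-pointer deltas; the sketch below keeps only the sampling cadence, with illustrative names:

    using System;
    using System.Collections.Generic;

    static class TermIndexSamplingSketch
    {
        const int IndexInterval = 128;

        static void Main()
        {
            var tis = new List<string>();  // stands in for the .tis dictionary
            var tii = new List<string>();  // stands in for the .tii index

            for (int i = 0; i < 1000; i++)
            {
                string term = "term" + i.ToString("D4");
                if (tis.Count % IndexInterval == 0)
                {
                    tii.Add(term);         // every 128th term also lands in the index
                }
                tis.Add(term);
            }

            // 1000 terms yield 8 index entries (terms 0, 128, ..., 896).
            Console.WriteLine($"{tis.Count} terms, {tii.Count} index entries");
        }
    }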

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
index 3d3fd87..4839e58 100644
--- a/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
+++ b/src/Lucene.Net.TestFramework/Lucene.Net.TestFramework.csproj
@@ -156,52 +156,52 @@
     <Compile Include="Codecs\Compressing\HighCompressionCompressingCodec.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene3x\PreFlexRWCodec.cs">
+    <Compile Include="Codecs\Lucene3x\PreFlexRWCodec.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene3x\PreFlexRWFieldInfosFormat.cs">
+    <Compile Include="Codecs\Lucene3x\PreFlexRWFieldInfosFormat.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene3x\PreFlexRWFieldInfosReader.cs">
+    <Compile Include="Codecs\Lucene3x\PreFlexRWFieldInfosReader.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene3x\PreFlexRWFieldInfosWriter.cs">
+    <Compile Include="Codecs\Lucene3x\PreFlexRWFieldInfosWriter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene3x\PreFlexRWFieldsWriter.cs">
+    <Compile Include="Codecs\Lucene3x\PreFlexRWFieldsWriter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene3x\PreFlexRWNormsConsumer.cs">
+    <Compile Include="Codecs\Lucene3x\PreFlexRWNormsConsumer.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene3x\PreFlexRWNormsFormat.cs">
+    <Compile Include="Codecs\Lucene3x\PreFlexRWNormsFormat.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene3x\PreFlexRWPostingsFormat.cs">
+    <Compile Include="Codecs\Lucene3x\PreFlexRWPostingsFormat.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene3x\PreFlexRWSegmentInfoFormat.cs">
+    <Compile Include="Codecs\Lucene3x\PreFlexRWSegmentInfoFormat.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene3x\PreFlexRWSegmentInfoWriter.cs">
+    <Compile Include="Codecs\Lucene3x\PreFlexRWSegmentInfoWriter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene3x\PreFlexRWSkipListWriter.cs">
+    <Compile Include="Codecs\Lucene3x\PreFlexRWSkipListWriter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene3x\PreFlexRWStoredFieldsFormat.cs">
+    <Compile Include="Codecs\Lucene3x\PreFlexRWStoredFieldsFormat.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene3x\PreFlexRWStoredFieldsWriter.cs">
+    <Compile Include="Codecs\Lucene3x\PreFlexRWStoredFieldsWriter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene3x\PreFlexRWTermVectorsFormat.cs">
+    <Compile Include="Codecs\Lucene3x\PreFlexRWTermVectorsFormat.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene3x\PreFlexRWTermVectorsWriter.cs">
+    <Compile Include="Codecs\Lucene3x\PreFlexRWTermVectorsWriter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Codecs\lucene3x\TermInfosWriter.cs">
+    <Compile Include="Codecs\Lucene3x\TermInfosWriter.cs">
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Codecs\lucene40\Lucene40DocValuesWriter.cs">


[56/72] [abbrv] lucenenet git commit: Lucene.Net.TestFramework: Renamed Codecs\lucene3x\ to Codecs\Lucene3x\

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs
new file mode 100644
index 0000000..fd7c05d
--- /dev/null
+++ b/src/Lucene.Net.TestFramework/Codecs/Lucene3x/TermInfosWriter.cs
@@ -0,0 +1,334 @@
+using System.Diagnostics;
+
+namespace Lucene.Net.Codecs.Lucene3x
+{
+    using System;
+    using System.IO;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using CharsRef = Lucene.Net.Util.CharsRef;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using FieldInfos = Lucene.Net.Index.FieldInfos;
+    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
+    using IndexOutput = Lucene.Net.Store.IndexOutput;
+    using IOContext = Lucene.Net.Store.IOContext;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using UnicodeUtil = Lucene.Net.Util.UnicodeUtil;
+
+    /// <summary>
+    /// This stores a monotonically increasing set of &lt;Term, TermInfo&gt; pairs in a
+    /// Directory.  A TermInfos can be written once, in order.
+    /// </summary>
+#pragma warning disable 612, 618
+    internal sealed class TermInfosWriter : IDisposable
+    {
+        /// <summary>
+        /// The file format version, a negative number. </summary>
+        public const int FORMAT = -3;
+
+        // Changed strings to true utf8 with length-in-bytes not
+        // length-in-chars
+        public const int FORMAT_VERSION_UTF8_LENGTH_IN_BYTES = -4;
+
+        // NOTE: always change this if you switch to a new format!
+        public const int FORMAT_CURRENT = FORMAT_VERSION_UTF8_LENGTH_IN_BYTES;
+
+        private FieldInfos FieldInfos;
+        private IndexOutput Output;
+        private TermInfo LastTi = new TermInfo();
+        private long Size;
+
+        // TODO: the default values for these two parameters should be settable from
+        // IndexWriter.  However, once that's done, folks will start setting them to
+        // ridiculous values and complaining that things don't work well, as with
+        // mergeFactor.  So, let's wait until a number of folks find that alternate
+        // values work better.  Note that both of these values are stored in the
+        // segment, so that it's safe to change these w/o rebuilding all indexes.
+
+        /// <summary>
+        /// Expert: The interval between terms in the "dictionary" that are kept
+        /// in RAM (one in every IndexInterval terms).  Smaller values use more
+        /// memory, but make searching slightly faster, while larger values use
+        /// less memory and make searching slightly slower.  Searching is typically
+        /// not dominated by dictionary lookup, so tweaking this is rarely useful.
+        /// </summary>
+        internal int IndexInterval = 128;
+
+        /// <summary>
+        /// Expert: The interval between term entries stored in skip tables,
+        /// used to accelerate skipping.  Larger values result in smaller
+        /// indexes and greater acceleration, but fewer accelerable cases, while
+        /// smaller values result in bigger indexes, less acceleration, and more
+        /// accelerable cases.  More detailed experiments would be useful here.
+        /// </summary>
+        internal int SkipInterval = 16;
+
+        /// <summary>
+        /// Expert: The maximum number of skip levels. Smaller values result in
+        /// slightly smaller indexes, but slower skipping in big posting lists.
+        /// </summary>
+        internal int MaxSkipLevels = 10;
+
+        private long LastIndexPointer;
+        private bool IsIndex;
+        private readonly BytesRef LastTerm = new BytesRef();
+        private int LastFieldNumber = -1;
+
+        private TermInfosWriter Other;
+
+        internal TermInfosWriter(Directory directory, string segment, FieldInfos fis, int interval)
+        {
+            Initialize(directory, segment, fis, interval, false);
+            bool success = false;
+            try
+            {
+                Other = new TermInfosWriter(directory, segment, fis, interval, true);
+                Other.Other = this;
+                success = true;
+            }
+            finally
+            {
+                if (!success)
+                {
+                    IOUtils.CloseWhileHandlingException(Output);
+
+                    try
+                    {
+                        directory.DeleteFile(IndexFileNames.SegmentFileName(segment, "", (IsIndex ? Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION : Lucene3xPostingsFormat.TERMS_EXTENSION)));
+                    }
+#pragma warning disable 168
+                    catch (IOException ignored)
+#pragma warning restore 168
+                    {
+                    }
+                }
+            }
+        }
+
+        private TermInfosWriter(Directory directory, string segment, FieldInfos fis, int interval, bool isIndex)
+        {
+            Initialize(directory, segment, fis, interval, isIndex);
+        }
+
+        private void Initialize(Directory directory, string segment, FieldInfos fis, int interval, bool isi)
+        {
+            IndexInterval = interval;
+            FieldInfos = fis;
+            IsIndex = isi;
+            Output = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", (IsIndex ? Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION : Lucene3xPostingsFormat.TERMS_EXTENSION)), IOContext.DEFAULT);
+            bool success = false;
+            try
+            {
+                Output.WriteInt32(FORMAT_CURRENT); // write format
+                Output.WriteInt64(0); // leave space for size
+                Output.WriteInt32(IndexInterval); // write indexInterval
+                Output.WriteInt32(SkipInterval); // write skipInterval
+                Output.WriteInt32(MaxSkipLevels); // write maxSkipLevels
+                Debug.Assert(InitUTF16Results());
+                success = true;
+            }
+            finally
+            {
+                if (!success)
+                {
+                    IOUtils.CloseWhileHandlingException(Output);
+
+                    try
+                    {
+                        directory.DeleteFile(IndexFileNames.SegmentFileName(segment, "", (IsIndex ? Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION : Lucene3xPostingsFormat.TERMS_EXTENSION)));
+                    }
+#pragma warning disable 168
+                    catch (IOException ignored)
+#pragma warning restore 168
+                    {
+                    }
+                }
+            }
+        }
+
+        // Currently used only by assert statements
+        internal CharsRef Utf16Result1;
+
+        internal CharsRef Utf16Result2;
+        private readonly BytesRef ScratchBytes = new BytesRef();
+
+        // Currently used only by assert statements
+        private bool InitUTF16Results()
+        {
+            Utf16Result1 = new CharsRef(10);
+            Utf16Result2 = new CharsRef(10);
+            return true;
+        }
+
+        /// <summary>
+        /// Note: field number -1 denotes the empty field name (""). </summary>
+        internal static string FieldName(FieldInfos infos, int fieldNumber)
+        {
+            if (fieldNumber == -1)
+            {
+                return "";
+            }
+            else
+            {
+                return infos.FieldInfo(fieldNumber).Name;
+            }
+        }
+
+        // Currently used only by assert statement
+        private int CompareToLastTerm(int fieldNumber, BytesRef term)
+        {
+            if (LastFieldNumber != fieldNumber)
+            {
+                int cmp = FieldName(FieldInfos, LastFieldNumber).CompareTo(FieldName(FieldInfos, fieldNumber));
+                // If there is a field named "" (empty string) then we
+                // will get 0 on this comparison, yet, it's "OK".  But
+                // it's not OK if two different field numbers map to
+                // the same name.
+                if (cmp != 0 || LastFieldNumber != -1)
+                {
+                    return cmp;
+                }
+            }
+
+            ScratchBytes.CopyBytes(term);
+            Debug.Assert(LastTerm.Offset == 0);
+            UnicodeUtil.UTF8toUTF16(LastTerm.Bytes, 0, LastTerm.Length, Utf16Result1);
+
+            Debug.Assert(ScratchBytes.Offset == 0);
+            UnicodeUtil.UTF8toUTF16(ScratchBytes.Bytes, 0, ScratchBytes.Length, Utf16Result2);
+
+            int len;
+            if (Utf16Result1.Length < Utf16Result2.Length)
+            {
+                len = Utf16Result1.Length;
+            }
+            else
+            {
+                len = Utf16Result2.Length;
+            }
+
+            for (int i = 0; i < len; i++)
+            {
+                char ch1 = Utf16Result1.Chars[i];
+                char ch2 = Utf16Result2.Chars[i];
+                if (ch1 != ch2)
+                {
+                    return ch1 - ch2;
+                }
+            }
+            if (Utf16Result1.Length == 0 && LastFieldNumber == -1)
+            {
+                // If there is a field named "" (empty string) with a term text of "" (empty string) then we
+                // will get 0 on this comparison, yet, it's "OK".
+                return -1;
+            }
+            return Utf16Result1.Length - Utf16Result2.Length;
+        }
+
+        /// <summary>
+        /// Adds a new &lt;&lt;fieldNumber, termBytes&gt;, TermInfo&gt; pair to the set.
+        /// The term must be lexicographically greater than all previously added terms.
+        /// TermInfo pointers must be positive and greater than all previous pointers.
+        /// </summary>
+        public void Add(int fieldNumber, BytesRef term, TermInfo ti)
+        {
+            Debug.Assert(CompareToLastTerm(fieldNumber, term) < 0 || (IsIndex && term.Length == 0 && LastTerm.Length == 0), "Terms are out of order: field=" + FieldName(FieldInfos, fieldNumber) + " (number " + fieldNumber + ")" + " lastField=" + FieldName(FieldInfos, LastFieldNumber) + " (number " + LastFieldNumber + ")" + " text=" + term.Utf8ToString() + " lastText=" + LastTerm.Utf8ToString());
+
+            Debug.Assert(ti.FreqPointer >= LastTi.FreqPointer, "freqPointer out of order (" + ti.FreqPointer + " < " + LastTi.FreqPointer + ")");
+            Debug.Assert(ti.ProxPointer >= LastTi.ProxPointer, "proxPointer out of order (" + ti.ProxPointer + " < " + LastTi.ProxPointer + ")");
+
+            if (!IsIndex && Size % IndexInterval == 0)
+            {
+                Other.Add(LastFieldNumber, LastTerm, LastTi); // add an index term
+            }
+            WriteTerm(fieldNumber, term); // write term
+
+            Output.WriteVInt32(ti.DocFreq); // write doc freq
+            Output.WriteVInt64(ti.FreqPointer - LastTi.FreqPointer); // write pointers
+            Output.WriteVInt64(ti.ProxPointer - LastTi.ProxPointer);
+
+            if (ti.DocFreq >= SkipInterval)
+            {
+                Output.WriteVInt32(ti.SkipOffset);
+            }
+
+            if (IsIndex)
+            {
+                Output.WriteVInt64(Other.Output.FilePointer - LastIndexPointer);
+                LastIndexPointer = Other.Output.FilePointer; // write pointer
+            }
+
+            LastFieldNumber = fieldNumber;
+            LastTi.Set(ti);
+            Size++;
+        }
+
+        private void WriteTerm(int fieldNumber, BytesRef term)
+        {
+            //System.out.println("  tiw.write field=" + fieldNumber + " term=" + term.utf8ToString());
+
+            // TODO: UTF16toUTF8 could tell us this prefix
+            // Compute prefix in common with last term:
+            int start = 0;
+            int limit = term.Length < LastTerm.Length ? term.Length : LastTerm.Length;
+            while (start < limit)
+            {
+                if (term.Bytes[start + term.Offset] != LastTerm.Bytes[start + LastTerm.Offset])
+                {
+                    break;
+                }
+                start++;
+            }
+
+            int length = term.Length - start;
+            Output.WriteVInt32(start); // write shared prefix length
+            Output.WriteVInt32(length); // write delta length
+            Output.WriteBytes(term.Bytes, start + term.Offset, length); // write delta bytes
+            Output.WriteVInt32(fieldNumber); // write field num
+            LastTerm.CopyBytes(term);
+        }
+
+        /// <summary>
+        /// Called to complete TermInfos creation. </summary>
+        public void Dispose()
+        {
+            try
+            {
+                Output.Seek(4); // write size after format
+                Output.WriteInt64(Size);
+            }
+            finally
+            {
+                try
+                {
+                    Output.Dispose();
+                }
+                finally
+                {
+                    if (!IsIndex)
+                    {
+                        Other.Dispose();
+                    }
+                }
+            }
+        }
+    }
+#pragma warning restore 612, 618
+}
\ No newline at end of file
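
One detail of the file layout above worth calling out: Initialize() writes a placeholder Int64 for the size immediately after the 4-byte format header, and Dispose() later seeks back to offset 4 to patch in the real term count. The same placeholder-then-patch pattern in plain .NET I/O (a sketch, not the Lucene.Net store API):

    using System.IO;

    static class BackPatchSketch
    {
        static void Main()
        {
            using (var stream = new MemoryStream())
            using (var writer = new BinaryWriter(stream))
            {
                writer.Write(-4);             // 4-byte format header, like FORMAT_CURRENT
                writer.Write(0L);             // placeholder for the term count

                long count = 0;
                foreach (string term in new[] { "a", "b", "c" })
                {
                    writer.Write(term);       // stand-in for a real term record
                    count++;
                }

                writer.Seek(4, SeekOrigin.Begin);  // skip past the format header
                writer.Write(count);               // back-patch the real size
            }
        }
    }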

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWCodec.cs b/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWCodec.cs
deleted file mode 100644
index 4d265d9..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWCodec.cs
+++ /dev/null
@@ -1,151 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene3x
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-
-    /// <summary>
-    /// Writes 3.x-like indexes (not perfect emulation yet) for testing only!
-    /// @lucene.experimental
-    /// </summary>
-#pragma warning disable 612, 618
-    public class PreFlexRWCodec : Lucene3xCodec
-    {
-        private readonly PostingsFormat Postings = new PreFlexRWPostingsFormat();
-        private readonly Lucene3xNormsFormat Norms = new PreFlexRWNormsFormat();
-        private readonly FieldInfosFormat FieldInfos = new PreFlexRWFieldInfosFormat();
-        private readonly TermVectorsFormat TermVectors = new PreFlexRWTermVectorsFormat();
-        private readonly SegmentInfoFormat SegmentInfos = new PreFlexRWSegmentInfoFormat();
-        private readonly StoredFieldsFormat StoredFields = new PreFlexRWStoredFieldsFormat();
-        private readonly bool _oldFormatImpersonationIsActive;
-
-        /// <summary>
-        /// LUCENENET specific
-        /// Creates the codec with OldFormatImpersonationIsActive = true.
-        /// </summary>
-        /// <remarks>
-        /// Added so that SPIClassIterator can locate this Codec.  The iterator
-        /// only recognises classes that have empty constructors.
-        /// </remarks>
-        public PreFlexRWCodec()
-            : this(true)
-        { }
-
-        /// <summary>
-        /// </summary>
-        /// <param name="oldFormatImpersonationIsActive">
-        /// LUCENENET specific
-        /// Added to remove dependency on then-static <see cref="LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE"/>
-        /// </param>
-        public PreFlexRWCodec(bool oldFormatImpersonationIsActive) : base()
-        {
-            _oldFormatImpersonationIsActive = oldFormatImpersonationIsActive;
-        }
-
-        public override PostingsFormat PostingsFormat
-        {
-            get
-            {
-                if (_oldFormatImpersonationIsActive)
-                {
-                    return Postings;
-                }
-                else
-                {
-                    return base.PostingsFormat;
-                }
-            }
-        }
-
-        public override NormsFormat NormsFormat
-        {
-            get
-            {
-                if (_oldFormatImpersonationIsActive)
-                {
-                    return Norms;
-                }
-                else
-                {
-                    return base.NormsFormat;
-                }
-            }
-        }
-
-        public override SegmentInfoFormat SegmentInfoFormat
-        {
-            get
-            {
-                if (_oldFormatImpersonationIsActive)
-                {
-                    return SegmentInfos;
-                }
-                else
-                {
-                    return base.SegmentInfoFormat;
-                }
-            }
-        }
-
-        public override FieldInfosFormat FieldInfosFormat
-        {
-            get
-            {
-                if (_oldFormatImpersonationIsActive)
-                {
-                    return FieldInfos;
-                }
-                else
-                {
-                    return base.FieldInfosFormat;
-                }
-            }
-        }
-
-        public override TermVectorsFormat TermVectorsFormat
-        {
-            get
-            {
-                if (_oldFormatImpersonationIsActive)
-                {
-                    return TermVectors;
-                }
-                else
-                {
-                    return base.TermVectorsFormat;
-                }
-            }
-        }
-
-        public override StoredFieldsFormat StoredFieldsFormat
-        {
-            get
-            {
-                if (_oldFormatImpersonationIsActive)
-                {
-                    return StoredFields;
-                }
-                else
-                {
-                    return base.StoredFieldsFormat;
-                }
-            }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file
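
For orientation, a hedged usage sketch of the codec deleted above: the flag
chosen at construction decides whether each format property returns the
writable PreFlexRW implementation or falls through to the read-only
Lucene3xCodec base (behavior inferred from the property bodies in the diff):

    // Writable 3.x emulation: all six format properties return PreFlexRW* instances.
    var rw = new PreFlexRWCodec(true);

    // Impersonation off: every property falls back to the base Lucene3xCodec,
    // which can read, but not write, the 3.x format.
    var ro = new PreFlexRWCodec(false);

    // e.g. rw.PostingsFormat is a PreFlexRWPostingsFormat, while
    //      ro.PostingsFormat is the base Lucene3xCodec postings format.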

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWFieldInfosFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWFieldInfosFormat.cs b/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWFieldInfosFormat.cs
deleted file mode 100644
index a02fe7f..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWFieldInfosFormat.cs
+++ /dev/null
@@ -1,45 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene3x
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// @lucene.internal
-    /// @lucene.experimental
-    /// </summary>
-#pragma warning disable 612, 618
-    internal class PreFlexRWFieldInfosFormat : Lucene3xFieldInfosFormat
-    {
-        public override FieldInfosReader FieldInfosReader
-        {
-            get
-            {
-                return new PreFlexRWFieldInfosReader();
-            }
-        }
-
-        public override FieldInfosWriter FieldInfosWriter
-        {
-            get
-            {
-                return new PreFlexRWFieldInfosWriter();
-            }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWFieldInfosReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWFieldInfosReader.cs b/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWFieldInfosReader.cs
deleted file mode 100644
index 458951e..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWFieldInfosReader.cs
+++ /dev/null
@@ -1,133 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene3x
-{
-    using System.Collections.Generic;
-
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
-    using Directory = Lucene.Net.Store.Directory;
-    using DocValuesType = Lucene.Net.Index.DocValuesType;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexFormatTooNewException = Lucene.Net.Index.IndexFormatTooNewException;
-    using IndexFormatTooOldException = Lucene.Net.Index.IndexFormatTooOldException;
-    using IndexInput = Lucene.Net.Store.IndexInput;
-    using IndexOptions = Lucene.Net.Index.IndexOptions;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
-
-    /// <summary>
-    /// @lucene.internal
-    /// @lucene.experimental
-    /// </summary>
-    internal class PreFlexRWFieldInfosReader : FieldInfosReader
-    {
-        internal const int FORMAT_MINIMUM = PreFlexRWFieldInfosWriter.FORMAT_START;
-
-        public override FieldInfos Read(Directory directory, string segmentName, string segmentSuffix, IOContext iocontext)
-        {
-            string fileName = IndexFileNames.SegmentFileName(segmentName, "", PreFlexRWFieldInfosWriter.FIELD_INFOS_EXTENSION);
-            IndexInput input = directory.OpenInput(fileName, iocontext);
-
-            try
-            {
-                int format = input.ReadVInt32();
-
-                if (format > FORMAT_MINIMUM)
-                {
-                    throw new IndexFormatTooOldException(input, format, FORMAT_MINIMUM, PreFlexRWFieldInfosWriter.FORMAT_CURRENT);
-                }
-                if (format < PreFlexRWFieldInfosWriter.FORMAT_CURRENT && format != PreFlexRWFieldInfosWriter.FORMAT_PREFLEX_RW)
-                {
-                    throw new IndexFormatTooNewException(input, format, FORMAT_MINIMUM, PreFlexRWFieldInfosWriter.FORMAT_CURRENT);
-                }
-
-                int size = input.ReadVInt32(); //read in the size
-                FieldInfo[] infos = new FieldInfo[size];
-
-                for (int i = 0; i < size; i++)
-                {
-                    string name = input.ReadString();
-                    int fieldNumber = format == PreFlexRWFieldInfosWriter.FORMAT_PREFLEX_RW ? input.ReadInt32() : i;
-                    byte bits = input.ReadByte();
-                    bool isIndexed = (bits & PreFlexRWFieldInfosWriter.IS_INDEXED) != 0;
-                    bool storeTermVector = (bits & PreFlexRWFieldInfosWriter.STORE_TERMVECTOR) != 0;
-                    bool omitNorms = (bits & PreFlexRWFieldInfosWriter.OMIT_NORMS) != 0;
-                    bool storePayloads = (bits & PreFlexRWFieldInfosWriter.STORE_PAYLOADS) != 0;
-                    IndexOptions? indexOptions;
-                    if (!isIndexed)
-                    {
-                        indexOptions = null;
-                    }
-                    else if ((bits & PreFlexRWFieldInfosWriter.OMIT_TERM_FREQ_AND_POSITIONS) != 0)
-                    {
-                        indexOptions = IndexOptions.DOCS_ONLY;
-                    }
-                    else if ((bits & PreFlexRWFieldInfosWriter.OMIT_POSITIONS) != 0)
-                    {
-                        if (format <= PreFlexRWFieldInfosWriter.FORMAT_OMIT_POSITIONS)
-                        {
-                            indexOptions = IndexOptions.DOCS_AND_FREQS;
-                        }
-                        else
-                        {
-                            throw new CorruptIndexException("Corrupt fieldinfos, OMIT_POSITIONS set but format=" + format + " (resource: " + input + ")");
-                        }
-                    }
-                    else
-                    {
-                        indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
-                    }
-
-                    // LUCENE-3027: past indices were able to write
-                    // storePayloads=true when omitTFAP is also true,
-                    // which is invalid.  We correct that, here:
-                    if (indexOptions != IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)
-                    {
-                        storePayloads = false;
-                    }
-
-                    DocValuesType? normType = isIndexed && !omitNorms ? (DocValuesType?)DocValuesType.NUMERIC : null;
-                    if (format == PreFlexRWFieldInfosWriter.FORMAT_PREFLEX_RW && normType != null)
-                    {
-                        // RW can have norms but doesn't write them
-                        normType = input.ReadByte() != 0 ? (DocValuesType?)DocValuesType.NUMERIC : null;
-                    }
-
-                    infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, null, normType, null);
-                }
-
-                if (input.FilePointer != input.Length)
-                {
-                    throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read " + input.FilePointer + " vs size " + input.Length + " (resource: " + input + ")");
-                }
-                return new FieldInfos(infos);
-            }
-            finally
-            {
-                input.Dispose();
-            }
-        }
-
-        public static void Files(Directory dir, SegmentInfo info, ISet<string> files)
-        {
-            files.Add(IndexFileNames.SegmentFileName(info.Name, "", PreFlexRWFieldInfosWriter.FIELD_INFOS_EXTENSION));
-        }
-    }
-}
\ No newline at end of file
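
A worked example of the flag byte decoded in Read above, using the constants
from PreFlexRWFieldInfosWriter (the variable names mirror the reader):

    byte bits = 0x21; // IS_INDEXED (0x1) | STORE_PAYLOADS (0x20)
    bool isIndexed = (bits & 0x1) != 0;        // true
    bool storeTermVector = (bits & 0x2) != 0;  // false
    bool omitNorms = (bits & 0x10) != 0;       // false
    bool storePayloads = (bits & 0x20) != 0;   // true
    // Neither OMIT_TERM_FREQ_AND_POSITIONS nor OMIT_POSITIONS is set, so the
    // field decodes to IndexOptions.DOCS_AND_FREQS_AND_POSITIONS with payloads.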

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWFieldInfosWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWFieldInfosWriter.cs b/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWFieldInfosWriter.cs
deleted file mode 100644
index e0fef49..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWFieldInfosWriter.cs
+++ /dev/null
@@ -1,130 +0,0 @@
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Lucene3x
-{
-    using Directory = Lucene.Net.Store.Directory;
-
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexOptions = Lucene.Net.Index.IndexOptions;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-
-    /// <summary>
-    /// @lucene.internal
-    /// @lucene.experimental
-    /// </summary>
-    internal class PreFlexRWFieldInfosWriter : FieldInfosWriter
-    {
-        // TODO move to test-framework preflex RW?
-
-        /// <summary>
-        /// Extension of field infos </summary>
-        internal const string FIELD_INFOS_EXTENSION = "fnm";
-
-        // First used in 2.9; prior to 2.9 there was no format header
-        internal const int FORMAT_START = -2;
-
-        // First used in 3.4: omit only positional information
-        internal const int FORMAT_OMIT_POSITIONS = -3;
-
-        internal static readonly int FORMAT_PREFLEX_RW = int.MinValue;
-
-        // whenever you add a new format, make it 1 smaller (negative version logic)!
-        internal const int FORMAT_CURRENT = FORMAT_OMIT_POSITIONS;
-
-        internal const sbyte IS_INDEXED = 0x1;
-        internal const sbyte STORE_TERMVECTOR = 0x2;
-        internal const sbyte OMIT_NORMS = 0x10;
-        internal const sbyte STORE_PAYLOADS = 0x20;
-        internal const sbyte OMIT_TERM_FREQ_AND_POSITIONS = 0x40;
-        internal const sbyte OMIT_POSITIONS = -128;
-
-        public override void Write(Directory directory, string segmentName, string segmentSuffix, FieldInfos infos, IOContext context)
-        {
-            string fileName = IndexFileNames.SegmentFileName(segmentName, "", FIELD_INFOS_EXTENSION);
-            IndexOutput output = directory.CreateOutput(fileName, context);
-            bool success = false;
-            try
-            {
-                output.WriteVInt32(FORMAT_PREFLEX_RW);
-                output.WriteVInt32(infos.Count);
-                foreach (FieldInfo fi in infos)
-                {
-                    sbyte bits = 0x0;
-                    if (fi.HasVectors)
-                    {
-                        bits |= STORE_TERMVECTOR;
-                    }
-                    if (fi.OmitsNorms)
-                    {
-                        bits |= OMIT_NORMS;
-                    }
-                    if (fi.HasPayloads)
-                    {
-                        bits |= STORE_PAYLOADS;
-                    }
-                    if (fi.IsIndexed)
-                    {
-                        bits |= IS_INDEXED;
-                        Debug.Assert(fi.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !fi.HasPayloads);
-                        if (fi.IndexOptions == IndexOptions.DOCS_ONLY)
-                        {
-                            bits |= OMIT_TERM_FREQ_AND_POSITIONS;
-                        }
-                        else if (fi.IndexOptions == IndexOptions.DOCS_AND_FREQS)
-                        {
-                            bits |= OMIT_POSITIONS;
-                        }
-                    }
-                    output.WriteString(fi.Name);
-                    /*
-                     * We need to write the field number since IW tries
-                     * to stabilize the field numbers across segments, so the
-                     * FI ordinal is not necessarily equivalent to the field number.
-                     */
-                    output.WriteInt32(fi.Number);
-                    output.WriteByte((byte)bits);
-                    if (fi.IsIndexed && !fi.OmitsNorms)
-                    {
-                        // to allow null norm types we need to indicate if norms are written
-                        // only in RW case
-                        output.WriteByte((byte)(sbyte)(fi.NormType == null ? 0 : 1));
-                    }
-                    Debug.Assert(fi.Attributes == null); // not used or supported
-                }
-                success = true;
-            }
-            finally
-            {
-                if (success)
-                {
-                    output.Dispose();
-                }
-                else
-                {
-                    IOUtils.CloseWhileHandlingException(output);
-                }
-            }
-        }
-    }
-}
\ No newline at end of file
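
The format constants above use "negative version logic": every newer format is
one smaller, so smaller means newer. A hedged sketch of the resulting checks
(mirroring PreFlexRWFieldInfosReader; the class and method names here are
illustrative only):

    internal static class FormatVersionChecks
    {
        public const int FORMAT_START = -2;          // first used in 2.9
        public const int FORMAT_OMIT_POSITIONS = -3; // first used in 3.4
        public const int FORMAT_CURRENT = FORMAT_OMIT_POSITIONS;
        public static readonly int FORMAT_PREFLEX_RW = int.MinValue;

        // e.g. -1 (or 0) predates FORMAT_START and is too old to read.
        public static bool IsTooOld(int format) => format > FORMAT_START;

        // e.g. -4 would be newer than FORMAT_CURRENT and unreadable,
        // unless it is the special FORMAT_PREFLEX_RW marker.
        public static bool IsTooNew(int format) =>
            format < FORMAT_CURRENT && format != FORMAT_PREFLEX_RW;
    }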

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWFieldsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWFieldsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWFieldsWriter.cs
deleted file mode 100644
index b0c8174..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWFieldsWriter.cs
+++ /dev/null
@@ -1,269 +0,0 @@
-using System.Collections.Generic;
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Lucene3x
-{
-    using BytesRef = Lucene.Net.Util.BytesRef;
-
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexOptions = Lucene.Net.Index.IndexOptions;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
-
-#pragma warning disable 612, 618
-    internal class PreFlexRWFieldsWriter : FieldsConsumer
-    {
-        private readonly TermInfosWriter TermsOut;
-        private readonly IndexOutput FreqOut;
-        private readonly IndexOutput ProxOut;
-        private readonly PreFlexRWSkipListWriter SkipListWriter;
-        private readonly int TotalNumDocs;
-
-        public PreFlexRWFieldsWriter(SegmentWriteState state)
-        {
-            TermsOut = new TermInfosWriter(state.Directory, state.SegmentInfo.Name, state.FieldInfos, state.TermIndexInterval);
-
-            bool success = false;
-            try
-            {
-                string freqFile = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, "", Lucene3xPostingsFormat.FREQ_EXTENSION);
-                FreqOut = state.Directory.CreateOutput(freqFile, state.Context);
-                TotalNumDocs = state.SegmentInfo.DocCount;
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    IOUtils.CloseWhileHandlingException(TermsOut);
-                }
-            }
-
-            success = false;
-            try
-            {
-                if (state.FieldInfos.HasProx)
-                {
-                    string proxFile = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, "", Lucene3xPostingsFormat.PROX_EXTENSION);
-                    ProxOut = state.Directory.CreateOutput(proxFile, state.Context);
-                }
-                else
-                {
-                    ProxOut = null;
-                }
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    IOUtils.CloseWhileHandlingException(TermsOut, FreqOut);
-                }
-            }
-
-            SkipListWriter = new PreFlexRWSkipListWriter(TermsOut.SkipInterval, TermsOut.MaxSkipLevels, TotalNumDocs, FreqOut, ProxOut);
-            //System.out.println("\nw start seg=" + segment);
-        }
-
-        public override TermsConsumer AddField(FieldInfo field)
-        {
-            Debug.Assert(field.Number != -1);
-            if (field.IndexOptions >= IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
-            {
-                throw new System.NotSupportedException("this codec cannot index offsets");
-            }
-            //System.out.println("w field=" + field.Name + " storePayload=" + field.storePayloads + " number=" + field.number);
-            return new PreFlexTermsWriter(this, field);
-        }
-
-        public override void Dispose()
-        {
-            IOUtils.Close(TermsOut, FreqOut, ProxOut);
-        }
-
-        private class PreFlexTermsWriter : TermsConsumer
-        {
-            internal virtual void InitializeInstanceFields()
-            {
-                postingsWriter = new PostingsWriter(this);
-            }
-
-            private readonly PreFlexRWFieldsWriter OuterInstance;
-
-            internal readonly FieldInfo FieldInfo;
-            internal readonly bool OmitTF;
-            internal readonly bool StorePayloads;
-
-            internal readonly TermInfo TermInfo = new TermInfo();
-            internal PostingsWriter postingsWriter;
-
-            public PreFlexTermsWriter(PreFlexRWFieldsWriter outerInstance, FieldInfo fieldInfo)
-            {
-                this.OuterInstance = outerInstance;
-
-                InitializeInstanceFields();
-                this.FieldInfo = fieldInfo;
-                OmitTF = fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY;
-                StorePayloads = fieldInfo.HasPayloads;
-            }
-
-            internal class PostingsWriter : PostingsConsumer
-            {
-                private readonly PreFlexRWFieldsWriter.PreFlexTermsWriter OuterInstance;
-
-                public PostingsWriter(PreFlexRWFieldsWriter.PreFlexTermsWriter outerInstance)
-                {
-                    this.OuterInstance = outerInstance;
-                }
-
-                internal int LastDocID;
-                internal int LastPayloadLength = -1;
-                internal int LastPosition;
-                internal int Df;
-
-                public PostingsWriter Reset()
-                {
-                    Df = 0;
-                    LastDocID = 0;
-                    LastPayloadLength = -1;
-                    return this;
-                }
-
-                public override void StartDoc(int docID, int termDocFreq)
-                {
-                    //System.out.println("    w doc=" + docID);
-
-                    int delta = docID - LastDocID;
-                    if (docID < 0 || (Df > 0 && delta <= 0))
-                    {
-                        throw new CorruptIndexException("docs out of order (" + docID + " <= " + LastDocID + ")");
-                    }
-
-                    if ((++Df % OuterInstance.OuterInstance.TermsOut.SkipInterval) == 0)
-                    {
-                        OuterInstance.OuterInstance.SkipListWriter.SetSkipData(LastDocID, OuterInstance.StorePayloads, LastPayloadLength);
-                        OuterInstance.OuterInstance.SkipListWriter.BufferSkip(Df);
-                    }
-
-                    LastDocID = docID;
-
-                    Debug.Assert(docID < OuterInstance.OuterInstance.TotalNumDocs, "docID=" + docID + " totalNumDocs=" + OuterInstance.OuterInstance.TotalNumDocs);
-
-                    if (OuterInstance.OmitTF)
-                    {
-                        OuterInstance.OuterInstance.FreqOut.WriteVInt32(delta);
-                    }
-                    else
-                    {
-                        int code = delta << 1;
-                        if (termDocFreq == 1)
-                        {
-                            OuterInstance.OuterInstance.FreqOut.WriteVInt32(code | 1);
-                        }
-                        else
-                        {
-                            OuterInstance.OuterInstance.FreqOut.WriteVInt32(code);
-                            OuterInstance.OuterInstance.FreqOut.WriteVInt32(termDocFreq);
-                        }
-                    }
-                    LastPosition = 0;
-                }
-
-                public override void AddPosition(int position, BytesRef payload, int startOffset, int endOffset)
-                {
-                    Debug.Assert(OuterInstance.OuterInstance.ProxOut != null);
-                    Debug.Assert(startOffset == -1);
-                    Debug.Assert(endOffset == -1);
-                    //System.out.println("      w pos=" + position + " payl=" + payload);
-                    int delta = position - LastPosition;
-                    LastPosition = position;
-
-                    if (OuterInstance.StorePayloads)
-                    {
-                        int payloadLength = payload == null ? 0 : payload.Length;
-                        if (payloadLength != LastPayloadLength)
-                        {
-                            //System.out.println("        write payload len=" + payloadLength);
-                            LastPayloadLength = payloadLength;
-                            OuterInstance.OuterInstance.ProxOut.WriteVInt32((delta << 1) | 1);
-                            OuterInstance.OuterInstance.ProxOut.WriteVInt32(payloadLength);
-                        }
-                        else
-                        {
-                            OuterInstance.OuterInstance.ProxOut.WriteVInt32(delta << 1);
-                        }
-                        if (payloadLength > 0)
-                        {
-                            OuterInstance.OuterInstance.ProxOut.WriteBytes(payload.Bytes, payload.Offset, payload.Length);
-                        }
-                    }
-                    else
-                    {
-                        OuterInstance.OuterInstance.ProxOut.WriteVInt32(delta);
-                    }
-                }
-
-                public override void FinishDoc()
-                {
-                }
-            }
-
-            public override PostingsConsumer StartTerm(BytesRef text)
-            {
-                //System.out.println("  w term=" + text.utf8ToString());
-                OuterInstance.SkipListWriter.ResetSkip();
-                TermInfo.FreqPointer = OuterInstance.FreqOut.FilePointer;
-                if (OuterInstance.ProxOut != null)
-                {
-                    TermInfo.ProxPointer = OuterInstance.ProxOut.FilePointer;
-                }
-                return postingsWriter.Reset();
-            }
-
-            public override void FinishTerm(BytesRef text, TermStats stats)
-            {
-                if (stats.DocFreq > 0)
-                {
-                    long skipPointer = OuterInstance.SkipListWriter.WriteSkip(OuterInstance.FreqOut);
-                    TermInfo.DocFreq = stats.DocFreq;
-                    TermInfo.SkipOffset = (int)(skipPointer - TermInfo.FreqPointer);
-                    //System.out.println("  w finish term=" + text.utf8ToString() + " fnum=" + fieldInfo.number);
-                    OuterInstance.TermsOut.Add(FieldInfo.Number, text, TermInfo);
-                }
-            }
-
-            public override void Finish(long sumTotalTermCount, long sumDocFreq, int docCount)
-            {
-            }
-
-            public override IComparer<BytesRef> Comparer
-            {
-                get
-                {
-                    return BytesRef.UTF8SortedAsUTF16Comparer;
-                }
-            }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file
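
The StartDoc encoding above packs the common "term frequency == 1" case into
the low bit of the doc delta. A distilled sketch with worked values
(WriteDocAndFreq is a hypothetical helper, not the shipped writer):

    static void WriteDocAndFreq(Lucene.Net.Store.IndexOutput freqOut, int delta, int termDocFreq)
    {
        int code = delta << 1;
        if (termDocFreq == 1)
        {
            freqOut.WriteVInt32(code | 1);    // delta 3, tf 1 -> single VInt 7
        }
        else
        {
            freqOut.WriteVInt32(code);        // delta 3, tf 5 -> VInt 6 ...
            freqOut.WriteVInt32(termDocFreq); // ... followed by VInt 5
        }
    }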

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWNormsConsumer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWNormsConsumer.cs b/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWNormsConsumer.cs
deleted file mode 100644
index 2a91121..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWNormsConsumer.cs
+++ /dev/null
@@ -1,116 +0,0 @@
-using System;
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Lucene3x
-{
-    using System.Collections.Generic;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using Directory = Lucene.Net.Store.Directory;
-
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-
-    /// <summary>
-    /// Writes and Merges Lucene 3.x norms format
-    /// @lucene.experimental
-    /// </summary>
-    internal class PreFlexRWNormsConsumer : DocValuesConsumer
-    {
-        /// <summary>
-        /// norms header placeholder </summary>
-        private static readonly sbyte[] NORMS_HEADER = new sbyte[] { (sbyte)'N', (sbyte)'R', (sbyte)'M', -1 };
-
-        /// <summary>
-        /// Extension of norms file </summary>
-        private const string NORMS_EXTENSION = "nrm";
-
-        /// <summary>
-        /// Extension of separate norms file </summary>
-        /// @deprecated Only for reading existing 3.x indexes
-        [Obsolete("Only for reading existing 3.x indexes")]
-        private const string SEPARATE_NORMS_EXTENSION = "s";
-
-        private readonly IndexOutput @out;
-        private int LastFieldNumber = -1; // only for assert
-
-        public PreFlexRWNormsConsumer(Directory directory, string segment, IOContext context)
-        {
-            string normsFileName = IndexFileNames.SegmentFileName(segment, "", NORMS_EXTENSION);
-            bool success = false;
-            IndexOutput output = null;
-            try
-            {
-                output = directory.CreateOutput(normsFileName, context);
-                // output.WriteBytes(NORMS_HEADER, 0, NORMS_HEADER.Length);
-                foreach (var @sbyte in NORMS_HEADER)
-                {
-                    output.WriteByte((byte)@sbyte);
-                }
-                @out = output;
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    IOUtils.CloseWhileHandlingException(output);
-                }
-            }
-        }
-
-        public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
-        {
-            Debug.Assert(field.Number > LastFieldNumber, "writing norms fields out of order: " + LastFieldNumber + " -> " + field.Number);
-            foreach (var n in values)
-            {
-                long value = (long)n; // norms must fit in a single signed byte
-                if (value < sbyte.MinValue || value > sbyte.MaxValue)
-                {
-                    throw new System.NotSupportedException("3.x cannot index norms that won't fit in a byte, got: " + value);
-                }
-                @out.WriteByte((byte)(sbyte)value);
-            }
-            LastFieldNumber = field.Number;
-        }
-
-        protected override void Dispose(bool disposing)
-        {
-            if (disposing)
-                IOUtils.Close(@out);
-        }
-
-        public override void AddBinaryField(FieldInfo field, IEnumerable<BytesRef> values)
-        {
-            throw new InvalidOperationException();
-        }
-
-        public override void AddSortedField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd)
-        {
-            throw new InvalidOperationException();
-        }
-
-        public override void AddSortedSetField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
-        {
-            throw new InvalidOperationException();
-        }
-    }
-}
\ No newline at end of file
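
The consumer above stores exactly one byte per document norm, so any value
outside the signed-byte range is rejected. A distilled sketch of that guard
(EncodeNorm is a hypothetical helper; the check matches AddNumericField):

    static byte EncodeNorm(long n)
    {
        if (n < sbyte.MinValue || n > sbyte.MaxValue)
        {
            throw new System.NotSupportedException(
                "3.x cannot index norms that won't fit in a byte, got: " + n);
        }
        return (byte)(sbyte)n; // e.g. -1 round-trips as 0xFF on disk
    }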

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWNormsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWNormsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWNormsFormat.cs
deleted file mode 100644
index d85d5d3..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWNormsFormat.cs
+++ /dev/null
@@ -1,35 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene3x
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
-
-    /// <summary>
-    /// @lucene.internal
-    /// @lucene.experimental
-    /// </summary>
-#pragma warning disable 612, 618
-    internal class PreFlexRWNormsFormat : Lucene3xNormsFormat
-    {
-        public override DocValuesConsumer NormsConsumer(SegmentWriteState state)
-        {
-            return new PreFlexRWNormsConsumer(state.Directory, state.SegmentInfo.Name, state.Context);
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWPostingsFormat.cs
deleted file mode 100644
index 962d95c..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWPostingsFormat.cs
+++ /dev/null
@@ -1,87 +0,0 @@
-using System;
-using System.Reflection;
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Lucene3x
-{
-    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-    using SegmentReadState = Lucene.Net.Index.SegmentReadState;
-
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
-
-    /// <summary>
-    /// Postings format, only for testing, that can write and read the
-    /// pre-flex index format.
-    ///
-    /// @lucene.experimental
-    /// </summary>
-#pragma warning disable 612, 618
-    internal class PreFlexRWPostingsFormat : Lucene3xPostingsFormat
-    {
-        public PreFlexRWPostingsFormat()
-        {
-            // NOTE: we impersonate the PreFlex codec so that it can
-            // read the segments we write!
-        }
-
-        public override FieldsConsumer FieldsConsumer(SegmentWriteState state)
-        {
-            return new PreFlexRWFieldsWriter(state);
-        }
-
-        public override FieldsProducer FieldsProducer(SegmentReadState state)
-        {
-            // Whenever IW opens readers, eg for merging, we have to
-            // keep terms order in UTF16:
-
-            return new Lucene3xFieldsAnonymousInnerClassHelper(this, state.Directory, state.FieldInfos, state.SegmentInfo, state.Context, state.TermsIndexDivisor);
-        }
-
-        private class Lucene3xFieldsAnonymousInnerClassHelper : Lucene3xFields
-        {
-            private readonly PreFlexRWPostingsFormat OuterInstance;
-
-            public Lucene3xFieldsAnonymousInnerClassHelper(PreFlexRWPostingsFormat outerInstance, Store.Directory directory, Index.FieldInfos fieldInfos, Index.SegmentInfo segmentInfo, Store.IOContext context, int termsIndexDivisor)
-                : base(directory, fieldInfos, segmentInfo, context, termsIndexDivisor)
-            {
-                this.OuterInstance = outerInstance;
-            }
-
-            protected internal override bool SortTermsByUnicode()
-            {
-                // We carefully peek into the stack trace above us: if
-                // we are part of a "merge", we must sort by UTF16:
-                bool unicodeSortOrder = true;
-
-                if (Util.StackTraceHelper.DoesStackTraceContainMethod("Merge"))
-                {
-                    unicodeSortOrder = false;
-                    if (LuceneTestCase.VERBOSE)
-                    {
-                        Console.WriteLine("NOTE: PreFlexRW codec: forcing legacy UTF16 term sort order");
-                    }
-                }
-
-                return unicodeSortOrder;
-            }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file
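
The SortTermsByUnicode override above is the one place where behavior depends
on the caller: while IndexWriter is merging, terms must stay in the legacy
UTF-16 order. A hedged illustration using the same StackTraceHelper call that
appears in the diff:

    bool inMerge = Lucene.Net.Util.StackTraceHelper.DoesStackTraceContainMethod("Merge");
    bool unicodeSortOrder = !inMerge; // merging -> legacy UTF-16 term order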

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWSegmentInfoFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWSegmentInfoFormat.cs b/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWSegmentInfoFormat.cs
deleted file mode 100644
index 86d7e4d..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWSegmentInfoFormat.cs
+++ /dev/null
@@ -1,37 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene3x
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// @lucene.experimental
-    /// </summary>
-#pragma warning disable 612, 618
-    internal class PreFlexRWSegmentInfoFormat : Lucene3xSegmentInfoFormat
-    {
-        private readonly SegmentInfoWriter Writer = new PreFlexRWSegmentInfoWriter();
-
-        public override SegmentInfoWriter SegmentInfoWriter
-        {
-            get
-            {
-                return Writer;
-            }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWSegmentInfoWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWSegmentInfoWriter.cs b/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWSegmentInfoWriter.cs
deleted file mode 100644
index 3019c51..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWSegmentInfoWriter.cs
+++ /dev/null
@@ -1,47 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene3x
-{
-    using Directory = Lucene.Net.Store.Directory;
-
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
-    using SegmentInfos = Lucene.Net.Index.SegmentInfos;
-
-    /// <summary>
-    /// PreFlex implementation of <seealso cref="SegmentInfoWriter"/>.
-    /// @lucene.experimental
-    /// </summary>
-#pragma warning disable 612, 618
-    internal class PreFlexRWSegmentInfoWriter : SegmentInfoWriter
-    {
-        // NOTE: this is not "really" 3.x format, because we are
-        // writing each SI to its own file, vs 3.x where the list
-        // of segments and SI for each segment is written into a
-        // single segments_N file
-
-        /// <summary>
-        /// Save a single segment's info. </summary>
-        public override void Write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext)
-        {
-            SegmentInfos.Write3xInfo(dir, si, ioContext);
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWSkipListWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWSkipListWriter.cs b/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWSkipListWriter.cs
deleted file mode 100644
index 0ce2d24..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWSkipListWriter.cs
+++ /dev/null
@@ -1,138 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene3x
-{
-    using Lucene.Net.Support;
-
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-
-    /// <summary>
-    /// PreFlexRW skiplist implementation.
-    /// @lucene.experimental
-    /// </summary>
-    public class PreFlexRWSkipListWriter : MultiLevelSkipListWriter
-    {
-        private int[] LastSkipDoc;
-        private int[] LastSkipPayloadLength;
-        private long[] LastSkipFreqPointer;
-        private long[] LastSkipProxPointer;
-
-        private IndexOutput FreqOutput;
-        private IndexOutput ProxOutput;
-
-        private int CurDoc;
-        private bool CurStorePayloads;
-        private int CurPayloadLength;
-        private long CurFreqPointer;
-        private long CurProxPointer;
-
-        public PreFlexRWSkipListWriter(int skipInterval, int numberOfSkipLevels, int docCount, IndexOutput freqOutput, IndexOutput proxOutput)
-            : base(skipInterval, numberOfSkipLevels, docCount)
-        {
-            this.FreqOutput = freqOutput;
-            this.ProxOutput = proxOutput;
-
-            LastSkipDoc = new int[numberOfSkipLevels];
-            LastSkipPayloadLength = new int[numberOfSkipLevels];
-            LastSkipFreqPointer = new long[numberOfSkipLevels];
-            LastSkipProxPointer = new long[numberOfSkipLevels];
-        }
-
-        /// <summary>
-        /// Sets the values for the current skip data.
-        /// </summary>
-        public virtual void SetSkipData(int doc, bool storePayloads, int payloadLength)
-        {
-            this.CurDoc = doc;
-            this.CurStorePayloads = storePayloads;
-            this.CurPayloadLength = payloadLength;
-            this.CurFreqPointer = FreqOutput.FilePointer;
-            if (ProxOutput != null)
-            {
-                this.CurProxPointer = ProxOutput.FilePointer;
-            }
-        }
-
-        public override void ResetSkip()
-        {
-            base.ResetSkip();
-            Arrays.Fill(LastSkipDoc, 0);
-            Arrays.Fill(LastSkipPayloadLength, -1); // we don't have to write the first length in the skip list
-            Arrays.Fill(LastSkipFreqPointer, FreqOutput.FilePointer);
-            if (ProxOutput != null)
-            {
-                Arrays.Fill(LastSkipProxPointer, ProxOutput.FilePointer);
-            }
-        }
-
-        protected override void WriteSkipData(int level, IndexOutput skipBuffer)
-        {
-            // To efficiently store payloads in the posting lists we do not store the length of
-            // every payload. Instead we omit the length for a payload if the previous payload had
-            // the same length.
-            // However, in order to support skipping the payload length at every skip point must be known.
-            // So we use the same length encoding that we use for the posting lists for the skip data as well:
-            // Case 1: current field does not store payloads
-            //           SkipDatum                 --> DocSkip, FreqSkip, ProxSkip
-            //           DocSkip,FreqSkip,ProxSkip --> VInt
-            //           DocSkip records the document number before every SkipInterval-th document in TermFreqs.
-            //           Document numbers are represented as differences from the previous value in the sequence.
-            // Case 2: current field stores payloads
-            //           SkipDatum                 --> DocSkip, PayloadLength?, FreqSkip,ProxSkip
-            //           DocSkip,FreqSkip,ProxSkip --> VInt
-            //           PayloadLength             --> VInt
-            //         In this case DocSkip/2 is the difference between
-            //         the current and the previous value. If DocSkip
-            //         is odd, then a PayloadLength encoded as VInt follows,
-            //         if DocSkip is even, then it is assumed that the
-            //         current payload length equals the length at the previous
-            //         skip point
-            if (CurStorePayloads)
-            {
-                int delta = CurDoc - LastSkipDoc[level];
-                if (CurPayloadLength == LastSkipPayloadLength[level])
-                {
-                    // the current payload length equals the length at the previous skip point,
-                    // so we don't store the length again
-                    skipBuffer.WriteVInt32(delta * 2);
-                }
-                else
-                {
-                    // the payload length is different from the previous one. We shift the DocSkip,
-                    // set the lowest bit and store the current payload length as VInt.
-                    skipBuffer.WriteVInt32(delta * 2 + 1);
-                    skipBuffer.WriteVInt32(CurPayloadLength);
-                    LastSkipPayloadLength[level] = CurPayloadLength;
-                }
-            }
-            else
-            {
-                // current field does not store payloads
-                skipBuffer.WriteVInt32(CurDoc - LastSkipDoc[level]);
-            }
-
-            skipBuffer.WriteVInt32((int)(CurFreqPointer - LastSkipFreqPointer[level]));
-            skipBuffer.WriteVInt32((int)(CurProxPointer - LastSkipProxPointer[level]));
-
-            LastSkipDoc[level] = CurDoc;
-
-            LastSkipFreqPointer[level] = CurFreqPointer;
-            LastSkipProxPointer[level] = CurProxPointer;
-        }
-    }
-}
\ No newline at end of file
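
Worked values for the DocSkip encoding documented in WriteSkipData above
(WriteDocSkip is a hypothetical distillation of the payload branch):

    static void WriteDocSkip(Lucene.Net.Store.IndexOutput skipBuffer,
                             bool storePayloads, int delta,
                             int payloadLength, int lastPayloadLength)
    {
        if (!storePayloads)
        {
            skipBuffer.WriteVInt32(delta);         // delta 8 -> VInt 8
        }
        else if (payloadLength == lastPayloadLength)
        {
            skipBuffer.WriteVInt32(delta * 2);     // even: reuse last length; 8 -> VInt 16
        }
        else
        {
            skipBuffer.WriteVInt32(delta * 2 + 1); // odd: a new length follows; 8 -> VInt 17
            skipBuffer.WriteVInt32(payloadLength); // e.g. VInt 5
        }
    }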

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWStoredFieldsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWStoredFieldsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWStoredFieldsFormat.cs
deleted file mode 100644
index 63ffc4a..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWStoredFieldsFormat.cs
+++ /dev/null
@@ -1,34 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene3x
-{
-    using Directory = Lucene.Net.Store.Directory;
-    using IOContext = Lucene.Net.Store.IOContext;
-
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
-
-#pragma warning disable 612, 618
-    internal class PreFlexRWStoredFieldsFormat : Lucene3xStoredFieldsFormat
-    {
-        public override StoredFieldsWriter FieldsWriter(Directory directory, SegmentInfo segmentInfo, IOContext context)
-        {
-            return new PreFlexRWStoredFieldsWriter(directory, segmentInfo.Name, context);
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWStoredFieldsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWStoredFieldsWriter.cs b/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWStoredFieldsWriter.cs
deleted file mode 100644
index 628564a..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWStoredFieldsWriter.cs
+++ /dev/null
@@ -1,214 +0,0 @@
-using System;
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Lucene3x
-{
-    using Lucene.Net.Support;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using Directory = Lucene.Net.Store.Directory;
-
-    /// <summary>
-    /// Copyright 2004 The Apache Software Foundation
-    ///
-    /// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-    /// use this file except in compliance with the License. You may obtain a copy of
-    /// the License at
-    ///
-    /// http://www.apache.org/licenses/LICENSE-2.0
-    ///
-    /// Unless required by applicable law or agreed to in writing, software
-    /// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-    /// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-    /// License for the specific language governing permissions and limitations under
-    /// the License.
-    /// </summary>
-
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IIndexableField = Lucene.Net.Index.IIndexableField;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-
-    /// <summary>
-    /// @lucene.experimental </summary>
-#pragma warning disable 612, 618
-    internal sealed class PreFlexRWStoredFieldsWriter : StoredFieldsWriter
-    {
-        private readonly Directory Directory;
-        private readonly string Segment;
-        private IndexOutput FieldsStream;
-        private IndexOutput IndexStream;
-
-        public PreFlexRWStoredFieldsWriter(Directory directory, string segment, IOContext context)
-        {
-            Debug.Assert(directory != null);
-            this.Directory = directory;
-            this.Segment = segment;
-
-            bool success = false;
-            try
-            {
-                FieldsStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xStoredFieldsReader.FIELDS_EXTENSION), context);
-                IndexStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xStoredFieldsReader.FIELDS_INDEX_EXTENSION), context);
-
-                FieldsStream.WriteInt32(Lucene3xStoredFieldsReader.FORMAT_CURRENT);
-                IndexStream.WriteInt32(Lucene3xStoredFieldsReader.FORMAT_CURRENT);
-
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    Abort();
-                }
-            }
-        }
-
-        // Writes the contents of buffer into the fields stream
-        // and adds a new entry for this document into the index
-        // stream.  this assumes the buffer was already written
-        // in the correct fields format.
-        public override void StartDocument(int numStoredFields)
-        {
-            IndexStream.WriteInt64(FieldsStream.FilePointer);
-            FieldsStream.WriteVInt32(numStoredFields);
-        }
-
-        protected override void Dispose(bool disposing)
-        {
-            if (disposing)
-            {
-                try
-                {
-                    IOUtils.Close(FieldsStream, IndexStream);
-                }
-                finally
-                {
-                    FieldsStream = IndexStream = null;
-                }
-            }
-        }
-
-        public override void Abort()
-        {
-            try
-            {
-                Dispose();
-            }
-#pragma warning disable 168
-            catch (Exception ignored)
-#pragma warning restore 168
-            {
-            }
-            IOUtils.DeleteFilesIgnoringExceptions(Directory, IndexFileNames.SegmentFileName(Segment, "", Lucene3xStoredFieldsReader.FIELDS_EXTENSION), IndexFileNames.SegmentFileName(Segment, "", Lucene3xStoredFieldsReader.FIELDS_INDEX_EXTENSION));
-        }
-
-        public override void WriteField(FieldInfo info, IIndexableField field)
-        {
-            FieldsStream.WriteVInt32(info.Number);
-            int bits = 0;
-            BytesRef bytes;
-            string @string;
-            // TODO: maybe a field should serialize itself?
-            // this way we don't bake into indexer all these
-            // specific encodings for different fields?  and apps
-            // can customize...
-
-            object number = field.GetNumericValue();
-            if (number != null)
-            {
-                if (number is sbyte? || number is short? || number is int?)
-                {
-                    bits |= Lucene3xStoredFieldsReader.FIELD_IS_NUMERIC_INT;
-                }
-                else if (number is long?)
-                {
-                    bits |= Lucene3xStoredFieldsReader.FIELD_IS_NUMERIC_LONG;
-                }
-                else if (number is float?)
-                {
-                    bits |= Lucene3xStoredFieldsReader.FIELD_IS_NUMERIC_FLOAT;
-                }
-                else if (number is double?)
-                {
-                    bits |= Lucene3xStoredFieldsReader.FIELD_IS_NUMERIC_DOUBLE;
-                }
-                else
-                {
-                    throw new System.ArgumentException("cannot store numeric type " + number.GetType());
-                }
-                @string = null;
-                bytes = null;
-            }
-            else
-            {
-                bytes = field.GetBinaryValue();
-                if (bytes != null)
-                {
-                    bits |= Lucene3xStoredFieldsReader.FIELD_IS_BINARY;
-                    @string = null;
-                }
-                else
-                {
-                    @string = field.GetStringValue();
-                    if (@string == null)
-                    {
-                        throw new System.ArgumentException("field " + field.Name + " is stored but does not have binaryValue, stringValue nor numericValue");
-                    }
-                }
-            }
-
-            FieldsStream.WriteByte((byte)(sbyte)bits);
-
-            if (bytes != null)
-            {
-                FieldsStream.WriteVInt32(bytes.Length);
-                FieldsStream.WriteBytes(bytes.Bytes, bytes.Offset, bytes.Length);
-            }
-            else if (@string != null)
-            {
-                FieldsStream.WriteString(field.GetStringValue());
-            }
-            else
-            {
-                if (number is sbyte? || number is short? || number is int?)
-                {
-                    FieldsStream.WriteInt32((int)number);
-                }
-                else if (number is long?)
-                {
-                    FieldsStream.WriteInt64((long)number);
-                }
-                else if (number is float?)
-                {
-                    FieldsStream.WriteInt32(Number.SingleToInt32Bits((float)number));
-                }
-                else if (number is double?)
-                {
-                    FieldsStream.WriteInt64(BitConverter.DoubleToInt64Bits((double)number));
-                }
-                else
-                {
-                    Debug.Assert(false);
-                }
-            }
-        }
-
-        public override void Finish(FieldInfos fis, int numDocs)
-        {
-            // this is most likely a bug in Sun JRE 1.6.0_04/_05;
-            // we detect that the bug has struck, here, and
-            // throw an exception to prevent the corruption from
-            // entering the index.  See LUCENE-1282 for
-            // details.
-            if (4 + ((long)numDocs) * 8 != IndexStream.FilePointer)
-            {
-                throw new Exception("fdx size mismatch: docCount is " + numDocs + " but fdx file size is " + IndexStream.FilePointer + " file=" + IndexStream.ToString() + "; now aborting this merge to prevent index corruption");
-            }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/8304ca82/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWTermVectorsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWTermVectorsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWTermVectorsFormat.cs
deleted file mode 100644
index 871ee07..0000000
--- a/src/Lucene.Net.TestFramework/Codecs/lucene3x/PreFlexRWTermVectorsFormat.cs
+++ /dev/null
@@ -1,74 +0,0 @@
-using System;
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Lucene3x
-{
-    using Directory = Lucene.Net.Store.Directory;
-
-    /*
-         * Licensed to the Apache Software Foundation (ASF) under one or more
-         * contributor license agreements.  See the NOTICE file distributed with
-         * this work for additional information regarding copyright ownership.
-         * The ASF licenses this file to You under the Apache License, Version 2.0
-         * (the "License"); you may not use this file except in compliance with
-         * the License.  You may obtain a copy of the License at
-         *
-         *     http://www.apache.org/licenses/LICENSE-2.0
-         *
-         * Unless required by applicable law or agreed to in writing, software
-         * distributed under the License is distributed on an "AS IS" BASIS,
-         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-         * See the License for the specific language governing permissions and
-         * limitations under the License.
-         */
-
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
-
-#pragma warning disable 612, 618
-    internal class PreFlexRWTermVectorsFormat : Lucene3xTermVectorsFormat
-    {
-        public override TermVectorsWriter VectorsWriter(Directory directory, SegmentInfo segmentInfo, IOContext context)
-        {
-            return new PreFlexRWTermVectorsWriter(directory, segmentInfo.Name, context);
-        }
-
-        public override TermVectorsReader VectorsReader(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context)
-        {
-            return new Lucene3xTermVectorsReaderAnonymousInnerClassHelper(this, directory, segmentInfo, fieldInfos, context);
-        }
-
-        private class Lucene3xTermVectorsReaderAnonymousInnerClassHelper : Lucene3xTermVectorsReader
-        {
-            private readonly PreFlexRWTermVectorsFormat OuterInstance;
-
-            public Lucene3xTermVectorsReaderAnonymousInnerClassHelper(PreFlexRWTermVectorsFormat outerInstance, Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context)
-                : base(directory, segmentInfo, fieldInfos, context)
-            {
-                this.OuterInstance = outerInstance;
-            }
-
-            protected internal override bool SortTermsByUnicode()
-            {
-                // We carefully peek into the stack trace above us: if
-                // we are part of a "merge", we must sort by UTF16:
-                bool unicodeSortOrder = true;
-
-                if (Util.StackTraceHelper.DoesStackTraceContainMethod("Merge"))
-                {
-                    unicodeSortOrder = false;
-                    if (LuceneTestCase.VERBOSE)
-                    {
-                        Console.WriteLine("NOTE: PreFlexRW codec: forcing legacy UTF16 vector term sort order");
-                    }
-                }
-
-                return unicodeSortOrder;
-            }
-        }
-    }
-#pragma warning restore 612, 618
-}
\ No newline at end of file


[04/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

Posted by ni...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestFuzzyQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestFuzzyQuery.cs b/src/Lucene.Net.Tests/Search/TestFuzzyQuery.cs
new file mode 100644
index 0000000..0ead1d4
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestFuzzyQuery.cs
@@ -0,0 +1,389 @@
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+using Lucene.Net.Util;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using MultiReader = Lucene.Net.Index.MultiReader;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Tests <seealso cref="FuzzyQuery"/>.
+    /// </summary>
+    [TestFixture]
+    public class TestFuzzyQuery : LuceneTestCaseWithReducedFloatPrecision
+    {
+        [Test]
+        public virtual void TestFuzziness()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);
+            AddDoc("aaaaa", writer);
+            AddDoc("aaaab", writer);
+            AddDoc("aaabb", writer);
+            AddDoc("aabbb", writer);
+            AddDoc("abbbb", writer);
+            AddDoc("bbbbb", writer);
+            AddDoc("ddddd", writer);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+            writer.Dispose();
+
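+            // FuzzyQuery(Term term, int maxEdits, int prefixLength): DefaultMaxEdits
+            // is 2, so "aaaaa" matches itself plus "aaaab" (1 edit) and "aaabb" (2 edits).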
+            FuzzyQuery query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.DefaultMaxEdits, 0);
+            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+
+            // same with prefix
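+            // prefixLength pins that many leading characters to exact matches; all
+            // three hits share the prefix "aaa", so counts only change once the
+            // prefix requirement exceeds that shared run: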
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.DefaultMaxEdits, 1);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.DefaultMaxEdits, 2);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.DefaultMaxEdits, 3);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.DefaultMaxEdits, 4);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.DefaultMaxEdits, 5);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.DefaultMaxEdits, 6);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+
+            // test scoring
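+            // (fewer edits means higher similarity, so the exact match ranks first)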
+            query = new FuzzyQuery(new Term("field", "bbbbb"), FuzzyQuery.DefaultMaxEdits, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length, "3 documents should match");
+            IList<string> order = Arrays.AsList("bbbbb", "abbbb", "aabbb");
+            for (int i = 0; i < hits.Length; i++)
+            {
+                string term = searcher.Doc(hits[i].Doc).Get("field");
+                //System.out.println(hits[i].Score);
+                Assert.AreEqual(order[i], term);
+            }
+
+            // test pq size by supplying maxExpansions=2
+            // this query would normally return 3 documents, because 3 terms match (see above):
+            query = new FuzzyQuery(new Term("field", "bbbbb"), FuzzyQuery.DefaultMaxEdits, 0, 2, false);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length, "only 2 documents should match");
+            order = Arrays.AsList("bbbbb", "abbbb");
+            for (int i = 0; i < hits.Length; i++)
+            {
+                string term = searcher.Doc(hits[i].Doc).Get("field");
+                //System.out.println(hits[i].Score);
+                Assert.AreEqual(order[i], term);
+            }
+
+            // not similar enough:
+            query = new FuzzyQuery(new Term("field", "xxxxx"), FuzzyQuery.DefaultMaxEdits, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaccc"), FuzzyQuery.DefaultMaxEdits, 0); // edit distance to "aaaaa" = 3
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+
+            // query identical to a word in the index:
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.DefaultMaxEdits, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual("aaaaa", searcher.Doc(hits[0].Doc).Get("field"));
+            // default allows for up to two edits:
+            Assert.AreEqual("aaaab", searcher.Doc(hits[1].Doc).Get("field"));
+            Assert.AreEqual("aaabb", searcher.Doc(hits[2].Doc).Get("field"));
+
+            // query similar to a word in the index:
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.DefaultMaxEdits, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual("aaaaa", searcher.Doc(hits[0].Doc).Get("field"));
+            Assert.AreEqual("aaaab", searcher.Doc(hits[1].Doc).Get("field"));
+            Assert.AreEqual("aaabb", searcher.Doc(hits[2].Doc).Get("field"));
+
+            // now with prefix
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.DefaultMaxEdits, 1);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual("aaaaa", searcher.Doc(hits[0].Doc).Get("field"));
+            Assert.AreEqual("aaaab", searcher.Doc(hits[1].Doc).Get("field"));
+            Assert.AreEqual("aaabb", searcher.Doc(hits[2].Doc).Get("field"));
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.DefaultMaxEdits, 2);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual("aaaaa", searcher.Doc(hits[0].Doc).Get("field"));
+            Assert.AreEqual("aaaab", searcher.Doc(hits[1].Doc).Get("field"));
+            Assert.AreEqual("aaabb", searcher.Doc(hits[2].Doc).Get("field"));
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.DefaultMaxEdits, 3);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual("aaaaa", searcher.Doc(hits[0].Doc).Get("field"));
+            Assert.AreEqual("aaaab", searcher.Doc(hits[1].Doc).Get("field"));
+            Assert.AreEqual("aaabb", searcher.Doc(hits[2].Doc).Get("field"));
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.DefaultMaxEdits, 4);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            Assert.AreEqual("aaaaa", searcher.Doc(hits[0].Doc).Get("field"));
+            Assert.AreEqual("aaaab", searcher.Doc(hits[1].Doc).Get("field"));
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.DefaultMaxEdits, 5);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.DefaultMaxEdits, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual("ddddd", searcher.Doc(hits[0].Doc).Get("field"));
+
+            // now with prefix
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.DefaultMaxEdits, 1);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual("ddddd", searcher.Doc(hits[0].Doc).Get("field"));
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.DefaultMaxEdits, 2);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual("ddddd", searcher.Doc(hits[0].Doc).Get("field"));
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.DefaultMaxEdits, 3);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual("ddddd", searcher.Doc(hits[0].Doc).Get("field"));
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.DefaultMaxEdits, 4);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual("ddddd", searcher.Doc(hits[0].Doc).Get("field"));
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.DefaultMaxEdits, 5);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+
+            // different field = no match:
+            query = new FuzzyQuery(new Term("anotherfield", "ddddX"), FuzzyQuery.DefaultMaxEdits, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void Test2()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false), Similarity, TimeZone);
+            AddDoc("LANGE", writer);
+            AddDoc("LUETH", writer);
+            AddDoc("PIRSING", writer);
+            AddDoc("RIEGEL", writer);
+            AddDoc("TRZECZIAK", writer);
+            AddDoc("WALKER", writer);
+            AddDoc("WBR", writer);
+            AddDoc("WE", writer);
+            AddDoc("WEB", writer);
+            AddDoc("WEBE", writer);
+            AddDoc("WEBER", writer);
+            AddDoc("WEBERE", writer);
+            AddDoc("WEBREE", writer);
+            AddDoc("WEBEREI", writer);
+            AddDoc("WBRE", writer);
+            AddDoc("WITTKOPF", writer);
+            AddDoc("WOJNAROWSKI", writer);
+            AddDoc("WRICKE", writer);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+            writer.Dispose();
+
+            FuzzyQuery query = new FuzzyQuery(new Term("field", "WEBER"), 2, 1);
+            //query.setRewriteMethod(FuzzyQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(8, hits.Length);
+
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        /// <summary>
+        /// MultiTermQuery provides (via attribute) information about which values
+        /// must be competitive to enter the priority queue.
+        ///
+        /// FuzzyQuery optimizes itself around this information; if the attribute
+        /// is not implemented correctly, there will be problems!
+        /// </summary>
+        [Test]
+        public virtual void TestTieBreaker()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);
+            AddDoc("a123456", writer);
+            AddDoc("c123456", writer);
+            AddDoc("d123456", writer);
+            AddDoc("e123456", writer);
+
+            Directory directory2 = NewDirectory();
+            RandomIndexWriter writer2 = new RandomIndexWriter(Random(), directory2, Similarity, TimeZone);
+            AddDoc("a123456", writer2);
+            AddDoc("b123456", writer2);
+            AddDoc("b123456", writer2);
+            AddDoc("b123456", writer2);
+            AddDoc("c123456", writer2);
+            AddDoc("f123456", writer2);
+
+            IndexReader ir1 = writer.Reader;
+            IndexReader ir2 = writer2.Reader;
+
+            MultiReader mr = new MultiReader(ir1, ir2);
+            IndexSearcher searcher = NewSearcher(mr);
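+            // FuzzyQuery(term, maxEdits, prefixLength, maxExpansions, transpositions):
+            // maxExpansions = 2 means only the two most competitive terms across both
+            // sub-readers ("a123456" and "b123456") may enter the priority queue.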
+            FuzzyQuery fq = new FuzzyQuery(new Term("field", "z123456"), 1, 0, 2, false);
+            TopDocs docs = searcher.Search(fq, 2);
+            Assert.AreEqual(5, docs.TotalHits); // 5 docs, from the a and b's
+            mr.Dispose();
+            ir1.Dispose();
+            ir2.Dispose();
+            writer.Dispose();
+            writer2.Dispose();
+            directory.Dispose();
+            directory2.Dispose();
+        }
+
+        /// <summary>
+        /// Test the TopTermsBoostOnlyBooleanQueryRewrite rewrite method. </summary>
+        [Test]
+        public virtual void TestBoostOnlyRewrite()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);
+            AddDoc("Lucene", writer);
+            AddDoc("Lucene", writer);
+            AddDoc("Lucenne", writer);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+            writer.Dispose();
+
+            FuzzyQuery query = new FuzzyQuery(new Term("field", "lucene"));
+            query.MultiTermRewriteMethod = (new MultiTermQuery.TopTermsBoostOnlyBooleanQueryRewrite(50));
+            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            // normally, 'Lucenne' would be the first result as IDF will skew the score.
+            Assert.AreEqual("Lucene", reader.Document(hits[0].Doc).Get("field"));
+            Assert.AreEqual("Lucene", reader.Document(hits[1].Doc).Get("field"));
+            Assert.AreEqual("Lucenne", reader.Document(hits[2].Doc).Get("field"));
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestGiga()
+        {
+            MockAnalyzer analyzer = new MockAnalyzer(Random());
+            Directory index = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), index, Similarity, TimeZone);
+
+            AddDoc("Lucene in Action", w);
+            AddDoc("Lucene for Dummies", w);
+
+            //addDoc("Giga", w);
+            AddDoc("Giga byte", w);
+
+            AddDoc("ManagingGigabytesManagingGigabyte", w);
+            AddDoc("ManagingGigabytesManagingGigabytes", w);
+
+            AddDoc("The Art of Computer Science", w);
+            AddDoc("J. K. Rowling", w);
+            AddDoc("JK Rowling", w);
+            AddDoc("Joanne K Roling", w);
+            AddDoc("Bruce Willis", w);
+            AddDoc("Willis bruce", w);
+            AddDoc("Brute willis", w);
+            AddDoc("B. willis", w);
+            IndexReader r = w.Reader;
+            w.Dispose();
+
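+            // maxEdits = 0 degenerates to an exact term match; "giga" still hits
+            // "Giga byte" because the default test analyzer lowercases at index time.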
+            Query q = new FuzzyQuery(new Term("field", "giga"), 0);
+
+            // 3. search
+            IndexSearcher searcher = NewSearcher(r);
+            ScoreDoc[] hits = searcher.Search(q, 10).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual("Giga byte", searcher.Doc(hits[0].Doc).Get("field"));
+            r.Dispose();
+            index.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDistanceAsEditsSearching()
+        {
+            Directory index = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), index, Similarity, TimeZone);
+            AddDoc("foobar", w);
+            AddDoc("test", w);
+            AddDoc("working", w);
+            IndexReader reader = w.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+            w.Dispose();
+
+            FuzzyQuery q = new FuzzyQuery(new Term("field", "fouba"), 2);
+            ScoreDoc[] hits = searcher.Search(q, 10).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual("foobar", searcher.Doc(hits[0].Doc).Get("field"));
+
+            q = new FuzzyQuery(new Term("field", "foubara"), 2);
+            hits = searcher.Search(q, 10).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual("foobar", searcher.Doc(hits[0].Doc).Get("field"));
+
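+            // maxEdits above LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE (2) is
+            // rejected at construction time: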
+            try
+            {
+                q = new FuzzyQuery(new Term("field", "t"), 3);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            reader.Dispose();
+            index.Dispose();
+        }
+
+        private void AddDoc(string text, RandomIndexWriter writer)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("field", text, Field.Store.YES));
+            writer.AddDocument(doc);
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestIndexSearcher.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestIndexSearcher.cs b/src/Lucene.Net.Tests/Search/TestIndexSearcher.cs
new file mode 100644
index 0000000..d5dce40
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestIndexSearcher.cs
@@ -0,0 +1,145 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+using System.Threading.Tasks;
+
+namespace Lucene.Net.Search
+{
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+
+    [TestFixture]
+    public class TestIndexSearcher : LuceneTestCase
+    {
+        internal Directory Dir;
+        internal IndexReader Reader;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), Dir, Similarity, TimeZone);
+            for (int i = 0; i < 100; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("field", Convert.ToString(i), Field.Store.NO));
+                doc.Add(NewStringField("field2", Convert.ToString(i % 2 == 0), Field.Store.NO));
+                iw.AddDocument(doc);
+            }
+            Reader = iw.Reader;
+            iw.Dispose();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            base.TearDown();
+            Reader.Dispose();
+            Dir.Dispose();
+        }
+
+        // should not throw an exception
+        [Test]
+        public virtual void TestHugeN()
+        {
+            TaskScheduler service = new LimitedConcurrencyLevelTaskScheduler(4);
+
+            IndexSearcher[] searchers = new IndexSearcher[] { new IndexSearcher(Reader), new IndexSearcher(Reader, service) };
+            Query[] queries = new Query[] { new MatchAllDocsQuery(), new TermQuery(new Term("field", "1")) };
+            Sort[] sorts = new Sort[] { null, new Sort(new SortField("field2", SortFieldType.STRING)) };
+            Filter[] filters = new Filter[] { null, new QueryWrapperFilter(new TermQuery(new Term("field2", "true"))) };
+            ScoreDoc[] afters = new ScoreDoc[] { null, new FieldDoc(0, 0f, new object[] { new BytesRef("boo!") }) };
+
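+            // every call below passes n = int.MaxValue; the searcher is expected to
+            // clamp its priority queue to the index's maxDoc rather than trying to
+            // allocate a 2^31-entry heap.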
+            foreach (IndexSearcher searcher in searchers)
+            {
+                foreach (ScoreDoc after in afters)
+                {
+                    foreach (Query query in queries)
+                    {
+                        foreach (Sort sort in sorts)
+                        {
+                            foreach (Filter filter in filters)
+                            {
+                                searcher.Search(query, int.MaxValue);
+                                searcher.SearchAfter(after, query, int.MaxValue);
+                                searcher.Search(query, filter, int.MaxValue);
+                                searcher.SearchAfter(after, query, filter, int.MaxValue);
+                                if (sort != null)
+                                {
+                                    searcher.Search(query, int.MaxValue, sort);
+                                    searcher.Search(query, filter, int.MaxValue, sort);
+                                    searcher.Search(query, filter, int.MaxValue, sort, true, true);
+                                    searcher.Search(query, filter, int.MaxValue, sort, true, false);
+                                    searcher.Search(query, filter, int.MaxValue, sort, false, true);
+                                    searcher.Search(query, filter, int.MaxValue, sort, false, false);
+                                    searcher.SearchAfter(after, query, filter, int.MaxValue, sort);
+                                    searcher.SearchAfter(after, query, filter, int.MaxValue, sort, true, true);
+                                    searcher.SearchAfter(after, query, filter, int.MaxValue, sort, true, false);
+                                    searcher.SearchAfter(after, query, filter, int.MaxValue, sort, false, true);
+                                    searcher.SearchAfter(after, query, filter, int.MaxValue, sort, false, false);
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+
+            TestUtil.ShutdownExecutorService(service);
+        }
+
+        [Test]
+        public virtual void TestSearchAfterPassedMaxDoc()
+        {
+            // LUCENE-5128: ensure we get a meaningful message if searchAfter exceeds maxDoc
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            w.AddDocument(new Document());
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            IndexSearcher s = new IndexSearcher(r);
+            try
+            {
+                s.SearchAfter(new ScoreDoc(r.MaxDoc, 0.54f), new MatchAllDocsQuery(), 10);
+                Assert.Fail("should have hit IllegalArgumentException when searchAfter exceeds maxDoc");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // ok
+            }
+            finally
+            {
+                IOUtils.Close(r, dir);
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs b/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs
new file mode 100644
index 0000000..9cd902c
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs
@@ -0,0 +1,245 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Support;
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.Threading;
+
+namespace Lucene.Net.Search
+{
+    using Index;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using Int32Field = Int32Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using StringField = StringField;
+    using Term = Lucene.Net.Index.Term;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestLiveFieldValues : LuceneTestCase
+    {
+        [Test]
+        public virtual void Test()
+        {
+            Directory dir = NewFSDirectory(CreateTempDir("livefieldupdates"));
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+
+            IndexWriter w = new IndexWriter(dir, iwc);
+
+            SearcherManager mgr = new SearcherManager(w, true, new SearcherFactoryAnonymousInnerClassHelper());
+
+            const int missing = -1;
+
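+            // LiveFieldValues tracks id -> value for documents that have been indexed
+            // but are not yet visible to the current searcher; once a refresh makes
+            // them searchable, lookups fall through to LookupFromSearcher below.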
+            LiveFieldValues<IndexSearcher, int?> rt = new LiveFieldValuesAnonymousInnerClassHelper(mgr, missing);
+
+            int numThreads = TestUtil.NextInt(Random(), 2, 5);
+            if (VERBOSE)
+            {
+                Console.WriteLine(numThreads + " threads");
+            }
+
+            CountdownEvent startingGun = new CountdownEvent(1);
+            IList<ThreadClass> threads = new List<ThreadClass>();
+
+            int iters = AtLeast(1000);
+            int idCount = TestUtil.NextInt(Random(), 100, 10000);
+
+            double reopenChance = Random().NextDouble() * 0.01;
+            double deleteChance = Random().NextDouble() * 0.25;
+            double addChance = Random().NextDouble() * 0.5;
+
+            for (int t = 0; t < numThreads; t++)
+            {
+                int threadID = t;
+                Random threadRandom = new Random(Random().Next());
+                ThreadClass thread = new ThreadAnonymousInnerClassHelper(w, mgr, missing, rt, startingGun, iters, idCount, reopenChance, deleteChance, addChance, t, threadID, threadRandom);
+                threads.Add(thread);
+                thread.Start();
+            }
+
+            startingGun.Signal();
+
+            foreach (ThreadClass thread in threads)
+            {
+                thread.Join();
+            }
+            mgr.MaybeRefresh();
+            Assert.AreEqual(0, rt.Count);
+
+            rt.Dispose();
+            mgr.Dispose();
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        private class SearcherFactoryAnonymousInnerClassHelper : SearcherFactory
+        {
+            public override IndexSearcher NewSearcher(IndexReader r)
+            {
+                return new IndexSearcher(r);
+            }
+        }
+
+        private class LiveFieldValuesAnonymousInnerClassHelper : LiveFieldValues<IndexSearcher, int?>
+        {
+            public LiveFieldValuesAnonymousInnerClassHelper(SearcherManager mgr, int missing)
+                : base(mgr, missing)
+            {
+            }
+
+            protected override int? LookupFromSearcher(IndexSearcher s, string id)
+            {
+                TermQuery tq = new TermQuery(new Term("id", id));
+                TopDocs hits = s.Search(tq, 1);
+                Assert.IsTrue(hits.TotalHits <= 1);
+                if (hits.TotalHits == 0)
+                {
+                    return null;
+                }
+                else
+                {
+                    Document doc = s.Doc(hits.ScoreDocs[0].Doc);
+                    return (int)doc.GetField("field").GetNumericValue();
+                }
+            }
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private IndexWriter w;
+            private SearcherManager Mgr;
+            private int? Missing;
+            private LiveFieldValues<IndexSearcher, int?> Rt;
+            private CountdownEvent StartingGun;
+            private int Iters;
+            private int IdCount;
+            private double ReopenChance;
+            private double DeleteChance;
+            private double AddChance;
+            private int t;
+            private int ThreadID;
+            private Random ThreadRandom;
+
+            public ThreadAnonymousInnerClassHelper(IndexWriter w, SearcherManager mgr, int? missing, LiveFieldValues<IndexSearcher, int?> rt, CountdownEvent startingGun, int iters, int idCount, double reopenChance, double deleteChance, double addChance, int t, int threadID, Random threadRandom)
+            {
+                this.w = w;
+                this.Mgr = mgr;
+                this.Missing = missing;
+                this.Rt = rt;
+                this.StartingGun = startingGun;
+                this.Iters = iters;
+                this.IdCount = idCount;
+                this.ReopenChance = reopenChance;
+                this.DeleteChance = deleteChance;
+                this.AddChance = addChance;
+                this.t = t;
+                this.ThreadID = threadID;
+                this.ThreadRandom = threadRandom;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    IDictionary<string, int?> values = new Dictionary<string, int?>();
+                    IList<string> allIDs = new SynchronizedList<string>();
+
+                    StartingGun.Wait();
+                    for (int iter = 0; iter < Iters; iter++)
+                    {
+                        // Add/update a document
+                        Document doc = new Document();
+                        // Threads must not update the same id at the
+                        // same time:
+                        if (ThreadRandom.NextDouble() <= AddChance)
+                        {
+                            string id = string.Format(CultureInfo.InvariantCulture, "{0}_{1:X4}", ThreadID, ThreadRandom.Next(IdCount));
+                            int field = ThreadRandom.Next(int.MaxValue);
+                            doc.Add(new StringField("id", id, Field.Store.YES));
+                            doc.Add(new Int32Field("field", (int)field, Field.Store.YES));
+                            w.UpdateDocument(new Term("id", id), doc);
+                            Rt.Add(id, field);
+                            if (!values.ContainsKey(id)) // key didn't exist before
+                            {
+                                allIDs.Add(id);
+                            }
+                            values[id] = field;
+                        }
+
+                        if (allIDs.Count > 0 && ThreadRandom.NextDouble() <= DeleteChance)
+                        {
+                            string randomID = allIDs[ThreadRandom.Next(allIDs.Count)];
+                            w.DeleteDocuments(new Term("id", randomID));
+                            Rt.Delete(randomID);
+                            values[randomID] = Missing;
+                        }
+
+                        if (ThreadRandom.NextDouble() <= ReopenChance || Rt.Count > 10000)
+                        {
+                            //System.out.println("refresh @ " + rt.Size());
+                            Mgr.MaybeRefresh();
+                            if (VERBOSE)
+                            {
+                                IndexSearcher s = Mgr.Acquire();
+                                try
+                                {
+                                    Console.WriteLine("TEST: reopen " + s);
+                                }
+                                finally
+                                {
+                                    Mgr.Release(s);
+                                }
+                                Console.WriteLine("TEST: " + values.Count + " values");
+                            }
+                        }
+
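+                        // an id that no thread ever writes must come back null: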
+                        if (ThreadRandom.Next(10) == 7)
+                        {
+                            Assert.AreEqual(null, Rt.Get("foo"));
+                        }
+
+                        if (allIDs.Count > 0)
+                        {
+                            string randomID = allIDs[ThreadRandom.Next(allIDs.Count)];
+                            int? expected = values[randomID];
+                            if (expected == Missing)
+                            {
+                                expected = null;
+                            }
+                            Assert.AreEqual(expected, Rt.Get(randomID), "id=" + randomID);
+                        }
+                    }
+                }
+                catch (Exception t)
+                {
+                    throw new Exception(t.Message, t);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestMatchAllDocsQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestMatchAllDocsQuery.cs b/src/Lucene.Net.Tests/Search/TestMatchAllDocsQuery.cs
new file mode 100644
index 0000000..3d8dc7e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestMatchAllDocsQuery.cs
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Tests MatchAllDocsQuery.
+    /// </summary>
+    [TestFixture]
+    public class TestMatchAllDocsQuery : LuceneTestCase
+    {
+        private Analyzer Analyzer;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Analyzer = new MockAnalyzer(Random());
+        }
+
+        [Test]
+        public virtual void TestQuery()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter iw = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, Analyzer).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy()));
+            AddDoc("one", iw, 1f);
+            AddDoc("two", iw, 20f);
+            AddDoc("three four", iw, 300f);
+            IndexReader ir = DirectoryReader.Open(iw, true);
+
+            IndexSearcher @is = NewSearcher(ir);
+            ScoreDoc[] hits;
+
+            hits = @is.Search(new MatchAllDocsQuery(), null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual("one", @is.Doc(hits[0].Doc).Get("key"));
+            Assert.AreEqual("two", @is.Doc(hits[1].Doc).Get("key"));
+            Assert.AreEqual("three four", @is.Doc(hits[2].Doc).Get("key"));
+
+            // some artificial queries to trigger the use of skipTo():
+
+            BooleanQuery bq = new BooleanQuery();
+            bq.Add(new MatchAllDocsQuery(), Occur.MUST);
+            bq.Add(new MatchAllDocsQuery(), Occur.MUST);
+            hits = @is.Search(bq, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+
+            bq = new BooleanQuery();
+            bq.Add(new MatchAllDocsQuery(), Occur.MUST);
+            bq.Add(new TermQuery(new Term("key", "three")), Occur.MUST);
+            hits = @is.Search(bq, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+
+            iw.DeleteDocuments(new Term("key", "one"));
+            ir.Dispose();
+            ir = DirectoryReader.Open(iw, true);
+            @is = NewSearcher(ir);
+
+            hits = @is.Search(new MatchAllDocsQuery(), null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+
+            iw.Dispose();
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestEquals()
+        {
+            Query q1 = new MatchAllDocsQuery();
+            Query q2 = new MatchAllDocsQuery();
+            Assert.IsTrue(q1.Equals(q2));
+            q1.Boost = 1.5f;
+            Assert.IsFalse(q1.Equals(q2));
+        }
+
+        private void AddDoc(string text, IndexWriter iw, float boost)
+        {
+            Document doc = new Document();
+            Field f = NewTextField("key", text, Field.Store.YES);
+            f.Boost = boost;
+            doc.Add(f);
+            iw.AddDocument(doc);
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
new file mode 100644
index 0000000..6e27f37
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
@@ -0,0 +1,431 @@
+using System.Linq;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using System.Collections.Generic;
+using System.Diagnostics;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using AtomicReader = Lucene.Net.Index.AtomicReader;
+    using BooleanWeight = Lucene.Net.Search.BooleanQuery.BooleanWeight;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using SimScorer = Lucene.Net.Search.Similarities.Similarity.SimScorer;
+    using SimWeight = Lucene.Net.Search.Similarities.Similarity.SimWeight;
+    using SortedSetDocValues = Lucene.Net.Index.SortedSetDocValues;
+    using SortedSetDocValuesField = SortedSetDocValuesField;
+    using StringField = StringField;
+    using Term = Lucene.Net.Index.Term;
+    using TermContext = Lucene.Net.Index.TermContext;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// tests BooleanScorer2's minShouldMatch </summary>
+    [SuppressCodecs("Appending", "Lucene3x", "Lucene40", "Lucene41")]
+    [TestFixture]
+    public class TestMinShouldMatch2 : LuceneTestCase
+    {
+        internal static Directory Dir;
+        internal static DirectoryReader r;
+        internal static AtomicReader atomicReader;
+        internal static IndexSearcher Searcher;
+
+        internal static readonly string[] AlwaysTerms = new string[] { "a" };
+        internal static readonly string[] CommonTerms = new string[] { "b", "c", "d" };
+        internal static readonly string[] MediumTerms = new string[] { "e", "f", "g" };
+        internal static readonly string[] RareTerms = new string[] { "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z" };
+
+        /// <summary>
+        /// LUCENENET specific: this setup is non-static because Similarity and
+        /// TimeZone are instance members rather than statics.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            Dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), Dir, Similarity, TimeZone);
+            int numDocs = AtLeast(300);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+
+                AddSome(doc, AlwaysTerms);
+
+                if (Random().Next(100) < 90)
+                {
+                    AddSome(doc, CommonTerms);
+                }
+                if (Random().Next(100) < 50)
+                {
+                    AddSome(doc, MediumTerms);
+                }
+                if (Random().Next(100) < 10)
+                {
+                    AddSome(doc, RareTerms);
+                }
+                iw.AddDocument(doc);
+            }
+            iw.ForceMerge(1);
+            iw.Dispose();
+            r = DirectoryReader.Open(Dir);
+            atomicReader = GetOnlySegmentReader(r);
+            Searcher = new IndexSearcher(atomicReader);
+            Searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper();
+        }
+
+        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        {
+            public DefaultSimilarityAnonymousInnerClassHelper()
+            {
+            }
+
+            public override float QueryNorm(float sumOfSquaredWeights)
+            {
+                return 1; // we disable queryNorm, both for debugging and ease of impl
+            }
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            atomicReader.Dispose();
+            Dir.Dispose();
+            Searcher = null;
+            atomicReader = null;
+            r = null;
+            Dir = null;
+        }
+
+        private static void AddSome(Document doc, string[] values)
+        {
+            IList<string> list = Arrays.AsList(values);
+            Collections.Shuffle(list);
+            int howMany = TestUtil.NextInt(Random(), 1, list.Count);
+            for (int i = 0; i < howMany; i++)
+            {
+                doc.Add(new StringField("field", list[i], Field.Store.NO));
+                doc.Add(new SortedSetDocValuesField("dv", new BytesRef(list[i])));
+            }
+        }
+
+        private Scorer Scorer(string[] values, int minShouldMatch, bool slow)
+        {
+            BooleanQuery bq = new BooleanQuery();
+            foreach (string value in values)
+            {
+                bq.Add(new TermQuery(new Term("field", value)), Occur.SHOULD);
+            }
+            bq.MinimumNumberShouldMatch = minShouldMatch;
+
+            BooleanWeight weight = (BooleanWeight)Searcher.CreateNormalizedWeight(bq);
+
+            if (slow)
+            {
+                return new SlowMinShouldMatchScorer(weight, atomicReader, Searcher);
+            }
+            else
+            {
+                return weight.GetScorer((AtomicReaderContext)atomicReader.Context, null);
+            }
+        }
+
+        private void AssertNext(Scorer expected, Scorer actual)
+        {
+            if (actual == null)
+            {
+                Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, expected.NextDoc());
+                return;
+            }
+            int doc;
+            while ((doc = expected.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                Assert.AreEqual(doc, actual.NextDoc());
+                Assert.AreEqual(expected.Freq, actual.Freq);
+                float expectedScore = expected.GetScore();
+                float actualScore = actual.GetScore();
+                Assert.AreEqual(expectedScore, actualScore, CheckHits.ExplainToleranceDelta(expectedScore, actualScore));
+            }
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, actual.NextDoc());
+        }
+
+        private void AssertAdvance(Scorer expected, Scorer actual, int amount)
+        {
+            if (actual == null)
+            {
+                Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, expected.NextDoc());
+                return;
+            }
+            int prevDoc = 0;
+            int doc;
+            while ((doc = expected.Advance(prevDoc + amount)) != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                Assert.AreEqual(doc, actual.Advance(prevDoc + amount));
+                Assert.AreEqual(expected.Freq, actual.Freq);
+                float expectedScore = expected.GetScore();
+                float actualScore = actual.GetScore();
+                Assert.AreEqual(expectedScore, actualScore, CheckHits.ExplainToleranceDelta(expectedScore, actualScore));
+                prevDoc = doc;
+            }
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, actual.Advance(prevDoc + amount));
+        }
+
+        /// <summary>
+        /// Simple test for NextDoc(): minShouldMatch=2 on 3 terms (one common, one medium, one rare). </summary>
+        [Test]
+        public virtual void TestNextCMR2()
+        {
+            for (int common = 0; common < CommonTerms.Length; common++)
+            {
+                for (int medium = 0; medium < MediumTerms.Length; medium++)
+                {
+                    for (int rare = 0; rare < RareTerms.Length; rare++)
+                    {
+                        Scorer expected = Scorer(new string[] { CommonTerms[common], MediumTerms[medium], RareTerms[rare] }, 2, true);
+                        Scorer actual = Scorer(new string[] { CommonTerms[common], MediumTerms[medium], RareTerms[rare] }, 2, false);
+                        AssertNext(expected, actual);
+                    }
+                }
+            }
+        }
+
+        /// <summary>
+        /// Simple test for Advance(): minShouldMatch=2 on 3 terms (one common, one medium, one rare). </summary>
+        [Test]
+        public virtual void TestAdvanceCMR2()
+        {
+            for (int amount = 25; amount < 200; amount += 25)
+            {
+                for (int common = 0; common < CommonTerms.Length; common++)
+                {
+                    for (int medium = 0; medium < MediumTerms.Length; medium++)
+                    {
+                        for (int rare = 0; rare < RareTerms.Length; rare++)
+                        {
+                            Scorer expected = Scorer(new string[] { CommonTerms[common], MediumTerms[medium], RareTerms[rare] }, 2, true);
+                            Scorer actual = Scorer(new string[] { CommonTerms[common], MediumTerms[medium], RareTerms[rare] }, 2, false);
+                            AssertAdvance(expected, actual, amount);
+                        }
+                    }
+                }
+            }
+        }
+
+        /// <summary>
+        /// Tests NextDoc() with a giant BooleanQuery of all terms and varying minShouldMatch. </summary>
+        [Test]
+        public virtual void TestNextAllTerms()
+        {
+            IList<string> termsList = new List<string>();
+            termsList.AddRange(Arrays.AsList(CommonTerms));
+            termsList.AddRange(Arrays.AsList(MediumTerms));
+            termsList.AddRange(Arrays.AsList(RareTerms));
+            string[] terms = termsList.ToArray();
+
+            for (int minNrShouldMatch = 1; minNrShouldMatch <= terms.Length; minNrShouldMatch++)
+            {
+                Scorer expected = Scorer(terms, minNrShouldMatch, true);
+                Scorer actual = Scorer(terms, minNrShouldMatch, false);
+                AssertNext(expected, actual);
+            }
+        }
+
+        /// <summary>
+        /// Tests Advance() with a giant BooleanQuery of all terms and varying minShouldMatch. </summary>
+        [Test]
+        public virtual void TestAdvanceAllTerms()
+        {
+            IList<string> termsList = new List<string>();
+            termsList.AddRange(Arrays.AsList(CommonTerms));
+            termsList.AddRange(Arrays.AsList(MediumTerms));
+            termsList.AddRange(Arrays.AsList(RareTerms));
+            string[] terms = termsList.ToArray();
+
+            for (int amount = 25; amount < 200; amount += 25)
+            {
+                for (int minNrShouldMatch = 1; minNrShouldMatch <= terms.Length; minNrShouldMatch++)
+                {
+                    Scorer expected = Scorer(terms, minNrShouldMatch, true);
+                    Scorer actual = Scorer(terms, minNrShouldMatch, false);
+                    AssertAdvance(expected, actual, amount);
+                }
+            }
+        }
+
+        /// <summary>
+        /// Tests NextDoc() with varying numbers of terms and varying minShouldMatch. </summary>
+        [Test]
+        public virtual void TestNextVaryingNumberOfTerms()
+        {
+            IList<string> termsList = new List<string>();
+            termsList.AddRange(Arrays.AsList(CommonTerms));
+            termsList.AddRange(Arrays.AsList(MediumTerms));
+            termsList.AddRange(Arrays.AsList(RareTerms));
+            Collections.Shuffle(termsList);
+
+            for (int numTerms = 2; numTerms <= termsList.Count; numTerms++)
+            {
+                string[] terms = termsList.SubList(0, numTerms).ToArray(/*new string[0]*/);
+                for (int minNrShouldMatch = 1; minNrShouldMatch <= terms.Length; minNrShouldMatch++)
+                {
+                    Scorer expected = Scorer(terms, minNrShouldMatch, true);
+                    Scorer actual = Scorer(terms, minNrShouldMatch, false);
+                    AssertNext(expected, actual);
+                }
+            }
+        }
+
+        /// <summary>
+        /// Tests Advance() with varying numbers of terms and varying minShouldMatch. </summary>
+        [Test]
+        public virtual void TestAdvanceVaryingNumberOfTerms()
+        {
+            IList<string> termsList = new List<string>();
+            termsList.AddRange(Arrays.AsList(CommonTerms));
+            termsList.AddRange(Arrays.AsList(MediumTerms));
+            termsList.AddRange(Arrays.AsList(RareTerms));
+            Collections.Shuffle(termsList);
+
+            for (int amount = 25; amount < 200; amount += 25)
+            {
+                for (int numTerms = 2; numTerms <= termsList.Count; numTerms++)
+                {
+                    string[] terms = termsList.SubList(0, numTerms).ToArray(/*new string[0]*/);
+                    for (int minNrShouldMatch = 1; minNrShouldMatch <= terms.Length; minNrShouldMatch++)
+                    {
+                        Scorer expected = Scorer(terms, minNrShouldMatch, true);
+                        Scorer actual = Scorer(terms, minNrShouldMatch, false);
+                        AssertAdvance(expected, actual, amount);
+                    }
+                }
+            }
+        }
+
+        // TODO: more tests
+
+        // A slow minShouldMatch scorer that uses a doc-values field.
+        // Later, this can make debugging easier, since it could record the set of
+        // ords it currently matches and, e.g., print out their values per document.
+        internal class SlowMinShouldMatchScorer : Scorer
+        {
+            internal int CurrentDoc = -1; // current docid
+            internal int CurrentMatched = -1; // current number of terms matched
+
+            internal readonly SortedSetDocValues Dv;
+            internal readonly int MaxDoc;
+
+            internal readonly HashSet<long?> Ords = new HashSet<long?>();
+            internal readonly SimScorer[] Sims;
+            internal readonly int MinNrShouldMatch;
+
+            internal double Score_Renamed = float.NaN;
+
+            internal SlowMinShouldMatchScorer(BooleanWeight weight, AtomicReader reader, IndexSearcher searcher)
+                : base(weight)
+            {
+                this.Dv = reader.GetSortedSetDocValues("dv");
+                this.MaxDoc = reader.MaxDoc;
+                BooleanQuery bq = (BooleanQuery)weight.Query;
+                this.MinNrShouldMatch = bq.MinimumNumberShouldMatch;
+                this.Sims = new SimScorer[(int)Dv.ValueCount];
+                foreach (BooleanClause clause in bq.GetClauses())
+                {
+                    Debug.Assert(!clause.IsProhibited);
+                    Debug.Assert(!clause.IsRequired);
+                    Term term = ((TermQuery)clause.Query).Term;
+                    long ord = Dv.LookupTerm(term.Bytes);
+                    if (ord >= 0)
+                    {
+                        bool success = Ords.Add(ord);
+                        Debug.Assert(success); // no dups
+                        TermContext context = TermContext.Build(reader.Context, term);
+                        SimWeight w = weight.Similarity.ComputeWeight(1f, searcher.CollectionStatistics("field"), searcher.TermStatistics(term, context));
+                        var dummy = w.GetValueForNormalization(); // ignored
+                        w.Normalize(1F, 1F);
+                        Sims[(int)ord] = weight.Similarity.GetSimScorer(w, (AtomicReaderContext)reader.Context);
+                    }
+                }
+            }
+
+            public override float GetScore()
+            {
+                Debug.Assert(Score_Renamed != 0, CurrentMatched.ToString());
+                return (float)Score_Renamed * ((BooleanWeight)m_weight).Coord(CurrentMatched, ((BooleanWeight)m_weight).MaxCoord);
+            }
+
+            public override int Freq
+            {
+                get { return CurrentMatched; }
+            }
+
+            public override int DocID
+            {
+                get { return CurrentDoc; }
+            }
+
+            public override int NextDoc()
+            {
+                Debug.Assert(CurrentDoc != NO_MORE_DOCS);
+                for (CurrentDoc = CurrentDoc + 1; CurrentDoc < MaxDoc; CurrentDoc++)
+                {
+                    CurrentMatched = 0;
+                    Score_Renamed = 0;
+                    Dv.SetDocument(CurrentDoc);
+                    long ord;
+                    while ((ord = Dv.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
+                    {
+                        if (Ords.Contains(ord))
+                        {
+                            CurrentMatched++;
+                            Score_Renamed += Sims[(int)ord].Score(CurrentDoc, 1);
+                        }
+                    }
+                    if (CurrentMatched >= MinNrShouldMatch)
+                    {
+                        return CurrentDoc;
+                    }
+                }
+                return CurrentDoc = NO_MORE_DOCS;
+            }
+
+            public override int Advance(int target)
+            {
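+                // Advance must return the first doc >= target; this naive version
+                // simply scans forward with NextDoc(), O(maxDoc), which is fine
+                // for a test-only reference scorer.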
+                int doc;
+                while ((doc = NextDoc()) < target)
+                {
+                }
+                return doc;
+            }
+
+            public override long GetCost()
+            {
+                return MaxDoc;
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestMultiPhraseQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestMultiPhraseQuery.cs b/src/Lucene.Net.Tests/Search/TestMultiPhraseQuery.cs
new file mode 100644
index 0000000..0995c13
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestMultiPhraseQuery.cs
@@ -0,0 +1,631 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using CannedTokenStream = Lucene.Net.Analysis.CannedTokenStream;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MultiFields = Lucene.Net.Index.MultiFields;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+    using TextField = TextField;
+    using Token = Lucene.Net.Analysis.Token;
+
+    /// <summary>
+    /// Tests the MultiPhraseQuery class.
+    /// </summary>
+    [TestFixture]
+    public class TestMultiPhraseQuery : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestPhrasePrefix()
+        {
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Add("blueberry pie", writer);
+            Add("blueberry strudel", writer);
+            Add("blueberry pizza", writer);
+            Add("blueberry chewing gum", writer);
+            Add("bluebird pizza", writer);
+            Add("bluebird foobar pizza", writer);
+            Add("piccadilly circus", writer);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+
+            // search for "blueberry pi*":
+            MultiPhraseQuery query1 = new MultiPhraseQuery();
+            // search for "strawberry pi*":
+            MultiPhraseQuery query2 = new MultiPhraseQuery();
+            query1.Add(new Term("body", "blueberry"));
+            query2.Add(new Term("body", "strawberry"));
+
+            LinkedList<Term> termsWithPrefix = new LinkedList<Term>();
+
+            // this TermsEnum gives "piccadilly", "pie" and "pizza".
+            string prefix = "pi";
+            TermsEnum te = MultiFields.GetFields(reader).GetTerms("body").GetIterator(null);
+            te.SeekCeil(new BytesRef(prefix));
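+            // SeekCeil positions the enum at the first term >= the prefix; the loop
+            // below then collects terms until one no longer starts with "pi".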
+            do
+            {
+                string s = te.Term.Utf8ToString();
+                if (s.StartsWith(prefix))
+                {
+                    termsWithPrefix.AddLast(new Term("body", s));
+                }
+                else
+                {
+                    break;
+                }
+            } while (te.Next() != null);
+
+            query1.Add(termsWithPrefix.ToArray(/*new Term[0]*/));
+            Assert.AreEqual("body:\"blueberry (piccadilly pie pizza)\"", query1.ToString());
+            query2.Add(termsWithPrefix.ToArray(/*new Term[0]*/));
+            Assert.AreEqual("body:\"strawberry (piccadilly pie pizza)\"", query2.ToString());
+
+            ScoreDoc[] result;
+            result = searcher.Search(query1, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, result.Length);
+            result = searcher.Search(query2, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, result.Length);
+
+            // search for "blue* pizza":
+            MultiPhraseQuery query3 = new MultiPhraseQuery();
+            termsWithPrefix.Clear();
+            prefix = "blue";
+            te.SeekCeil(new BytesRef(prefix));
+
+            do
+            {
+                if (te.Term.Utf8ToString().StartsWith(prefix))
+                {
+                    termsWithPrefix.AddLast(new Term("body", te.Term.Utf8ToString()));
+                }
+            } while (te.Next() != null);
+
+            query3.Add(termsWithPrefix.ToArray(/*new Term[0]*/));
+            query3.Add(new Term("body", "pizza"));
+
+            result = searcher.Search(query3, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, result.Length); // blueberry pizza, bluebird pizza
+            Assert.AreEqual("body:\"(blueberry bluebird) pizza\"", query3.ToString());
+
+            // test slop:
+            query3.Slop = 1;
+            result = searcher.Search(query3, null, 1000).ScoreDocs;
+
+            // just make sure no exc:
+            searcher.Explain(query3, 0);
+
+            Assert.AreEqual(3, result.Length); // blueberry pizza, bluebird pizza, bluebird foobar pizza
+
+            MultiPhraseQuery query4 = new MultiPhraseQuery();
+            try
+            {
+                query4.Add(new Term("field1", "foo"));
+                query4.Add(new Term("field2", "foobar"));
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // okay, all terms must belong to the same field
+            }
+
+            writer.Dispose();
+            reader.Dispose();
+            indexStore.Dispose();
+        }
+
+        // LUCENE-2580
+        [Test]
+        public virtual void TestTall()
+        {
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Add("blueberry chocolate pie", writer);
+            Add("blueberry chocolate tart", writer);
+            IndexReader r = writer.Reader;
+            writer.Dispose();
+
+            IndexSearcher searcher = NewSearcher(r);
+            MultiPhraseQuery q = new MultiPhraseQuery();
+            q.Add(new Term("body", "blueberry"));
+            q.Add(new Term("body", "chocolate"));
+            q.Add(new Term[] { new Term("body", "pie"), new Term("body", "tart") });
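+            // the final position ORs "pie" and "tart", so both indexed docs match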
+            Assert.AreEqual(2, searcher.Search(q, 1).TotalHits);
+            r.Dispose();
+            indexStore.Dispose();
+        }
+
+        [Test]
+        [Ignore("This appears to be a known issue")]
+        public virtual void TestMultiSloppyWithRepeats() //LUCENE-3821 fixes sloppy phrase scoring, except for this known problem
+        {
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Add("a b c d e f g h i k", writer);
+            IndexReader r = writer.Reader;
+            writer.Dispose();
+
+            IndexSearcher searcher = NewSearcher(r);
+
+            MultiPhraseQuery q = new MultiPhraseQuery();
+            // this will fail when the scorer propagates [a] rather than [a,b]:
+            q.Add(new Term[] { new Term("body", "a"), new Term("body", "b") });
+            q.Add(new Term[] { new Term("body", "a") });
+            q.Slop = 6;
+            Assert.AreEqual(1, searcher.Search(q, 1).TotalHits); // should match on "a b"
+
+            r.Dispose();
+            indexStore.Dispose();
+        }
+
+        [Test]
+        public virtual void TestMultiExactWithRepeats()
+        {
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Add("a b c d e f g h i k", writer);
+            IndexReader r = writer.Reader;
+            writer.Dispose();
+
+            IndexSearcher searcher = NewSearcher(r);
+            MultiPhraseQuery q = new MultiPhraseQuery();
+            q.Add(new Term[] { new Term("body", "a"), new Term("body", "d") }, 0);
+            q.Add(new Term[] { new Term("body", "a"), new Term("body", "f") }, 2);
+            Assert.AreEqual(1, searcher.Search(q, 1).TotalHits); // should match on "d e f"
+            r.Dispose();
+            indexStore.Dispose();
+        }
+
+        private void Add(string s, RandomIndexWriter writer)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("body", s, Field.Store.YES));
+            writer.AddDocument(doc);
+        }
+
+        [Test]
+        public virtual void TestBooleanQueryContainingSingleTermPrefixQuery()
+        {
+            // this tests against bug 33161 (now fixed)
+            // In order to cause the bug, the outer query must have more than one term
+            // and all terms required.
+            // The contained MultiPhraseQuery must contain exactly one term array.
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Add("blueberry pie", writer);
+            Add("blueberry chewing gum", writer);
+            Add("blue raspberry pie", writer);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+            // this query will be equivalent to +body:pie +body:"blue*"
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("body", "pie")), Occur.MUST);
+
+            MultiPhraseQuery trouble = new MultiPhraseQuery();
+            trouble.Add(new Term[] { new Term("body", "blueberry"), new Term("body", "blue") });
+            q.Add(trouble, Occur.MUST);
+
+            // exception will be thrown here without fix
+            ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
+
+            Assert.AreEqual(2, hits.Length, "Wrong number of hits");
+
+            // just make sure no exc:
+            searcher.Explain(q, 0);
+
+            writer.Dispose();
+            reader.Dispose();
+            indexStore.Dispose();
+        }
+
+        [Test]
+        public virtual void TestPhrasePrefixWithBooleanQuery()
+        {
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Add("this is a test", "object", writer);
+            Add("a note", "note", writer);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+
+            // this query will be equivalent to +type:note +body:"a t*"
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("type", "note")), Occur.MUST);
+
+            MultiPhraseQuery trouble = new MultiPhraseQuery();
+            trouble.Add(new Term("body", "a"));
+            trouble.Add(new Term[] { new Term("body", "test"), new Term("body", "this") });
+            q.Add(trouble, Occur.MUST);
+
+            // exception will be thrown here without fix for #35626:
+            ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length, "Wrong number of hits");
+            writer.Dispose();
+            reader.Dispose();
+            indexStore.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNoDocs()
+        {
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Add("a note", "note", writer);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+
+            MultiPhraseQuery q = new MultiPhraseQuery();
+            q.Add(new Term("body", "a"));
+            q.Add(new Term[] { new Term("body", "nope"), new Term("body", "nope") });
+            Assert.AreEqual(0, searcher.Search(q, null, 1).TotalHits, "Wrong number of hits");
+
+            // just make sure no exc:
+            searcher.Explain(q, 0);
+
+            writer.Dispose();
+            reader.Dispose();
+            indexStore.Dispose();
+        }
+
+        [Test]
+        public virtual void TestHashCodeAndEquals()
+        {
+            MultiPhraseQuery query1 = new MultiPhraseQuery();
+            MultiPhraseQuery query2 = new MultiPhraseQuery();
+
+            Assert.AreEqual(query1.GetHashCode(), query2.GetHashCode());
+            Assert.IsTrue(query1.Equals(query2));
+            Assert.AreEqual(query1, query2);
+
+            Term term1 = new Term("someField", "someText");
+
+            query1.Add(term1);
+            query2.Add(term1);
+
+            Assert.AreEqual(query1.GetHashCode(), query2.GetHashCode());
+            Assert.AreEqual(query1, query2);
+
+            Term term2 = new Term("someField", "someMoreText");
+
+            query1.Add(term2);
+
+            Assert.IsFalse(query1.GetHashCode() == query2.GetHashCode());
+            Assert.IsFalse(query1.Equals(query2));
+
+            query2.Add(term2);
+
+            Assert.AreEqual(query1.GetHashCode(), query2.GetHashCode());
+            Assert.AreEqual(query1, query2);
+        }
+
+        private void Add(string s, string type, RandomIndexWriter writer)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("body", s, Field.Store.YES));
+            doc.Add(NewStringField("type", type, Field.Store.NO));
+            writer.AddDocument(doc);
+        }
+
+        // LUCENE-2526
+        [Test]
+        public virtual void TestEmptyToString()
+        {
+            (new MultiPhraseQuery()).ToString();
+        }
+
+        [Test]
+        public virtual void TestCustomIDF()
+        {
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Add("this is a test", "object", writer);
+            Add("a note", "note", writer);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+            searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper(this);
+
+            MultiPhraseQuery query = new MultiPhraseQuery();
+            query.Add(new Term[] { new Term("body", "this"), new Term("body", "that") });
+            query.Add(new Term("body", "is"));
+            Weight weight = query.CreateWeight(searcher);
+            Assert.AreEqual(10f * 10f, weight.GetValueForNormalization(), 0.001f);
+
+            writer.Dispose();
+            reader.Dispose();
+            indexStore.Dispose();
+        }
+
+        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        {
+            private readonly TestMultiPhraseQuery OuterInstance;
+
+            public DefaultSimilarityAnonymousInnerClassHelper(TestMultiPhraseQuery outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override Explanation IdfExplain(CollectionStatistics collectionStats, TermStatistics[] termStats)
+            {
+                return new Explanation(10f, "just a test");
+            }
+        }
+
+        [Test]
+        public virtual void TestZeroPosIncr()
+        {
+            Directory dir = new RAMDirectory();
+            Token[] tokens = new Token[3];
+            tokens[0] = new Token();
+            tokens[0].Append("a");
+            tokens[0].PositionIncrement = 1;
+            tokens[1] = new Token();
+            tokens[1].Append("b");
+            tokens[1].PositionIncrement = 0;
+            tokens[2] = new Token();
+            tokens[2].Append("c");
+            tokens[2].PositionIncrement = 0;
+
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(new TextField("field", new CannedTokenStream(tokens)));
+            writer.AddDocument(doc);
+            doc = new Document();
+            doc.Add(new TextField("field", new CannedTokenStream(tokens)));
+            writer.AddDocument(doc);
+            IndexReader r = writer.Reader;
+            writer.Dispose();
+            IndexSearcher s = NewSearcher(r);
+            MultiPhraseQuery mpq = new MultiPhraseQuery();
+            //mpq.Slop = 1;
+
+            // NOTE: it is not great that the else clause here yields
+            // different scores!  MultiPhraseQuery counts the phrase as
+            // occurring twice per doc (it should arguably be 1), because
+            // MultipleTermPositions can return the same position more
+            // than once (0, in this case):
+            if (true)
+            {
+                mpq.Add(new Term[] { new Term("field", "b"), new Term("field", "c") }, 0);
+                mpq.Add(new Term[] { new Term("field", "a") }, 0);
+            }
+            else
+            {
+#pragma warning disable 162
+                mpq.Add(new Term[] { new Term("field", "a") }, 0);
+                mpq.Add(new Term[] { new Term("field", "b"), new Term("field", "c") }, 0);
+#pragma warning restore 162
+            }
+            TopDocs hits = s.Search(mpq, 2);
+            Assert.AreEqual(2, hits.TotalHits);
+            Assert.AreEqual(hits.ScoreDocs[0].Score, hits.ScoreDocs[1].Score, 1e-5);
+            /*
+            for (int hit = 0; hit < hits.TotalHits; hit++)
+            {
+                ScoreDoc sd = hits.ScoreDocs[hit];
+                Console.WriteLine("  hit doc=" + sd.Doc + " score=" + sd.Score);
+            }
+            */
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        private static Token MakeToken(string text, int posIncr)
+        {
+            Token t = new Token();
+            t.Append(text);
+            t.PositionIncrement = posIncr;
+            return t;
+        }
+
+        private static readonly Token[] INCR_0_DOC_TOKENS = new Token[] { MakeToken("x", 1), MakeToken("a", 1), MakeToken("1", 0), MakeToken("m", 1), MakeToken("b", 1), MakeToken("1", 0), MakeToken("n", 1), MakeToken("c", 1), MakeToken("y", 1) };
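+        // (document layout: x a|1 m b|1 n c y - each "1" token has a 0 position
+        // increment, so it shares a position with the preceding "a" or "b")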
+
+        private static readonly Token[] INCR_0_QUERY_TOKENS_AND = new Token[] { MakeToken("a", 1), MakeToken("1", 0), MakeToken("b", 1), MakeToken("1", 0), MakeToken("c", 1) };
+
+        private static readonly Token[][] INCR_0_QUERY_TOKENS_AND_OR_MATCH = new Token[][] { new Token[] { MakeToken("a", 1) }, new Token[] { MakeToken("x", 1), MakeToken("1", 0) }, new Token[] { MakeToken("b", 2) }, new Token[] { MakeToken("x", 2), MakeToken("1", 0) }, new Token[] { MakeToken("c", 3) } };
+
+        private static readonly Token[][] INCR_0_QUERY_TOKENS_AND_OR_NO_MATCHN = new Token[][] { new Token[] { MakeToken("x", 1) }, new Token[] { MakeToken("a", 1), MakeToken("1", 0) }, new Token[] { MakeToken("x", 2) }, new Token[] { MakeToken("b", 2), MakeToken("1", 0) }, new Token[] { MakeToken("c", 3) } };
+
+        /// <summary>
+        /// When created via the query parser, an MPQ is not strict about having all
+        /// query terms in each position - one term per position is sufficient (OR logic).
+        /// </summary>
+        [Test]
+        public virtual void TestZeroPosIncrSloppyParsedAnd()
+        {
+            MultiPhraseQuery q = new MultiPhraseQuery();
+            q.Add(new Term[] { new Term("field", "a"), new Term("field", "1") }, -1);
+            q.Add(new Term[] { new Term("field", "b"), new Term("field", "1") }, 0);
+            q.Add(new Term[] { new Term("field", "c") }, 1);
+            DoTestZeroPosIncrSloppy(q, 0);
+            q.Slop = 1;
+            DoTestZeroPosIncrSloppy(q, 0);
+            q.Slop = 2;
+            DoTestZeroPosIncrSloppy(q, 1);
+        }
+
+        private void DoTestZeroPosIncrSloppy(Query q, int nExpected)
+        {
+            Directory dir = NewDirectory(); // random dir
+            IndexWriterConfig cfg = NewIndexWriterConfig(TEST_VERSION_CURRENT, null);
+            IndexWriter writer = new IndexWriter(dir, cfg);
+            Document doc = new Document();
+            doc.Add(new TextField("field", new CannedTokenStream(INCR_0_DOC_TOKENS)));
+            writer.AddDocument(doc);
+            IndexReader r = DirectoryReader.Open(writer, false);
+            writer.Dispose();
+            IndexSearcher s = NewSearcher(r);
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("QUERY=" + q);
+            }
+
+            TopDocs hits = s.Search(q, 1);
+            Assert.AreEqual(nExpected, hits.TotalHits, "wrong number of results");
+
+            if (VERBOSE)
+            {
+                for (int hit = 0; hit < hits.TotalHits; hit++)
+                {
+                    ScoreDoc sd = hits.ScoreDocs[hit];
+                    Console.WriteLine("  hit doc=" + sd.Doc + " score=" + sd.Score);
+                }
+            }
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// PQ AND Mode - Manually creating a phrase query
+        /// </summary>
+        [Test]
+        public virtual void TestZeroPosIncrSloppyPqAnd()
+        {
+            PhraseQuery pq = new PhraseQuery();
+            int pos = -1;
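+            // positions passed to PhraseQuery.Add are absolute, so accumulate the
+            // increments; a 0-increment token lands on the same position as the
+            // previous token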
+            foreach (Token tap in INCR_0_QUERY_TOKENS_AND)
+            {
+                pos += tap.PositionIncrement;
+                pq.Add(new Term("field", tap.ToString()), pos);
+            }
+            DoTestZeroPosIncrSloppy(pq, 0);
+            pq.Slop = 1;
+            DoTestZeroPosIncrSloppy(pq, 0);
+            pq.Slop = 2;
+            DoTestZeroPosIncrSloppy(pq, 1);
+        }
+
+        /// <summary>
+        /// MPQ AND Mode - Manually creating a multiple phrase query
+        /// </summary>
+        [Test]
+        public virtual void TestZeroPosIncrSloppyMpqAnd()
+        {
+            MultiPhraseQuery mpq = new MultiPhraseQuery();
+            int pos = -1;
+            foreach (Token tap in INCR_0_QUERY_TOKENS_AND)
+            {
+                pos += tap.PositionIncrement;
+                mpq.Add(new Term[] { new Term("field", tap.ToString()) }, pos); //AND logic
+            }
+            DoTestZeroPosIncrSloppy(mpq, 0);
+            mpq.Slop = 1;
+            DoTestZeroPosIncrSloppy(mpq, 0);
+            mpq.Slop = 2;
+            DoTestZeroPosIncrSloppy(mpq, 1);
+        }
+
+        /// <summary>
+        /// MPQ Combined AND OR Mode - Manually creating a multiple phrase query
+        /// </summary>
+        [Test]
+        public virtual void TestZeroPosIncrSloppyMpqAndOrMatch()
+        {
+            MultiPhraseQuery mpq = new MultiPhraseQuery();
+            foreach (Token[] tap in INCR_0_QUERY_TOKENS_AND_OR_MATCH)
+            {
+                Term[] terms = TapTerms(tap);
+                int pos = tap[0].PositionIncrement - 1;
+                mpq.Add(terms, pos); //AND logic in pos, OR across lines
+            }
+            DoTestZeroPosIncrSloppy(mpq, 0);
+            mpq.Slop = 1;
+            DoTestZeroPosIncrSloppy(mpq, 0);
+            mpq.Slop = 2;
+            DoTestZeroPosIncrSloppy(mpq, 1);
+        }
+
+        /// <summary>
+        /// MPQ Combined AND OR Mode - Manually creating a multiple phrase query - with no match
+        /// </summary>
+        [Test]
+        public virtual void TestZeroPosIncrSloppyMpqAndOrNoMatch()
+        {
+            MultiPhraseQuery mpq = new MultiPhraseQuery();
+            foreach (Token[] tap in INCR_0_QUERY_TOKENS_AND_OR_NO_MATCHN)
+            {
+                Term[] terms = TapTerms(tap);
+                int pos = tap[0].PositionIncrement - 1;
+                mpq.Add(terms, pos); //AND logic in pos, OR across lines
+            }
+            DoTestZeroPosIncrSloppy(mpq, 0);
+            mpq.Slop = 2;
+            DoTestZeroPosIncrSloppy(mpq, 0);
+        }
+
+        private Term[] TapTerms(Token[] tap)
+        {
+            Term[] terms = new Term[tap.Length];
+            for (int i = 0; i < terms.Length; i++)
+            {
+                terms[i] = new Term("field", tap[i].ToString());
+            }
+            return terms;
+        }
+
+        [Test]
+        public virtual void TestNegativeSlop()
+        {
+            MultiPhraseQuery query = new MultiPhraseQuery();
+            query.Add(new Term("field", "two"));
+            query.Add(new Term("field", "one"));
+            try
+            {
+                query.Slop = -2;
+                Assert.Fail("didn't get expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+        }
+    }
+}
\ No newline at end of file