Posted to commits@lucenenet.apache.org by ni...@apache.org on 2017/02/26 23:37:09 UTC

[21/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestPayloads.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestPayloads.cs b/src/Lucene.Net.Tests/Index/TestPayloads.cs
new file mode 100644
index 0000000..5c106d9
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestPayloads.cs
@@ -0,0 +1,738 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Text;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Analysis;
+    using Lucene.Net.Support;
+    using Lucene.Net.Util;
+    using NUnit.Framework;
+    using System.IO;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using PayloadAttribute = Lucene.Net.Analysis.TokenAttributes.PayloadAttribute;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestPayloads : LuceneTestCase
+    {
+        // Simple sanity tests for payloads
+        [Test]
+        public virtual void TestPayload()
+        {
+            BytesRef payload = new BytesRef("this is a test!");
+            Assert.AreEqual(payload.Length, "this is a test!".Length, "Wrong payload length.");
+
+            BytesRef clone = (BytesRef)payload.Clone();
+            Assert.AreEqual(payload.Length, clone.Length);
+            for (int i = 0; i < payload.Length; i++)
+            {
+                Assert.AreEqual(payload.Bytes[i + payload.Offset], clone.Bytes[i + clone.Offset]);
+            }
+        }
+
+        // Tests whether the DocumentWriter and SegmentMerger correctly enable the
+        // payload bit in the FieldInfo
+        [Test]
+        public virtual void TestPayloadFieldBit()
+        {
+            Directory ram = NewDirectory();
+            PayloadAnalyzer analyzer = new PayloadAnalyzer();
+            IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
+            Document d = new Document();
+            // this field won't have any payloads
+            d.Add(NewTextField("f1", "this field has no payloads", Field.Store.NO));
+            // this field will have payloads in all docs, however not for all term positions,
+            // so this field is used to check if the DocumentWriter correctly enables the payloads bit
+            // even if only some term positions have payloads
+            d.Add(NewTextField("f2", "this field has payloads in all docs", Field.Store.NO));
+            d.Add(NewTextField("f2", "this field has payloads in all docs NO PAYLOAD", Field.Store.NO));
+            // this field is used to verify that the SegmentMerger enables payloads for a
+            // field even if payloads are enabled in only some documents
+            d.Add(NewTextField("f3", "this field has payloads in some docs", Field.Store.NO));
+            // only add payload data for field f2
+#pragma warning disable 612, 618
+            analyzer.SetPayloadData("f2", "somedata".GetBytes(IOUtils.CHARSET_UTF_8), 0, 1);
+#pragma warning restore 612, 618
+            writer.AddDocument(d);
+            // flush
+            writer.Dispose();
+
+            SegmentReader reader = GetOnlySegmentReader(DirectoryReader.Open(ram));
+            FieldInfos fi = reader.FieldInfos;
+            Assert.IsFalse(fi.FieldInfo("f1").HasPayloads, "Payload field bit should not be set.");
+            Assert.IsTrue(fi.FieldInfo("f2").HasPayloads, "Payload field bit should be set.");
+            Assert.IsFalse(fi.FieldInfo("f3").HasPayloads, "Payload field bit should not be set.");
+            reader.Dispose();
+
+            // now we add another document which has payloads for field f3 and verify if the SegmentMerger
+            // enabled payloads for that field
+            analyzer = new PayloadAnalyzer(); // Clear payload state for each field
+            writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetOpenMode(OpenMode.CREATE));
+            d = new Document();
+            d.Add(NewTextField("f1", "this field has no payloads", Field.Store.NO));
+            d.Add(NewTextField("f2", "this field has payloads in all docs", Field.Store.NO));
+            d.Add(NewTextField("f2", "this field has payloads in all docs", Field.Store.NO));
+            d.Add(NewTextField("f3", "this field has payloads in some docs", Field.Store.NO));
+            // add payload data for field f2 and f3
+#pragma warning disable 612, 618
+            analyzer.SetPayloadData("f2", "somedata".GetBytes(IOUtils.CHARSET_UTF_8), 0, 1);
+            analyzer.SetPayloadData("f3", "somedata".GetBytes(IOUtils.CHARSET_UTF_8), 0, 3);
+#pragma warning restore 612, 618
+            writer.AddDocument(d);
+
+            // force merge
+            writer.ForceMerge(1);
+            // flush
+            writer.Dispose();
+
+            reader = GetOnlySegmentReader(DirectoryReader.Open(ram));
+            fi = reader.FieldInfos;
+            Assert.IsFalse(fi.FieldInfo("f1").HasPayloads, "Payload field bit should not be set.");
+            Assert.IsTrue(fi.FieldInfo("f2").HasPayloads, "Payload field bit should be set.");
+            Assert.IsTrue(fi.FieldInfo("f3").HasPayloads, "Payload field bit should be set.");
+            reader.Dispose();
+            ram.Dispose();
+        }
+
+        // Tests if payloads are correctly stored and loaded using both RAMDirectory and FSDirectory
+        [Test]
+        public virtual void TestPayloadsEncoding()
+        {
+            Directory dir = NewDirectory();
+            PerformTest(dir);
+            dir.Dispose();
+        }
+
+        // builds an index with payloads in the given Directory and performs
+        // different tests to verify the payload encoding
+        private void PerformTest(Directory dir)
+        {
+            PayloadAnalyzer analyzer = new PayloadAnalyzer();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetOpenMode(OpenMode.CREATE).SetMergePolicy(NewLogMergePolicy()));
+
+            // should be in sync with value in TermInfosWriter
+            const int skipInterval = 16;
+
+            const int numTerms = 5;
+            const string fieldName = "f1";
+
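+            // one more document than the skip interval, so that each term's postings
+            // cross at least one skip point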
+            int numDocs = skipInterval + 1;
+            // create content for the test documents with just a few terms
+            Term[] terms = GenerateTerms(fieldName, numTerms);
+            StringBuilder sb = new StringBuilder();
+            for (int i = 0; i < terms.Length; i++)
+            {
+                sb.Append(terms[i].Text());
+                sb.Append(" ");
+            }
+            string content = sb.ToString();
+
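+            // total payload bytes: the first loop below adds 2 * numDocs docs whose
+            // numTerms positions each carry a 1-byte payload (numTerms * numDocs * 2);
+            // the second loop adds numDocs docs where the i-th doc carries numTerms
+            // payloads of length i, i.e. numTerms * (0 + 1 + ... + (numDocs - 1))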
+            int payloadDataLength = numTerms * numDocs * 2 + numTerms * numDocs * (numDocs - 1) / 2;
+            var payloadData = GenerateRandomData(payloadDataLength);
+
+            Document d = new Document();
+            d.Add(NewTextField(fieldName, content, Field.Store.NO));
+            // add the same document multiple times to have the same payload lengths for all
+            // occurrences within two consecutive skip intervals
+            int offset = 0;
+            for (int i = 0; i < 2 * numDocs; i++)
+            {
+                analyzer = new PayloadAnalyzer(fieldName, payloadData, offset, 1);
+                offset += numTerms;
+                writer.AddDocument(d, analyzer);
+            }
+
+            // make sure we create more than one segment to test merging
+            writer.Commit();
+
+            // now we make sure to have different payload lengths at the next skip point
+            for (int i = 0; i < numDocs; i++)
+            {
+                analyzer = new PayloadAnalyzer(fieldName, payloadData, offset, i);
+                offset += i * numTerms;
+                writer.AddDocument(d, analyzer);
+            }
+
+            writer.ForceMerge(1);
+            // flush
+            writer.Dispose();
+
+            /*
+             * Verify the index
+             * first we test if all payloads are stored correctly
+             */
+            IndexReader reader = DirectoryReader.Open(dir);
+
+            var verifyPayloadData = new byte[payloadDataLength];
+            offset = 0;
+            var tps = new DocsAndPositionsEnum[numTerms];
+            for (int i = 0; i < numTerms; i++)
+            {
+                tps[i] = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), terms[i].Field, new BytesRef(terms[i].Text()));
+            }
+
+            while (tps[0].NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                for (int i = 1; i < numTerms; i++)
+                {
+                    tps[i].NextDoc();
+                }
+                int freq = tps[0].Freq;
+
+                for (int i = 0; i < freq; i++)
+                {
+                    for (int j = 0; j < numTerms; j++)
+                    {
+                        tps[j].NextPosition();
+                        BytesRef br = tps[j].GetPayload();
+                        if (br != null)
+                        {
+                            Array.Copy(br.Bytes, br.Offset, verifyPayloadData, offset, br.Length);
+                            offset += br.Length;
+                        }
+                    }
+                }
+            }
+
+            AssertByteArrayEquals(payloadData, verifyPayloadData);
+
+            /*
+             *  test lazy skipping
+             */
+            DocsAndPositionsEnum tp = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), terms[0].Field, new BytesRef(terms[0].Text()));
+            tp.NextDoc();
+            tp.NextPosition();
+            // NOTE: a prior revision of this test failed to
+            // call NextDoc() first here:
+            tp.NextDoc();
+            // now we don't read this payload
+            tp.NextPosition();
+            BytesRef payload = tp.GetPayload();
+            Assert.AreEqual(1, payload.Length, "Wrong payload length.");
+            Assert.AreEqual(payload.Bytes[payload.Offset], payloadData[numTerms]);
+            tp.NextDoc();
+            tp.NextPosition();
+
+            // we don't read this payload and skip to a different document
+            tp.Advance(5);
+            tp.NextPosition();
+            payload = tp.GetPayload();
+            Assert.AreEqual(1, payload.Length, "Wrong payload length.");
+            Assert.AreEqual(payload.Bytes[payload.Offset], payloadData[5 * numTerms]);
+
+            /*
+             * Test different lengths at skip points
+             */
+            tp = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), terms[1].Field, new BytesRef(terms[1].Text()));
+            tp.NextDoc();
+            tp.NextPosition();
+            Assert.AreEqual(1, tp.GetPayload().Length, "Wrong payload length.");
+            tp.Advance(skipInterval - 1);
+            tp.NextPosition();
+            Assert.AreEqual(1, tp.GetPayload().Length, "Wrong payload length.");
+            tp.Advance(2 * skipInterval - 1);
+            tp.NextPosition();
+            Assert.AreEqual(1, tp.GetPayload().Length, "Wrong payload length.");
+            tp.Advance(3 * skipInterval - 1);
+            tp.NextPosition();
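+            // doc (3 * skipInterval - 1) falls in the second batch of documents added
+            // above, where doc (2 * numDocs + i) carries payloads of length i, so the
+            // expected payload length is (3 * skipInterval - 1) - 2 * numDocs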
+            Assert.AreEqual(3 * skipInterval - 2 * numDocs - 1, tp.GetPayload().Length, "Wrong payload length.");
+
+            reader.Dispose();
+
+            // test long payload
+            analyzer = new PayloadAnalyzer();
+            writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetOpenMode(OpenMode.CREATE));
+            string singleTerm = "lucene";
+
+            d = new Document();
+            d.Add(NewTextField(fieldName, singleTerm, Field.Store.NO));
+            // add a payload whose length is greater than the buffer size of BufferedIndexOutput
+            payloadData = GenerateRandomData(2000);
+            analyzer.SetPayloadData(fieldName, payloadData, 100, 1500);
+            writer.AddDocument(d);
+
+            writer.ForceMerge(1);
+            // flush
+            writer.Dispose();
+
+            reader = DirectoryReader.Open(dir);
+            tp = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), fieldName, new BytesRef(singleTerm));
+            tp.NextDoc();
+            tp.NextPosition();
+
+            BytesRef bref = tp.GetPayload();
+            verifyPayloadData = new byte[bref.Length];
+            var portion = new byte[1500];
+            Array.Copy(payloadData, 100, portion, 0, 1500);
+
+            AssertByteArrayEquals(portion, bref.Bytes, bref.Offset, bref.Length);
+            reader.Dispose();
+        }
+
+#pragma warning disable 612, 618
+        internal static readonly Encoding Utf8 = IOUtils.CHARSET_UTF_8;
+#pragma warning restore 612, 618
+
+        private void GenerateRandomData(byte[] data)
+        {
+            // this test needs the random data to be valid unicode
+            string s = TestUtil.RandomFixedByteLengthUnicodeString(Random(), data.Length);
+            var b = s.GetBytes(Utf8);
+            Debug.Assert(b.Length == data.Length);
+            System.Buffer.BlockCopy(b, 0, data, 0, b.Length);
+        }
+
+        private byte[] GenerateRandomData(int n)
+        {
+            var data = new byte[n];
+            GenerateRandomData(data);
+            return data;
+        }
+
+        private Term[] GenerateTerms(string fieldName, int n)
+        {
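+            // maxDigits is the digit count needed for the largest term index; the term
+            // numbers are zero-padded below so the generated terms sort lexicographically
+            // in numeric order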
+            int maxDigits = (int)(Math.Log(n) / Math.Log(10));
+            Term[] terms = new Term[n];
+            StringBuilder sb = new StringBuilder();
+            for (int i = 0; i < n; i++)
+            {
+                sb.Length = 0;
+                sb.Append("t");
+                int zeros = maxDigits - (int)(Math.Log(i) / Math.Log(10));
+                for (int j = 0; j < zeros; j++)
+                {
+                    sb.Append("0");
+                }
+                sb.Append(i);
+                terms[i] = new Term(fieldName, sb.ToString());
+            }
+            return terms;
+        }
+
+        internal virtual void AssertByteArrayEquals(byte[] b1, byte[] b2)
+        {
+            if (b1.Length != b2.Length)
+            {
+                Assert.Fail("Byte arrays have different lengths: " + b1.Length + ", " + b2.Length);
+            }
+
+            for (int i = 0; i < b1.Length; i++)
+            {
+                if (b1[i] != b2[i])
+                {
+                    Assert.Fail("Byte arrays different at index " + i + ": " + b1[i] + ", " + b2[i]);
+                }
+            }
+        }
+
+        internal virtual void AssertByteArrayEquals(byte[] b1, byte[] b2, int b2offset, int b2length)
+        {
+            if (b1.Length != b2length)
+            {
+                Assert.Fail("Byte arrays have different lengths: " + b1.Length + ", " + b2length);
+            }
+
+            for (int i = 0; i < b1.Length; i++)
+            {
+                if (b1[i] != b2[b2offset + i])
+                {
+                    Assert.Fail("Byte arrays different at index " + i + ": " + b1[i] + ", " + b2[b2offset + i]);
+                }
+            }
+        }
+
+        /// <summary>
+        /// This Analyzer uses a whitespace tokenizer (MockTokenizer) and a PayloadFilter.
+        /// </summary>
+        private class PayloadAnalyzer : Analyzer
+        {
+            internal readonly IDictionary<string, PayloadData> FieldToData = new Dictionary<string, PayloadData>();
+
+            public PayloadAnalyzer()
+                : base(PER_FIELD_REUSE_STRATEGY)
+            {
+            }
+
+            public PayloadAnalyzer(string field, byte[] data, int offset, int length)
+                : base(PER_FIELD_REUSE_STRATEGY)
+            {
+                SetPayloadData(field, data, offset, length);
+            }
+
+            internal virtual void SetPayloadData(string field, byte[] data, int offset, int length)
+            {
+                FieldToData[field] = new PayloadData(data, offset, length);
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                PayloadData payload;
+                FieldToData.TryGetValue(fieldName, out payload);
+                Tokenizer ts = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                TokenStream tokenStream = (payload != null) ? (TokenStream)new PayloadFilter(ts, payload.Data, payload.Offset, payload.Length) : ts;
+                return new TokenStreamComponents(ts, tokenStream);
+            }
+
+            internal class PayloadData
+            {
+                internal byte[] Data;
+                internal int Offset;
+                internal int Length;
+
+                internal PayloadData(byte[] data, int offset, int length)
+                {
+                    this.Data = data;
+                    this.Offset = offset;
+                    this.Length = length;
+                }
+            }
+        }
+
+        /// <summary>
+        /// This filter adds payloads to the tokens.
+        /// </summary>
+        private class PayloadFilter : TokenFilter
+        {
+            internal byte[] Data;
+            internal int Length;
+            internal int Offset;
+            internal int StartOffset;
+            internal IPayloadAttribute PayloadAtt;
+            internal ICharTermAttribute TermAttribute;
+
+            public PayloadFilter(TokenStream @in, byte[] data, int offset, int length)
+                : base(@in)
+            {
+                this.Data = data;
+                this.Length = length;
+                this.Offset = offset;
+                this.StartOffset = offset;
+                PayloadAtt = AddAttribute<IPayloadAttribute>();
+                TermAttribute = AddAttribute<ICharTermAttribute>();
+            }
+
+            public sealed override bool IncrementToken()
+            {
+                bool hasNext = m_input.IncrementToken();
+                if (!hasNext)
+                {
+                    return false;
+                }
+
+                // Some occurrences of the same field should have payloads and others should not
+                if (Offset + Length <= Data.Length && !TermAttribute.ToString().EndsWith("NO PAYLOAD"))
+                {
+                    BytesRef p = new BytesRef(Data, Offset, Length);
+                    PayloadAtt.Payload = p;
+                    Offset += Length;
+                }
+                else
+                {
+                    PayloadAtt.Payload = null;
+                }
+
+                return true;
+            }
+
+            public override void Reset()
+            {
+                base.Reset();
+                this.Offset = StartOffset;
+            }
+        }
+
+        [Test]
+        public virtual void TestThreadSafety()
+        {
+            const int numThreads = 5;
+            int numDocs = AtLeast(50);
+            ByteArrayPool pool = new ByteArrayPool(numThreads, 5);
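+            // the pool holds exactly numThreads byte arrays, so each ingesting thread can
+            // hold one buffer at a time; the assert at the end verifies all were released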
+
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            const string field = "test";
+
+            ThreadClass[] ingesters = new ThreadClass[numThreads];
+            for (int i = 0; i < numThreads; i++)
+            {
+                ingesters[i] = new ThreadAnonymousInnerClassHelper(this, numDocs, pool, writer, field);
+                ingesters[i].Start();
+            }
+
+            for (int i = 0; i < numThreads; i++)
+            {
+                ingesters[i].Join();
+            }
+            writer.Dispose();
+            IndexReader reader = DirectoryReader.Open(dir);
+            TermsEnum terms = MultiFields.GetFields(reader).GetTerms(field).GetIterator(null);
+            IBits liveDocs = MultiFields.GetLiveDocs(reader);
+            DocsAndPositionsEnum tp = null;
+            while (terms.Next() != null)
+            {
+                string termText = terms.Term.Utf8ToString();
+                tp = terms.DocsAndPositions(liveDocs, tp);
+                while (tp.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+                {
+                    int freq = tp.Freq;
+                    for (int i = 0; i < freq; i++)
+                    {
+                        tp.NextPosition();
+                        BytesRef payload = tp.GetPayload();
+                        Assert.AreEqual(termText, payload.Utf8ToString());
+                    }
+                }
+            }
+            reader.Dispose();
+            dir.Dispose();
+            Assert.AreEqual(pool.Count, numThreads);
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestPayloads OuterInstance;
+
+            private int NumDocs;
+            private Lucene.Net.Index.TestPayloads.ByteArrayPool Pool;
+            private IndexWriter Writer;
+            private string Field;
+
+            public ThreadAnonymousInnerClassHelper(TestPayloads outerInstance, int numDocs, Lucene.Net.Index.TestPayloads.ByteArrayPool pool, IndexWriter writer, string field)
+            {
+                this.OuterInstance = outerInstance;
+                this.NumDocs = numDocs;
+                this.Pool = pool;
+                this.Writer = writer;
+                this.Field = field;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    for (int j = 0; j < NumDocs; j++)
+                    {
+                        Document d = new Document();
+                        d.Add(new TextField(Field, new PoolingPayloadTokenStream(OuterInstance, Pool)));
+                        Writer.AddDocument(d);
+                    }
+                }
+                catch (Exception e)
+                {
+                    Console.WriteLine(e.ToString());
+                    Console.Write(e.StackTrace);
+                    Assert.Fail(e.ToString());
+                }
+            }
+        }
+
+        private class PoolingPayloadTokenStream : TokenStream
+        {
+            private readonly TestPayloads OuterInstance;
+
+            private byte[] Payload;
+            internal bool First;
+            internal ByteArrayPool Pool;
+            internal string Term;
+
+            internal ICharTermAttribute TermAtt;
+            internal IPayloadAttribute PayloadAtt;
+
+            internal PoolingPayloadTokenStream(TestPayloads outerInstance, ByteArrayPool pool)
+            {
+                this.OuterInstance = outerInstance;
+                this.Pool = pool;
+                Payload = pool.Get();
+                OuterInstance.GenerateRandomData(Payload);
+                Term = Encoding.UTF8.GetString((byte[])(Array)Payload);
+                First = true;
+                PayloadAtt = AddAttribute<IPayloadAttribute>();
+                TermAtt = AddAttribute<ICharTermAttribute>();
+            }
+
+            public sealed override bool IncrementToken()
+            {
+                if (!First)
+                {
+                    return false;
+                }
+                First = false;
+                ClearAttributes();
+                TermAtt.Append(Term);
+                PayloadAtt.Payload = new BytesRef(Payload);
+                return true;
+            }
+
+            public override void Dispose()
+            {
+                Pool.Release(Payload);
+            }
+        }
+
+        private class ByteArrayPool
+        {
+            internal readonly IList<byte[]> Pool;
+
+            internal ByteArrayPool(int capacity, int size)
+            {
+                Pool = new List<byte[]>();
+                for (int i = 0; i < capacity; i++)
+                {
+                    Pool.Add(new byte[size]);
+                }
+            }
+
+            internal virtual byte[] Get()
+            {
+                lock (this) // TODO use BlockingCollection / BCL datastructures instead
+                {
+                    var retArray = Pool[0];
+                    Pool.RemoveAt(0);
+                    return retArray;
+                }
+            }
+
+            internal virtual void Release(byte[] b)
+            {
+                lock (this)
+                {
+                    Pool.Add(b);
+                }
+            }
+
+            internal virtual int Count
+            {
+                get
+                {
+                    lock (this)
+                    {
+                        return Pool.Count;
+                    }
+                }
+            }
+        }
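+
+        // A minimal alternative sketch for the pool above, using the BCL data structures
+        // suggested by the TODO in ByteArrayPool.Get(). This hypothetical class is not
+        // used by the test; it only illustrates the equivalent BlockingCollection-based code.
+        private class BlockingByteArrayPool
+        {
+            private readonly System.Collections.Concurrent.BlockingCollection<byte[]> pool =
+                new System.Collections.Concurrent.BlockingCollection<byte[]>();
+
+            internal BlockingByteArrayPool(int capacity, int size)
+            {
+                for (int i = 0; i < capacity; i++)
+                {
+                    pool.Add(new byte[size]);
+                }
+            }
+
+            internal byte[] Get()
+            {
+                return pool.Take(); // blocks until an array is available, instead of failing when empty
+            }
+
+            internal void Release(byte[] b)
+            {
+                pool.Add(b);
+            }
+
+            internal int Count
+            {
+                get { return pool.Count; }
+            }
+        }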
+
+        [Test]
+        public virtual void TestAcrossFields()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, true), Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(new TextField("hasMaybepayload", "here we go", Field.Store.YES));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            writer = new RandomIndexWriter(Random(), dir, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, true), Similarity, TimeZone);
+            doc = new Document();
+            doc.Add(new TextField("hasMaybepayload2", "here we go", Field.Store.YES));
+            writer.AddDocument(doc);
+            writer.AddDocument(doc);
+            writer.ForceMerge(1);
+            writer.Dispose();
+
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// some docs have a payload attribute, some do not </summary>
+        [Test]
+        public virtual void TestMixupDocs()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, null);
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, iwc);
+            Document doc = new Document();
+            Field field = new TextField("field", "", Field.Store.NO);
+            TokenStream ts = new MockTokenizer(new StringReader("here we go"), MockTokenizer.WHITESPACE, true);
+            Assert.IsFalse(ts.HasAttribute<PayloadAttribute>());
+            field.SetTokenStream(ts);
+            doc.Add(field);
+            writer.AddDocument(doc);
+            Token withPayload = new Token("withPayload", 0, 11);
+            withPayload.Payload = new BytesRef("test");
+            ts = new CannedTokenStream(withPayload);
+            Assert.IsTrue(ts.HasAttribute<IPayloadAttribute>());
+            field.SetTokenStream(ts);
+            writer.AddDocument(doc);
+            ts = new MockTokenizer(new StringReader("another"), MockTokenizer.WHITESPACE, true);
+            Assert.IsFalse(ts.HasAttribute<PayloadAttribute>());
+            field.SetTokenStream(ts);
+            writer.AddDocument(doc);
+            DirectoryReader reader = writer.Reader;
+            AtomicReader sr = SlowCompositeReaderWrapper.Wrap(reader);
+            DocsAndPositionsEnum de = sr.TermPositionsEnum(new Term("field", "withPayload"));
+            de.NextDoc();
+            de.NextPosition();
+            Assert.AreEqual(new BytesRef("test"), de.GetPayload());
+            writer.Dispose();
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// some field instances have a payload attribute, some do not </summary>
+        [Test]
+        public virtual void TestMixupMultiValued()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            Field field = new TextField("field", "", Field.Store.NO);
+            TokenStream ts = new MockTokenizer(new StringReader("here we go"), MockTokenizer.WHITESPACE, true);
+            Assert.IsFalse(ts.HasAttribute<PayloadAttribute>());
+            field.SetTokenStream(ts);
+            doc.Add(field);
+            Field field2 = new TextField("field", "", Field.Store.NO);
+            Token withPayload = new Token("withPayload", 0, 11);
+            withPayload.Payload = new BytesRef("test");
+            ts = new CannedTokenStream(withPayload);
+            Assert.IsTrue(ts.HasAttribute<IPayloadAttribute>());
+            field2.SetTokenStream(ts);
+            doc.Add(field2);
+            Field field3 = new TextField("field", "", Field.Store.NO);
+            ts = new MockTokenizer(new StringReader("nopayload"), MockTokenizer.WHITESPACE, true);
+            Assert.IsFalse(ts.HasAttribute<PayloadAttribute>());
+            field3.SetTokenStream(ts);
+            doc.Add(field3);
+            writer.AddDocument(doc);
+            DirectoryReader reader = writer.Reader;
+            SegmentReader sr = GetOnlySegmentReader(reader);
+            DocsAndPositionsEnum de = sr.TermPositionsEnum(new Term("field", "withPayload"));
+            de.NextDoc();
+            de.NextPosition();
+            Assert.AreEqual(new BytesRef("test"), de.GetPayload());
+            writer.Dispose();
+            reader.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs b/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs
new file mode 100644
index 0000000..7e26232
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestPayloadsOnVectors.cs
@@ -0,0 +1,165 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using System.Diagnostics;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using System.IO;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using CannedTokenStream = Lucene.Net.Analysis.CannedTokenStream;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using TextField = TextField;
+    using Token = Lucene.Net.Analysis.Token;
+    using TokenStream = Lucene.Net.Analysis.TokenStream;
+
+    [SuppressCodecs("Lucene3x")]
+    [TestFixture]
+    public class TestPayloadsOnVectors : LuceneTestCase
+    {
+        /// <summary>
+        /// some docs have a payload attribute, some do not </summary>
+        [Test]
+        public virtual void TestMixupDocs()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, iwc);
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorPayloads = true;
+            customType.StoreTermVectorOffsets = Random().NextBoolean();
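+            // offsets are toggled randomly so the payload checks below are exercised
+            // both with and without term vector offsets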
+            Field field = new Field("field", "", customType);
+            TokenStream ts = new MockTokenizer(new StringReader("here we go"), MockTokenizer.WHITESPACE, true);
+            Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
+            field.SetTokenStream(ts);
+            doc.Add(field);
+            writer.AddDocument(doc);
+
+            Token withPayload = new Token("withPayload", 0, 11);
+            withPayload.Payload = new BytesRef("test");
+            ts = new CannedTokenStream(withPayload);
+            Assert.IsTrue(ts.HasAttribute<IPayloadAttribute>());
+            field.SetTokenStream(ts);
+            writer.AddDocument(doc);
+
+            ts = new MockTokenizer(new StringReader("another"), MockTokenizer.WHITESPACE, true);
+            Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
+            field.SetTokenStream(ts);
+            writer.AddDocument(doc);
+
+            DirectoryReader reader = writer.Reader;
+            Terms terms = reader.GetTermVector(1, "field");
+            Debug.Assert(terms != null);
+            TermsEnum termsEnum = terms.GetIterator(null);
+            Assert.IsTrue(termsEnum.SeekExact(new BytesRef("withPayload")));
+            DocsAndPositionsEnum de = termsEnum.DocsAndPositions(null, null);
+            Assert.AreEqual(0, de.NextDoc());
+            Assert.AreEqual(0, de.NextPosition());
+            Assert.AreEqual(new BytesRef("test"), de.GetPayload());
+            writer.Dispose();
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// some field instances have a payload attribute, some do not </summary>
+        [Test]
+        public virtual void TestMixupMultiValued()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = true;
+            customType.StoreTermVectorPayloads = true;
+            customType.StoreTermVectorOffsets = Random().NextBoolean();
+            Field field = new Field("field", "", customType);
+            TokenStream ts = new MockTokenizer(new StringReader("here we go"), MockTokenizer.WHITESPACE, true);
+            Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
+            field.SetTokenStream(ts);
+            doc.Add(field);
+            Field field2 = new Field("field", "", customType);
+            Token withPayload = new Token("withPayload", 0, 11);
+            withPayload.Payload = new BytesRef("test");
+            ts = new CannedTokenStream(withPayload);
+            Assert.IsTrue(ts.HasAttribute<IPayloadAttribute>());
+            field2.SetTokenStream(ts);
+            doc.Add(field2);
+            Field field3 = new Field("field", "", customType);
+            ts = new MockTokenizer(new StringReader("nopayload"), MockTokenizer.WHITESPACE, true);
+            Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
+            field3.SetTokenStream(ts);
+            doc.Add(field3);
+            writer.AddDocument(doc);
+            DirectoryReader reader = writer.Reader;
+            Terms terms = reader.GetTermVector(0, "field");
+            Debug.Assert(terms != null);
+            TermsEnum termsEnum = terms.GetIterator(null);
+            Assert.IsTrue(termsEnum.SeekExact(new BytesRef("withPayload")));
+            DocsAndPositionsEnum de = termsEnum.DocsAndPositions(null, null);
+            Assert.AreEqual(0, de.NextDoc());
+            Assert.AreEqual(3, de.NextPosition());
+            Assert.AreEqual(new BytesRef("test"), de.GetPayload());
+            writer.Dispose();
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestPayloadsWithoutPositions()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.StoreTermVectors = true;
+            customType.StoreTermVectorPositions = false;
+            customType.StoreTermVectorPayloads = true;
+            customType.StoreTermVectorOffsets = Random().NextBoolean();
+            doc.Add(new Field("field", "foo", customType));
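+            // storing term vector payloads requires term vector positions, so indexing
+            // this document is expected to fail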
+            try
+            {
+                writer.AddDocument(doc);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            writer.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs b/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs
new file mode 100644
index 0000000..ce7e767
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs
@@ -0,0 +1,318 @@
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestPerSegmentDeletes : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestDeletes1()
+        {
+            //IndexWriter.debug2 = System.out;
+            Directory dir = new MockDirectoryWrapper(new Random(Random().Next()), new RAMDirectory());
+            IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetMergeScheduler(new SerialMergeScheduler());
+            iwc.SetMaxBufferedDocs(5000);
+            iwc.SetRAMBufferSizeMB(100);
+            RangeMergePolicy fsmp = new RangeMergePolicy(this, false);
+            iwc.SetMergePolicy(fsmp);
+            IndexWriter writer = new IndexWriter(dir, iwc);
+            for (int x = 0; x < 5; x++)
+            {
+                writer.AddDocument(DocHelper.CreateDocument(x, "1", 2));
+                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
+            }
+            //System.out.println("commit1");
+            writer.Commit();
+            Assert.AreEqual(1, writer.SegmentCount);
+            for (int x = 5; x < 10; x++)
+            {
+                writer.AddDocument(DocHelper.CreateDocument(x, "2", 2));
+                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
+            }
+            //System.out.println("commit2");
+            writer.Commit();
+            Assert.AreEqual(2, writer.SegmentCount);
+
+            for (int x = 10; x < 15; x++)
+            {
+                writer.AddDocument(DocHelper.CreateDocument(x, "3", 2));
+                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
+            }
+
+            writer.DeleteDocuments(new Term("id", "1"));
+
+            writer.DeleteDocuments(new Term("id", "11"));
+
+            // flushing without applying deletes means
+            // there will still be deletes in the segment infos
+            writer.Flush(false, false);
+            Assert.IsTrue(writer.bufferedUpdatesStream.Any());
+
+            // getting a reader flushes pending deletes,
+            // so there should not be any more
+            IndexReader r1 = writer.Reader;
+            Assert.IsFalse(writer.bufferedUpdatesStream.Any());
+            r1.Dispose();
+
+            // delete id:2 from the first segment
+            // merge segments 0 and 1
+            // which should apply the delete id:2
+            writer.DeleteDocuments(new Term("id", "2"));
+            writer.Flush(false, false);
+            fsmp = (RangeMergePolicy)writer.Config.MergePolicy;
+            fsmp.DoMerge = true;
+            fsmp.Start = 0;
+            fsmp.Length = 2;
+            writer.MaybeMerge();
+
+            Assert.AreEqual(2, writer.SegmentCount);
+
+            // id:2 shouldn't exist anymore because
+            // it's been applied in the merge and now it's gone
+            IndexReader r2 = writer.Reader;
+            int[] id2docs = ToDocsArray(new Term("id", "2"), null, r2);
+            Assert.IsTrue(id2docs == null);
+            r2.Dispose();
+
+            /*
+            /// // added docs are in the ram buffer
+            /// for (int x = 15; x < 20; x++) {
+            ///  writer.AddDocument(TestIndexWriterReader.CreateDocument(x, "4", 2));
+            ///  System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
+            /// }
+            /// Assert.IsTrue(writer.numRamDocs() > 0);
+            /// // delete from the ram buffer
+            /// writer.DeleteDocuments(new Term("id", Integer.toString(13)));
+            ///
+            /// Term id3 = new Term("id", Integer.toString(3));
+            ///
+            /// // delete from the 1st segment
+            /// writer.DeleteDocuments(id3);
+            ///
+            /// Assert.IsTrue(writer.numRamDocs() > 0);
+            ///
+            /// //System.out
+            /// //    .println("segdels1:" + writer.docWriter.deletesToString());
+            ///
+            /// //Assert.IsTrue(writer.docWriter.segmentDeletes.Size() > 0);
+            ///
+            /// // we cause a merge to happen
+            /// fsmp.doMerge = true;
+            /// fsmp.start = 0;
+            /// fsmp.Length = 2;
+            /// System.out.println("maybeMerge "+writer.SegmentInfos);
+            ///
+            /// SegmentInfo info0 = writer.SegmentInfos.Info(0);
+            /// SegmentInfo info1 = writer.SegmentInfos.Info(1);
+            ///
+            /// writer.MaybeMerge();
+            /// System.out.println("maybeMerge after "+writer.SegmentInfos);
+            /// // there should be docs in RAM
+            /// Assert.IsTrue(writer.numRamDocs() > 0);
+            ///
+            /// // assert we've merged the 1 and 2 segments
+            /// // and still have a segment leftover == 2
+            /// Assert.AreEqual(2, writer.SegmentInfos.Size());
+            /// Assert.IsFalse(segThere(info0, writer.SegmentInfos));
+            /// Assert.IsFalse(segThere(info1, writer.SegmentInfos));
+            ///
+            /// //System.out.println("segdels2:" + writer.docWriter.deletesToString());
+            ///
+            /// //Assert.IsTrue(writer.docWriter.segmentDeletes.Size() > 0);
+            ///
+            /// IndexReader r = writer.GetReader();
+            /// IndexReader r1 = r.getSequentialSubReaders()[0];
+            /// printDelDocs(r1.GetLiveDocs());
+            /// int[] docs = toDocsArray(id3, null, r);
+            /// System.out.println("id3 docs:"+Arrays.toString(docs));
+            /// // there shouldn't be any docs for id:3
+            /// Assert.IsTrue(docs == null);
+            /// r.Dispose();
+            ///
+            /// part2(writer, fsmp);
+            ///
+            */
+            // System.out.println("segdels2:"+writer.docWriter.segmentDeletes.toString());
+            //System.out.println("close");
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// static boolean hasPendingDeletes(SegmentInfos infos) {
+        ///  for (SegmentInfo info : infos) {
+        ///    if (info.deletes.Any()) {
+        ///      return true;
+        ///    }
+        ///  }
+        ///  return false;
+        /// }
+        ///
+        /// </summary>
+        internal virtual void Part2(IndexWriter writer, RangeMergePolicy fsmp)
+        {
+            for (int x = 20; x < 25; x++)
+            {
+                writer.AddDocument(DocHelper.CreateDocument(x, "5", 2));
+                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
+            }
+            writer.Flush(false, false);
+            for (int x = 25; x < 30; x++)
+            {
+                writer.AddDocument(DocHelper.CreateDocument(x, "5", 2));
+                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
+            }
+            writer.Flush(false, false);
+
+            //System.out.println("infos3:"+writer.SegmentInfos);
+
+            Term delterm = new Term("id", "8");
+            writer.DeleteDocuments(delterm);
+            //System.out.println("segdels3:" + writer.docWriter.deletesToString());
+
+            fsmp.DoMerge = true;
+            fsmp.Start = 1;
+            fsmp.Length = 2;
+            writer.MaybeMerge();
+
+            // info1, the segment newly created by the merge, should have no
+            // deletes because they were applied during the merge
+            //SegmentInfo info1 = writer.SegmentInfos.Info(1);
+            //Assert.IsFalse(exists(info1, writer.docWriter.segmentDeletes));
+
+            //System.out.println("infos4:"+writer.SegmentInfos);
+            //System.out.println("segdels4:" + writer.docWriter.deletesToString());
+        }
+
+        internal virtual bool SegThere(SegmentCommitInfo info, SegmentInfos infos)
+        {
+            foreach (SegmentCommitInfo si in infos.Segments)
+            {
+                if (si.Info.Name.Equals(info.Info.Name))
+                {
+                    return true;
+                }
+            }
+            return false;
+        }
+
+        public static void PrintDelDocs(IBits bits)
+        {
+            if (bits == null)
+            {
+                return;
+            }
+            for (int x = 0; x < bits.Length; x++)
+            {
+                Console.WriteLine(x + ":" + bits.Get(x));
+            }
+        }
+
+        public virtual int[] ToDocsArray(Term term, IBits bits, IndexReader reader)
+        {
+            Fields fields = MultiFields.GetFields(reader);
+            Terms cterms = fields.GetTerms(term.Field);
+            TermsEnum ctermsEnum = cterms.GetIterator(null);
+            if (ctermsEnum.SeekExact(new BytesRef(term.Text())))
+            {
+                DocsEnum docsEnum = TestUtil.Docs(Random(), ctermsEnum, bits, null, DocsEnum.FLAG_NONE);
+                return ToArray(docsEnum);
+            }
+            return null;
+        }
+
+        public static int[] ToArray(DocsEnum docsEnum)
+        {
+            IList<int?> docs = new List<int?>();
+            while (docsEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                int docID = docsEnum.DocID;
+                docs.Add(docID);
+            }
+            return ArrayUtil.ToInt32Array(docs);
+        }
+
+        public class RangeMergePolicy : MergePolicy
+        {
+            private readonly TestPerSegmentDeletes OuterInstance;
+
+            internal bool DoMerge = false;
+            internal int Start;
+            internal int Length;
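+
+            // when DoMerge is set, the next FindMerges call returns a single OneMerge
+            // covering the contiguous segment range [Start, Start + Length) and resets
+            // DoMerge, so each configured merge fires exactly once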
+
+            internal readonly bool UseCompoundFile_Renamed;
+
+            internal RangeMergePolicy(TestPerSegmentDeletes outerInstance, bool useCompoundFile)
+            {
+                this.OuterInstance = outerInstance;
+                this.UseCompoundFile_Renamed = useCompoundFile;
+            }
+
+            public override void Dispose()
+            {
+            }
+
+            public override MergeSpecification FindMerges(MergeTrigger? mergeTrigger, SegmentInfos segmentInfos)
+            {
+                MergeSpecification ms = new MergeSpecification();
+                if (DoMerge)
+                {
+                    OneMerge om = new OneMerge(segmentInfos.AsList().SubList(Start, Start + Length));
+                    ms.Add(om);
+                    DoMerge = false;
+                    return ms;
+                }
+                return null;
+            }
+
+            public override MergeSpecification FindForcedMerges(SegmentInfos segmentInfos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool?> segmentsToMerge)
+            {
+                return null;
+            }
+
+            public override MergeSpecification FindForcedDeletesMerges(SegmentInfos segmentInfos)
+            {
+                return null;
+            }
+
+            public override bool UseCompoundFile(SegmentInfos segments, SegmentCommitInfo newSegment)
+            {
+                return UseCompoundFile_Renamed;
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestPersistentSnapshotDeletionPolicy.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestPersistentSnapshotDeletionPolicy.cs b/src/Lucene.Net.Tests/Index/TestPersistentSnapshotDeletionPolicy.cs
new file mode 100644
index 0000000..84c7a59
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestPersistentSnapshotDeletionPolicy.cs
@@ -0,0 +1,260 @@
+using System.Diagnostics;
+
+namespace Lucene.Net.Index
+{
+    using NUnit.Framework;
+    using System;
+    using System.IO;
+    using Util;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements. See the NOTICE file distributed with this
+         * work for additional information regarding copyright ownership. The ASF
+         * licenses this file to You under the Apache License, Version 2.0 (the
+         * "License"); you may not use this file except in compliance with the License.
+         * You may obtain a copy of the License at
+         *
+         * http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+         * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+         * License for the specific language governing permissions and limitations under
+         * the License.
+         */
+
+    using Document = Documents.Document;
+    using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
+
+    [TestFixture]
+    public class TestPersistentSnapshotDeletionPolicy : TestSnapshotDeletionPolicy
+    {
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            base.TearDown();
+        }
+
+        private SnapshotDeletionPolicy GetDeletionPolicy(Directory dir)
+        {
+            return new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.CREATE);
+        }
+
+        [Test]
+        public virtual void TestExistingSnapshots()
+        {
+            int numSnapshots = 3;
+            MockDirectoryWrapper dir = NewMockDirectory();
+            IndexWriter writer = new IndexWriter(dir, GetConfig(Random(), GetDeletionPolicy(dir)));
+            PersistentSnapshotDeletionPolicy psdp = (PersistentSnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+            Assert.IsNull(psdp.LastSaveFile);
+            PrepareIndexAndSnapshots(psdp, writer, numSnapshots);
+            Assert.IsNotNull(psdp.LastSaveFile);
+            writer.Dispose();
+
+            // Make sure only 1 save file exists:
+            int count = 0;
+            foreach (string file in dir.ListAll())
+            {
+                if (file.StartsWith(PersistentSnapshotDeletionPolicy.SNAPSHOTS_PREFIX))
+                {
+                    count++;
+                }
+            }
+            Assert.AreEqual(1, count);
+
+            // Make sure we fsync:
+            dir.Crash();
+            dir.ClearCrash();
+
+            // Re-initialize and verify snapshots were persisted
+            psdp = new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.APPEND);
+
+            writer = new IndexWriter(dir, GetConfig(Random(), psdp));
+            psdp = (PersistentSnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+
+            Assert.AreEqual(numSnapshots, psdp.GetSnapshots().Count);
+            Assert.AreEqual(numSnapshots, psdp.SnapshotCount);
+            AssertSnapshotExists(dir, psdp, numSnapshots, false);
+
+            writer.AddDocument(new Document());
+            writer.Commit();
+            Snapshots.Add(psdp.Snapshot());
+            Assert.AreEqual(numSnapshots + 1, psdp.GetSnapshots().Count);
+            Assert.AreEqual(numSnapshots + 1, psdp.SnapshotCount);
+            AssertSnapshotExists(dir, psdp, numSnapshots + 1, false);
+
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNoSnapshotInfos()
+        {
+            Directory dir = NewDirectory();
+            new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.CREATE);
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestMissingSnapshots()
+        {
+            Directory dir = NewDirectory();
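+            // OpenMode.APPEND requires an existing snapshots file, so constructing
+            // against an empty directory should fail.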
+            try
+            {
+                new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.APPEND);
+                Assert.Fail("did not hit expected exception");
+            }
+            catch (InvalidOperationException)
+            {
+                // expected
+            }
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestExceptionDuringSave()
+        {
+            MockDirectoryWrapper dir = NewMockDirectory();
+            dir.FailOn(new FailureAnonymousInnerClassHelper(this, dir));
+            IndexWriter writer = new IndexWriter(dir, GetConfig(Random(), new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.CREATE_OR_APPEND)));
+            writer.AddDocument(new Document());
+            writer.Commit();
+
+            PersistentSnapshotDeletionPolicy psdp = (PersistentSnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
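+            // The injected failure fires while the snapshot state is being saved;
+            // the policy must roll the snapshot back so nothing is recorded.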
+            try
+            {
+                psdp.Snapshot();
+            }
+            catch (IOException ioe)
+            {
+                if (ioe.Message.Equals("now fail on purpose"))
+                {
+                    // ok
+                }
+                else
+                {
+                    throw; // rethrow unexpected exceptions without resetting the stack trace
+                }
+            }
+            Assert.AreEqual(0, psdp.SnapshotCount);
+            writer.Dispose();
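+            // The failed snapshot must not have pinned a commit point, so only
+            // the last commit should remain.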
+            Assert.AreEqual(1, DirectoryReader.ListCommits(dir).Count);
+            dir.Dispose();
+        }
+
+        private class FailureAnonymousInnerClassHelper : MockDirectoryWrapper.Failure
+        {
+            private readonly TestPersistentSnapshotDeletionPolicy OuterInstance;
+
+            private MockDirectoryWrapper Dir;
+
+            public FailureAnonymousInnerClassHelper(TestPersistentSnapshotDeletionPolicy outerInstance, MockDirectoryWrapper dir)
+            {
+                this.OuterInstance = outerInstance;
+                this.Dir = dir;
+            }
+
+            public override void Eval(MockDirectoryWrapper dir)
+            {
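+                // Simulate an I/O error while the snapshot state is being saved:
+                // fail whenever a method named Persist is on the current stack.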
+                if (StackTraceHelper.DoesStackTraceContainMethod("Persist"))
+                {
+                    throw new IOException("now fail on purpose");
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestSnapshotRelease()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, GetConfig(Random(), GetDeletionPolicy(dir)));
+            PersistentSnapshotDeletionPolicy psdp = (PersistentSnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+            PrepareIndexAndSnapshots(psdp, writer, 1);
+            writer.Dispose();
+
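+            // Release() persists the updated (now empty) snapshot state immediately: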
+            psdp.Release(Snapshots[0]);
+
+            psdp = new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.APPEND);
+            Assert.AreEqual(0, psdp.SnapshotCount, "Should have no snapshots!");
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSnapshotReleaseByGeneration()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, GetConfig(Random(), GetDeletionPolicy(dir)));
+            PersistentSnapshotDeletionPolicy psdp = (PersistentSnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
+            PrepareIndexAndSnapshots(psdp, writer, 1);
+            writer.Dispose();
+
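+            // Releasing by commit generation should persist the removal just like
+            // releasing by the snapshot commit itself: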
+            psdp.Release(Snapshots[0].Generation);
+
+            psdp = new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.APPEND);
+            Assert.AreEqual(0, psdp.SnapshotCount, "Should have no snapshots!");
+            dir.Dispose();
+        }
+
+        #region TestSnapshotDeletionPolicy
+        // LUCENENET NOTE: Tests in a base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestSnapshotDeletionPolicy_Mem()
+        {
+            base.TestSnapshotDeletionPolicy_Mem();
+        }
+
+        [Test]
+        public override void TestBasicSnapshots()
+        {
+            base.TestBasicSnapshots();
+        }
+
+        [Test]
+        public override void TestMultiThreadedSnapshotting()
+        {
+            base.TestMultiThreadedSnapshotting();
+        }
+
+        [Test]
+        public override void TestRollbackToOldSnapshot()
+        {
+            base.TestRollbackToOldSnapshot();
+        }
+
+        [Test]
+        public override void TestReleaseSnapshot()
+        {
+            base.TestReleaseSnapshot();
+        }
+
+        [Test]
+        public override void TestSnapshotLastCommitTwice()
+        {
+            base.TestSnapshotLastCommitTwice();
+        }
+
+        [Test]
+        public override void TestMissingCommits()
+        {
+            base.TestMissingCommits();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestPostingsFormat.cs b/src/Lucene.Net.Tests/Index/TestPostingsFormat.cs
new file mode 100644
index 0000000..20c6b07
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestPostingsFormat.cs
@@ -0,0 +1,95 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Codec = Lucene.Net.Codecs.Codec;
+
+    /// <summary>
+    /// Tests the codec configuration defined randomly by LuceneTestCase
+    /// (typically a mix across different fields).
+    /// </summary>
+    [TestFixture]
+    public class TestPostingsFormat : BasePostingsFormatTestCase
+    {
+        protected override Codec Codec
+        {
+            get
+            {
+                return Codec.Default;
+            }
+        }
+
+        [Test]
+        public override void TestMergeStability()
+        {
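+            // Always skipped: the randomized test codec may pick the MockRandom
+            // postings format, whose output is not stable across merges.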
+            AssumeTrue("The MockRandom PF randomizes content on the fly, so we can't check it", false);
+        }
+
+        #region BasePostingsFormatTestCase
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void TestDocsOnly()
+        {
+            base.TestDocsOnly();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqs()
+        {
+            base.TestDocsAndFreqs();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositions()
+        {
+            base.TestDocsAndFreqsAndPositions();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndPayloads()
+        {
+            base.TestDocsAndFreqsAndPositionsAndPayloads();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndOffsets()
+        {
+            base.TestDocsAndFreqsAndPositionsAndOffsets();
+        }
+
+        [Test]
+        public override void TestDocsAndFreqsAndPositionsAndOffsetsAndPayloads()
+        {
+            base.TestDocsAndFreqsAndPositionsAndOffsetsAndPayloads();
+        }
+
+        [Test]
+        public override void TestRandom()
+        {
+            base.TestRandom();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file