Posted to commits@lucenenet.apache.org by ni...@apache.org on 2017/02/26 23:36:52 UTC

[04/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestFuzzyQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestFuzzyQuery.cs b/src/Lucene.Net.Tests/Search/TestFuzzyQuery.cs
new file mode 100644
index 0000000..0ead1d4
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestFuzzyQuery.cs
@@ -0,0 +1,389 @@
+using System.Collections.Generic;
+using Lucene.Net.Documents;
+using Lucene.Net.Util;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using MockTokenizer = Lucene.Net.Analysis.MockTokenizer;
+    using MultiReader = Lucene.Net.Index.MultiReader;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Tests <seealso cref="FuzzyQuery"/>.
+    /// </summary>
+    [TestFixture]
+    public class TestFuzzyQuery : LuceneTestCaseWithReducedFloatPrecision
+    {
+        [Test]
+        public virtual void TestFuzziness()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);
+            AddDoc("aaaaa", writer);
+            AddDoc("aaaab", writer);
+            AddDoc("aaabb", writer);
+            AddDoc("aabbb", writer);
+            AddDoc("abbbb", writer);
+            AddDoc("bbbbb", writer);
+            AddDoc("ddddd", writer);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+            writer.Dispose();
+
+            FuzzyQuery query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.DefaultMaxEdits, 0);
+            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+
+            // same with prefix
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.DefaultMaxEdits, 1);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.DefaultMaxEdits, 2);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.DefaultMaxEdits, 3);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.DefaultMaxEdits, 4);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.DefaultMaxEdits, 5);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.DefaultMaxEdits, 6);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+
+            // test scoring
+            query = new FuzzyQuery(new Term("field", "bbbbb"), FuzzyQuery.DefaultMaxEdits, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length, "3 documents should match");
+            IList<string> order = Arrays.AsList("bbbbb", "abbbb", "aabbb");
+            for (int i = 0; i < hits.Length; i++)
+            {
+                string term = searcher.Doc(hits[i].Doc).Get("field");
+                //Console.WriteLine(hits[i].Score);
+                Assert.AreEqual(order[i], term);
+            }
+
+            // test pq size by supplying maxExpansions=2
+            // this query would normally return 3 documents, because 3 terms match (see above):
+            query = new FuzzyQuery(new Term("field", "bbbbb"), FuzzyQuery.DefaultMaxEdits, 0, 2, false);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length, "only 2 documents should match");
+            order = Arrays.AsList("bbbbb", "abbbb");
+            for (int i = 0; i < hits.Length; i++)
+            {
+                string term = searcher.Doc(hits[i].Doc).Get("field");
+                //Console.WriteLine(hits[i].Score);
+                Assert.AreEqual(order[i], term);
+            }
+
+            // not similar enough:
+            query = new FuzzyQuery(new Term("field", "xxxxx"), FuzzyQuery.DefaultMaxEdits, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaccc"), FuzzyQuery.DefaultMaxEdits, 0); // edit distance to "aaaaa" = 3
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+
+            // query identical to a word in the index:
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.DefaultMaxEdits, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual("aaaaa", searcher.Doc(hits[0].Doc).Get("field"));
+            // default allows for up to two edits:
+            Assert.AreEqual("aaaab", searcher.Doc(hits[1].Doc).Get("field"));
+            Assert.AreEqual("aaabb", searcher.Doc(hits[2].Doc).Get("field"));
+
+            // query similar to a word in the index:
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.DefaultMaxEdits, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual("aaaaa", searcher.Doc(hits[0].Doc).Get("field"));
+            Assert.AreEqual("aaaab", searcher.Doc(hits[1].Doc).Get("field"));
+            Assert.AreEqual("aaabb", searcher.Doc(hits[2].Doc).Get("field"));
+
+            // now with prefix
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.DefaultMaxEdits, 1);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual("aaaaa", searcher.Doc(hits[0].Doc).Get("field"));
+            Assert.AreEqual("aaaab", searcher.Doc(hits[1].Doc).Get("field"));
+            Assert.AreEqual("aaabb", searcher.Doc(hits[2].Doc).Get("field"));
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.DefaultMaxEdits, 2);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual("aaaaa", searcher.Doc(hits[0].Doc).Get("field"));
+            Assert.AreEqual("aaaab", searcher.Doc(hits[1].Doc).Get("field"));
+            Assert.AreEqual("aaabb", searcher.Doc(hits[2].Doc).Get("field"));
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.DefaultMaxEdits, 3);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual("aaaaa", searcher.Doc(hits[0].Doc).Get("field"));
+            Assert.AreEqual("aaaab", searcher.Doc(hits[1].Doc).Get("field"));
+            Assert.AreEqual("aaabb", searcher.Doc(hits[2].Doc).Get("field"));
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.DefaultMaxEdits, 4);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            Assert.AreEqual("aaaaa", searcher.Doc(hits[0].Doc).Get("field"));
+            Assert.AreEqual("aaaab", searcher.Doc(hits[1].Doc).Get("field"));
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.DefaultMaxEdits, 5);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.DefaultMaxEdits, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual("ddddd", searcher.Doc(hits[0].Doc).Get("field"));
+
+            // now with prefix
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.DefaultMaxEdits, 1);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual("ddddd", searcher.Doc(hits[0].Doc).Get("field"));
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.DefaultMaxEdits, 2);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual("ddddd", searcher.Doc(hits[0].Doc).Get("field"));
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.DefaultMaxEdits, 3);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual("ddddd", searcher.Doc(hits[0].Doc).Get("field"));
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.DefaultMaxEdits, 4);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual("ddddd", searcher.Doc(hits[0].Doc).Get("field"));
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.DefaultMaxEdits, 5);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+
+            // different field = no match:
+            query = new FuzzyQuery(new Term("anotherfield", "ddddX"), FuzzyQuery.DefaultMaxEdits, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void Test2()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false), Similarity, TimeZone);
+            AddDoc("LANGE", writer);
+            AddDoc("LUETH", writer);
+            AddDoc("PIRSING", writer);
+            AddDoc("RIEGEL", writer);
+            AddDoc("TRZECZIAK", writer);
+            AddDoc("WALKER", writer);
+            AddDoc("WBR", writer);
+            AddDoc("WE", writer);
+            AddDoc("WEB", writer);
+            AddDoc("WEBE", writer);
+            AddDoc("WEBER", writer);
+            AddDoc("WEBERE", writer);
+            AddDoc("WEBREE", writer);
+            AddDoc("WEBEREI", writer);
+            AddDoc("WBRE", writer);
+            AddDoc("WITTKOPF", writer);
+            AddDoc("WOJNAROWSKI", writer);
+            AddDoc("WRICKE", writer);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+            writer.Dispose();
+
+            FuzzyQuery query = new FuzzyQuery(new Term("field", "WEBER"), 2, 1);
+            //query.setRewriteMethod(FuzzyQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(8, hits.Length);
+
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        /// <summary>
+        /// MultiTermQuery provides (via attribute) information about which values
+        /// must be competitive to enter the priority queue.
+        ///
+        /// FuzzyQuery optimizes itself around this information; if the attribute
+        /// is not implemented correctly, there will be problems!
+        /// </summary>
+        [Test]
+        public virtual void TestTieBreaker()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);
+            AddDoc("a123456", writer);
+            AddDoc("c123456", writer);
+            AddDoc("d123456", writer);
+            AddDoc("e123456", writer);
+
+            Directory directory2 = NewDirectory();
+            RandomIndexWriter writer2 = new RandomIndexWriter(Random(), directory2, Similarity, TimeZone);
+            AddDoc("a123456", writer2);
+            AddDoc("b123456", writer2);
+            AddDoc("b123456", writer2);
+            AddDoc("b123456", writer2);
+            AddDoc("c123456", writer2);
+            AddDoc("f123456", writer2);
+
+            IndexReader ir1 = writer.Reader;
+            IndexReader ir2 = writer2.Reader;
+
+            MultiReader mr = new MultiReader(ir1, ir2);
+            IndexSearcher searcher = NewSearcher(mr);
+            FuzzyQuery fq = new FuzzyQuery(new Term("field", "z123456"), 1, 0, 2, false);
+            TopDocs docs = searcher.Search(fq, 2);
+            Assert.AreEqual(5, docs.TotalHits); // 5 docs, from the a and b's
+            mr.Dispose();
+            ir1.Dispose();
+            ir2.Dispose();
+            writer.Dispose();
+            writer2.Dispose();
+            directory.Dispose();
+            directory2.Dispose();
+        }
+
+        /// <summary>
+        /// Test the TopTermsBoostOnlyBooleanQueryRewrite rewrite method. </summary>
+        [Test]
+        public virtual void TestBoostOnlyRewrite()
+        {
+            Directory directory = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);
+            AddDoc("Lucene", writer);
+            AddDoc("Lucene", writer);
+            AddDoc("Lucenne", writer);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+            writer.Dispose();
+
+            FuzzyQuery query = new FuzzyQuery(new Term("field", "lucene"));
+            query.MultiTermRewriteMethod = (new MultiTermQuery.TopTermsBoostOnlyBooleanQueryRewrite(50));
+            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            // normally, 'Lucenne' would be the first result as IDF will skew the score.
+            Assert.AreEqual("Lucene", reader.Document(hits[0].Doc).Get("field"));
+            Assert.AreEqual("Lucene", reader.Document(hits[1].Doc).Get("field"));
+            Assert.AreEqual("Lucenne", reader.Document(hits[2].Doc).Get("field"));
+            reader.Dispose();
+            directory.Dispose();
+        }
+
+        [Test]
+        public virtual void TestGiga()
+        {
+            MockAnalyzer analyzer = new MockAnalyzer(Random());
+            Directory index = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), index, Similarity, TimeZone);
+
+            AddDoc("Lucene in Action", w);
+            AddDoc("Lucene for Dummies", w);
+
+            //addDoc("Giga", w);
+            AddDoc("Giga byte", w);
+
+            AddDoc("ManagingGigabytesManagingGigabyte", w);
+            AddDoc("ManagingGigabytesManagingGigabytes", w);
+
+            AddDoc("The Art of Computer Science", w);
+            AddDoc("J. K. Rowling", w);
+            AddDoc("JK Rowling", w);
+            AddDoc("Joanne K Roling", w);
+            AddDoc("Bruce Willis", w);
+            AddDoc("Willis bruce", w);
+            AddDoc("Brute willis", w);
+            AddDoc("B. willis", w);
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            Query q = new FuzzyQuery(new Term("field", "giga"), 0);
+
+            // 3. search
+            IndexSearcher searcher = NewSearcher(r);
+            ScoreDoc[] hits = searcher.Search(q, 10).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual("Giga byte", searcher.Doc(hits[0].Doc).Get("field"));
+            r.Dispose();
+            index.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDistanceAsEditsSearching()
+        {
+            Directory index = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), index, Similarity, TimeZone);
+            AddDoc("foobar", w);
+            AddDoc("test", w);
+            AddDoc("working", w);
+            IndexReader reader = w.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+            w.Dispose();
+
+            FuzzyQuery q = new FuzzyQuery(new Term("field", "fouba"), 2);
+            ScoreDoc[] hits = searcher.Search(q, 10).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual("foobar", searcher.Doc(hits[0].Doc).Get("field"));
+
+            q = new FuzzyQuery(new Term("field", "foubara"), 2);
+            hits = searcher.Search(q, 10).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual("foobar", searcher.Doc(hits[0].Doc).Get("field"));
+
+            try
+            {
+                q = new FuzzyQuery(new Term("field", "t"), 3);
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            reader.Dispose();
+            index.Dispose();
+        }
+
+        private void AddDoc(string text, RandomIndexWriter writer)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("field", text, Field.Store.YES));
+            writer.AddDocument(doc);
+        }
+    }
+}
\ No newline at end of file
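
The fuzziness expectations above follow directly from Levenshtein edit distance: with the default maxEdits of 2, "aaaaa" still reaches "aaabb" (two substitutions) but not "aabbb" or "aaccc" (three each). Below is a minimal, Lucene-free sketch of that distance for checking the expected hit counts by hand; it is plain Levenshtein without transpositions, which matches the false flag the five-argument constructor passes in the maxExpansions test.

    using System;

    static class EditDistance
    {
        // Classic dynamic-programming Levenshtein distance; FuzzyQuery's
        // maxEdits acts as a cutoff on this value.
        public static int Levenshtein(string a, string b)
        {
            int[,] d = new int[a.Length + 1, b.Length + 1];
            for (int i = 0; i <= a.Length; i++) d[i, 0] = i;
            for (int j = 0; j <= b.Length; j++) d[0, j] = j;
            for (int i = 1; i <= a.Length; i++)
                for (int j = 1; j <= b.Length; j++)
                {
                    int cost = a[i - 1] == b[j - 1] ? 0 : 1;
                    d[i, j] = Math.Min(Math.Min(d[i - 1, j] + 1, d[i, j - 1] + 1),
                                       d[i - 1, j - 1] + cost);
                }
            return d[a.Length, b.Length];
        }

        static void Main()
        {
            Console.WriteLine(Levenshtein("aaaaa", "aaabb")); // 2 -> matches under the default
            Console.WriteLine(Levenshtein("aaaaa", "aabbb")); // 3 -> no match
            Console.WriteLine(Levenshtein("aaaaa", "aaccc")); // 3 -> no match, as the test notes
        }
    }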

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestIndexSearcher.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestIndexSearcher.cs b/src/Lucene.Net.Tests/Search/TestIndexSearcher.cs
new file mode 100644
index 0000000..d5dce40
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestIndexSearcher.cs
@@ -0,0 +1,145 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+using System.Threading.Tasks;
+
+namespace Lucene.Net.Search
+{
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+    * Licensed to the Apache Software Foundation (ASF) under one or more
+    * contributor license agreements.  See the NOTICE file distributed with
+    * this work for additional information regarding copyright ownership.
+    * The ASF licenses this file to You under the Apache License, Version 2.0
+    * (the "License"); you may not use this file except in compliance with
+    * the License.  You may obtain a copy of the License at
+    *
+    *     http://www.apache.org/licenses/LICENSE-2.0
+    *
+    * Unless required by applicable law or agreed to in writing, software
+    * distributed under the License is distributed on an "AS IS" BASIS,
+    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    * See the License for the specific language governing permissions and
+    * limitations under the License.
+    */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+
+    [TestFixture]
+    public class TestIndexSearcher : LuceneTestCase
+    {
+        internal Directory Dir;
+        internal IndexReader Reader;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), Dir, Similarity, TimeZone);
+            for (int i = 0; i < 100; i++)
+            {
+                Document doc = new Document();
+                doc.Add(NewStringField("field", Convert.ToString(i), Field.Store.NO));
+                doc.Add(NewStringField("field2", Convert.ToString(i % 2 == 0), Field.Store.NO));
+                iw.AddDocument(doc);
+            }
+            Reader = iw.Reader;
+            iw.Dispose();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            base.TearDown();
+            Reader.Dispose();
+            Dir.Dispose();
+        }
+
+        // should not throw exception
+        [Test]
+        public virtual void TestHugeN()
+        {
+            TaskScheduler service = new LimitedConcurrencyLevelTaskScheduler(4);
+
+            IndexSearcher[] searchers = new IndexSearcher[] { new IndexSearcher(Reader), new IndexSearcher(Reader, service) };
+            Query[] queries = new Query[] { new MatchAllDocsQuery(), new TermQuery(new Term("field", "1")) };
+            Sort[] sorts = new Sort[] { null, new Sort(new SortField("field2", SortFieldType.STRING)) };
+            Filter[] filters = new Filter[] { null, new QueryWrapperFilter(new TermQuery(new Term("field2", "true"))) };
+            ScoreDoc[] afters = new ScoreDoc[] { null, new FieldDoc(0, 0f, new object[] { new BytesRef("boo!") }) };
+
+            foreach (IndexSearcher searcher in searchers)
+            {
+                foreach (ScoreDoc after in afters)
+                {
+                    foreach (Query query in queries)
+                    {
+                        foreach (Sort sort in sorts)
+                        {
+                            foreach (Filter filter in filters)
+                            {
+                                searcher.Search(query, int.MaxValue);
+                                searcher.SearchAfter(after, query, int.MaxValue);
+                                searcher.Search(query, filter, int.MaxValue);
+                                searcher.SearchAfter(after, query, filter, int.MaxValue);
+                                if (sort != null)
+                                {
+                                    searcher.Search(query, int.MaxValue, sort);
+                                    searcher.Search(query, filter, int.MaxValue, sort);
+                                    searcher.Search(query, filter, int.MaxValue, sort, true, true);
+                                    searcher.Search(query, filter, int.MaxValue, sort, true, false);
+                                    searcher.Search(query, filter, int.MaxValue, sort, false, true);
+                                    searcher.Search(query, filter, int.MaxValue, sort, false, false);
+                                    searcher.SearchAfter(after, query, filter, int.MaxValue, sort);
+                                    searcher.SearchAfter(after, query, filter, int.MaxValue, sort, true, true);
+                                    searcher.SearchAfter(after, query, filter, int.MaxValue, sort, true, false);
+                                    searcher.SearchAfter(after, query, filter, int.MaxValue, sort, false, true);
+                                    searcher.SearchAfter(after, query, filter, int.MaxValue, sort, false, false);
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+
+            TestUtil.ShutdownExecutorService(service);
+        }
+
+        [Test]
+        public virtual void TestSearchAfterPassedMaxDoc()
+        {
+            // LUCENE-5128: ensure we get a meaningful message if searchAfter exceeds maxDoc
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            w.AddDocument(new Document());
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            IndexSearcher s = new IndexSearcher(r);
+            try
+            {
+                s.SearchAfter(new ScoreDoc(r.MaxDoc, 0.54f), new MatchAllDocsQuery(), 10);
+                Assert.Fail("should have hit ArgumentException when searchAfter exceeds maxDoc");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // ok
+            }
+            finally
+            {
+                IOUtils.Close(r, dir);
+            }
+        }
+    }
+}
\ No newline at end of file
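
TestSearchAfterPassedMaxDoc pins down the contract that makes deep paging safe: the "after" document handed to SearchAfter must be a real docid (LUCENE-5128). A hedged sketch of the usual paging loop built from the same calls the tests exercise follows; searcher, query, and the page size of 10 are assumptions here, not part of the test.

    // Assumed: an open IndexSearcher searcher and a Query query.
    ScoreDoc after = null;
    while (true)
    {
        TopDocs page = after == null
            ? searcher.Search(query, 10)
            : searcher.SearchAfter(after, query, 10); // same overload TestHugeN exercises
        if (page.ScoreDocs.Length == 0)
            break;                                    // ran out of hits
        foreach (ScoreDoc sd in page.ScoreDocs)
            Console.WriteLine(searcher.Doc(sd.Doc).Get("field"));
        after = page.ScoreDocs[page.ScoreDocs.Length - 1]; // resume after the last hit
    }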

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs b/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs
new file mode 100644
index 0000000..9cd902c
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs
@@ -0,0 +1,245 @@
+using Lucene.Net.Documents;
+using Lucene.Net.Support;
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.Threading;
+
+namespace Lucene.Net.Search
+{
+    using Index;
+    using NUnit.Framework;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using Int32Field = Int32Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using StringField = StringField;
+    using Term = Lucene.Net.Index.Term;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    [TestFixture]
+    public class TestLiveFieldValues : LuceneTestCase
+    {
+        [Test]
+        public virtual void Test()
+        {
+            Directory dir = NewFSDirectory(CreateTempDir("livefieldupdates"));
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+
+            IndexWriter w = new IndexWriter(dir, iwc);
+
+            SearcherManager mgr = new SearcherManager(w, true, new SearcherFactoryAnonymousInnerClassHelper());
+
+            const int missing = -1;
+
+            LiveFieldValues<IndexSearcher, int?> rt = new LiveFieldValuesAnonymousInnerClassHelper(mgr, missing);
+
+            int numThreads = TestUtil.NextInt(Random(), 2, 5);
+            if (VERBOSE)
+            {
+                Console.WriteLine(numThreads + " threads");
+            }
+
+            CountdownEvent startingGun = new CountdownEvent(1);
+            IList<ThreadClass> threads = new List<ThreadClass>();
+
+            int iters = AtLeast(1000);
+            int idCount = TestUtil.NextInt(Random(), 100, 10000);
+
+            double reopenChance = Random().NextDouble() * 0.01;
+            double deleteChance = Random().NextDouble() * 0.25;
+            double addChance = Random().NextDouble() * 0.5;
+
+            for (int t = 0; t < numThreads; t++)
+            {
+                int threadID = t;
+                Random threadRandom = new Random(Random().Next());
+                ThreadClass thread = new ThreadAnonymousInnerClassHelper(w, mgr, missing, rt, startingGun, iters, idCount, reopenChance, deleteChance, addChance, t, threadID, threadRandom);
+                threads.Add(thread);
+                thread.Start();
+            }
+
+            startingGun.Signal();
+
+            foreach (ThreadClass thread in threads)
+            {
+                thread.Join();
+            }
+            mgr.MaybeRefresh();
+            Assert.AreEqual(0, rt.Count);
+
+            rt.Dispose();
+            mgr.Dispose();
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        private class SearcherFactoryAnonymousInnerClassHelper : SearcherFactory
+        {
+            public override IndexSearcher NewSearcher(IndexReader r)
+            {
+                return new IndexSearcher(r);
+            }
+        }
+
+        private class LiveFieldValuesAnonymousInnerClassHelper : LiveFieldValues<IndexSearcher, int?>
+        {
+            public LiveFieldValuesAnonymousInnerClassHelper(SearcherManager mgr, int missing)
+                : base(mgr, missing)
+            {
+            }
+
+            protected override int? LookupFromSearcher(IndexSearcher s, string id)
+            {
+                TermQuery tq = new TermQuery(new Term("id", id));
+                TopDocs hits = s.Search(tq, 1);
+                Assert.IsTrue(hits.TotalHits <= 1);
+                if (hits.TotalHits == 0)
+                {
+                    return null;
+                }
+                else
+                {
+                    Document doc = s.Doc(hits.ScoreDocs[0].Doc);
+                    return (int)doc.GetField("field").GetNumericValue();
+                }
+            }
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private IndexWriter w;
+            private SearcherManager Mgr;
+            private int? Missing;
+            private LiveFieldValues<IndexSearcher, int?> Rt;
+            private CountdownEvent StartingGun;
+            private int Iters;
+            private int IdCount;
+            private double ReopenChance;
+            private double DeleteChance;
+            private double AddChance;
+            private int t;
+            private int ThreadID;
+            private Random ThreadRandom;
+
+            public ThreadAnonymousInnerClassHelper(IndexWriter w, SearcherManager mgr, int? missing, LiveFieldValues<IndexSearcher, int?> rt, CountdownEvent startingGun, int iters, int idCount, double reopenChance, double deleteChance, double addChance, int t, int threadID, Random threadRandom)
+            {
+                this.w = w;
+                this.Mgr = mgr;
+                this.Missing = missing;
+                this.Rt = rt;
+                this.StartingGun = startingGun;
+                this.Iters = iters;
+                this.IdCount = idCount;
+                this.ReopenChance = reopenChance;
+                this.DeleteChance = deleteChance;
+                this.AddChance = addChance;
+                this.t = t;
+                this.ThreadID = threadID;
+                this.ThreadRandom = threadRandom;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    IDictionary<string, int?> values = new Dictionary<string, int?>();
+                    IList<string> allIDs = new SynchronizedList<string>();
+
+                    StartingGun.Wait();
+                    for (int iter = 0; iter < Iters; iter++)
+                    {
+                        // Add/update a document
+                        Document doc = new Document();
+                        // Threads must not update the same id at the
+                        // same time:
+                        if (ThreadRandom.NextDouble() <= AddChance)
+                        {
+                            string id = string.Format(CultureInfo.InvariantCulture, "{0}_{1:X4}", ThreadID, ThreadRandom.Next(IdCount));
+                            int field = ThreadRandom.Next(int.MaxValue);
+                            doc.Add(new StringField("id", id, Field.Store.YES));
+                            doc.Add(new Int32Field("field", (int)field, Field.Store.YES));
+                            w.UpdateDocument(new Term("id", id), doc);
+                            Rt.Add(id, field);
+                            if (!values.ContainsKey(id)) // key didn't exist before
+                            {
+                                allIDs.Add(id);
+                            }
+                            values[id] = field;
+                        }
+
+                        if (allIDs.Count > 0 && ThreadRandom.NextDouble() <= DeleteChance)
+                        {
+                            string randomID = allIDs[ThreadRandom.Next(allIDs.Count)];
+                            w.DeleteDocuments(new Term("id", randomID));
+                            Rt.Delete(randomID);
+                            values[randomID] = Missing;
+                        }
+
+                        if (ThreadRandom.NextDouble() <= ReopenChance || Rt.Count > 10000)
+                        {
+                            //System.out.println("refresh @ " + rt.Size());
+                            Mgr.MaybeRefresh();
+                            if (VERBOSE)
+                            {
+                                IndexSearcher s = Mgr.Acquire();
+                                try
+                                {
+                                    Console.WriteLine("TEST: reopen " + s);
+                                }
+                                finally
+                                {
+                                    Mgr.Release(s);
+                                }
+                                Console.WriteLine("TEST: " + values.Count + " values");
+                            }
+                        }
+
+                        if (ThreadRandom.Next(10) == 7)
+                        {
+                            Assert.AreEqual(null, Rt.Get("foo"));
+                        }
+
+                        if (allIDs.Count > 0)
+                        {
+                            string randomID = allIDs[ThreadRandom.Next(allIDs.Count)];
+                            int? expected = values[randomID];
+                            if (expected == Missing)
+                            {
+                                expected = null;
+                            }
+                            Assert.AreEqual(expected, Rt.Get(randomID), "id=" + randomID);
+                        }
+                    }
+                }
+                catch (Exception t)
+                {
+                    throw new Exception(t.Message, t);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
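
The invariant this test hammers is the LiveFieldValues contract: a value pushed through Rt.Add is visible via Rt.Get immediately, even before SearcherManager.MaybeRefresh makes it visible to the searcher. A hypothetical, much-simplified sketch of that pattern follows (the real class double-buffers maps across refreshes; all names below are illustrative, not the Lucene.Net API):

    using System;
    using System.Collections.Concurrent;

    class SimpleLiveValues<T>
    {
        // Writes since the last refresh, answered straight from RAM.
        private readonly ConcurrentDictionary<string, T> _sinceRefresh =
            new ConcurrentDictionary<string, T>();
        private readonly Func<string, T> _lookupFromSearcher; // cf. LookupFromSearcher above

        public SimpleLiveValues(Func<string, T> lookupFromSearcher)
        {
            _lookupFromSearcher = lookupFromSearcher;
        }

        public void Add(string id, T value) { _sinceRefresh[id] = value; }

        public T Get(string id)
        {
            T value;
            if (_sinceRefresh.TryGetValue(id, out value))
                return value;                 // recent write: RAM wins
            return _lookupFromSearcher(id);   // older write: the searcher already sees it
        }

        // Once a refresh has made the buffered writes searchable, they can be dropped.
        public void ClearAfterRefresh() { _sinceRefresh.Clear(); }
    }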

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestMatchAllDocsQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestMatchAllDocsQuery.cs b/src/Lucene.Net.Tests/Search/TestMatchAllDocsQuery.cs
new file mode 100644
index 0000000..3d8dc7e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestMatchAllDocsQuery.cs
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// Tests MatchAllDocsQuery.
+    /// </summary>
+    [TestFixture]
+    public class TestMatchAllDocsQuery : LuceneTestCase
+    {
+        private Analyzer Analyzer;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Analyzer = new MockAnalyzer(Random());
+        }
+
+        [Test]
+        public virtual void TestQuery()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter iw = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, Analyzer).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy()));
+            AddDoc("one", iw, 1f);
+            AddDoc("two", iw, 20f);
+            AddDoc("three four", iw, 300f);
+            IndexReader ir = DirectoryReader.Open(iw, true);
+
+            IndexSearcher @is = NewSearcher(ir);
+            ScoreDoc[] hits;
+
+            hits = @is.Search(new MatchAllDocsQuery(), null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual("one", @is.Doc(hits[0].Doc).Get("key"));
+            Assert.AreEqual("two", @is.Doc(hits[1].Doc).Get("key"));
+            Assert.AreEqual("three four", @is.Doc(hits[2].Doc).Get("key"));
+
+            // some artificial queries to trigger the use of skipTo():
+
+            BooleanQuery bq = new BooleanQuery();
+            bq.Add(new MatchAllDocsQuery(), Occur.MUST);
+            bq.Add(new MatchAllDocsQuery(), Occur.MUST);
+            hits = @is.Search(bq, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+
+            bq = new BooleanQuery();
+            bq.Add(new MatchAllDocsQuery(), Occur.MUST);
+            bq.Add(new TermQuery(new Term("key", "three")), Occur.MUST);
+            hits = @is.Search(bq, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+
+            iw.DeleteDocuments(new Term("key", "one"));
+            ir.Dispose();
+            ir = DirectoryReader.Open(iw, true);
+            @is = NewSearcher(ir);
+
+            hits = @is.Search(new MatchAllDocsQuery(), null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+
+            iw.Dispose();
+            ir.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestEquals()
+        {
+            Query q1 = new MatchAllDocsQuery();
+            Query q2 = new MatchAllDocsQuery();
+            Assert.IsTrue(q1.Equals(q2));
+            q1.Boost = 1.5f;
+            Assert.IsFalse(q1.Equals(q2));
+        }
+
+        private void AddDoc(string text, IndexWriter iw, float boost)
+        {
+            Document doc = new Document();
+            Field f = NewTextField("key", text, Field.Store.YES);
+            f.Boost = boost;
+            doc.Add(f);
+            iw.AddDocument(doc);
+        }
+    }
+}
\ No newline at end of file
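
The back-to-back MUST clauses in TestQuery exist only to push the searcher into conjunction scoring, where each sub-scorer leapfrogs the other via advance()/skipTo(). Reduced to sorted docid arrays, the pattern looks like this (a sketch of the idea, not Lucene's implementation):

    using System;
    using System.Collections.Generic;

    static class Leapfrog
    {
        // Intersect two sorted docid lists the way a two-clause conjunction
        // does: whichever side is behind skips forward to the other's doc.
        public static List<int> Intersect(int[] a, int[] b)
        {
            var hits = new List<int>();
            int i = 0, j = 0;
            while (i < a.Length && j < b.Length)
            {
                if (a[i] == b[j]) { hits.Add(a[i]); i++; j++; }
                else if (a[i] < b[j]) i++; // "advance" a toward b's current doc
                else j++;                  // "advance" b toward a's current doc
            }
            return hits;
        }

        static void Main()
        {
            // MatchAllDocs AND term "three": only the third doc survives.
            foreach (int doc in Intersect(new[] { 0, 1, 2 }, new[] { 2 }))
                Console.WriteLine(doc); // prints 2
        }
    }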

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
new file mode 100644
index 0000000..6e27f37
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
@@ -0,0 +1,431 @@
+using System.Linq;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using System.Collections.Generic;
+using System.Diagnostics;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using AtomicReader = Lucene.Net.Index.AtomicReader;
+    using BooleanWeight = Lucene.Net.Search.BooleanQuery.BooleanWeight;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using SimScorer = Lucene.Net.Search.Similarities.Similarity.SimScorer;
+    using SimWeight = Lucene.Net.Search.Similarities.Similarity.SimWeight;
+    using SortedSetDocValues = Lucene.Net.Index.SortedSetDocValues;
+    using SortedSetDocValuesField = SortedSetDocValuesField;
+    using StringField = StringField;
+    using Term = Lucene.Net.Index.Term;
+    using TermContext = Lucene.Net.Index.TermContext;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
+    /// <summary>
+    /// tests BooleanScorer2's minShouldMatch </summary>
+    [SuppressCodecs("Appending", "Lucene3x", "Lucene40", "Lucene41")]
+    [TestFixture]
+    public class TestMinShouldMatch2 : LuceneTestCase
+    {
+        internal static Directory Dir;
+        internal static DirectoryReader r;
+        internal static AtomicReader atomicReader;
+        internal static IndexSearcher Searcher;
+
+        internal static readonly string[] AlwaysTerms = new string[] { "a" };
+        internal static readonly string[] CommonTerms = new string[] { "b", "c", "d" };
+        internal static readonly string[] MediumTerms = new string[] { "e", "f", "g" };
+        internal static readonly string[] RareTerms = new string[] { "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z" };
+
+        /// <summary>
+        /// LUCENENET specific
+        /// Is non-static because Similarity and TimeZone are not static.
+        /// </summary>
+        [OneTimeSetUp]
+        public void BeforeClass()
+        {
+            Dir = NewDirectory();
+            RandomIndexWriter iw = new RandomIndexWriter(Random(), Dir, Similarity, TimeZone);
+            int numDocs = AtLeast(300);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+
+                AddSome(doc, AlwaysTerms);
+
+                if (Random().Next(100) < 90)
+                {
+                    AddSome(doc, CommonTerms);
+                }
+                if (Random().Next(100) < 50)
+                {
+                    AddSome(doc, MediumTerms);
+                }
+                if (Random().Next(100) < 10)
+                {
+                    AddSome(doc, RareTerms);
+                }
+                iw.AddDocument(doc);
+            }
+            iw.ForceMerge(1);
+            iw.Dispose();
+            r = DirectoryReader.Open(Dir);
+            atomicReader = GetOnlySegmentReader(r);
+            Searcher = new IndexSearcher(atomicReader);
+            Searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper();
+        }
+
+        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        {
+            public DefaultSimilarityAnonymousInnerClassHelper()
+            {
+            }
+
+            public override float QueryNorm(float sumOfSquaredWeights)
+            {
+                return 1; // we disable queryNorm, both for debugging and ease of impl
+            }
+        }
+
+        [OneTimeTearDown]
+        public static void AfterClass()
+        {
+            atomicReader.Dispose();
+            Dir.Dispose();
+            Searcher = null;
+            atomicReader = null;
+            r = null;
+            Dir = null;
+        }
+
+        private static void AddSome(Document doc, string[] values)
+        {
+            IList<string> list = Arrays.AsList(values);
+            Collections.Shuffle(list);
+            int howMany = TestUtil.NextInt(Random(), 1, list.Count);
+            for (int i = 0; i < howMany; i++)
+            {
+                doc.Add(new StringField("field", list[i], Field.Store.NO));
+                doc.Add(new SortedSetDocValuesField("dv", new BytesRef(list[i])));
+            }
+        }
+
+        private Scorer Scorer(string[] values, int minShouldMatch, bool slow)
+        {
+            BooleanQuery bq = new BooleanQuery();
+            foreach (string value in values)
+            {
+                bq.Add(new TermQuery(new Term("field", value)), Occur.SHOULD);
+            }
+            bq.MinimumNumberShouldMatch = minShouldMatch;
+
+            BooleanWeight weight = (BooleanWeight)Searcher.CreateNormalizedWeight(bq);
+
+            if (slow)
+            {
+                return new SlowMinShouldMatchScorer(weight, atomicReader, Searcher);
+            }
+            else
+            {
+                return weight.GetScorer((AtomicReaderContext)atomicReader.Context, null);
+            }
+        }
+
+        private void AssertNext(Scorer expected, Scorer actual)
+        {
+            if (actual == null)
+            {
+                Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, expected.NextDoc());
+                return;
+            }
+            int doc;
+            while ((doc = expected.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                Assert.AreEqual(doc, actual.NextDoc());
+                Assert.AreEqual(expected.Freq, actual.Freq);
+                float expectedScore = expected.GetScore();
+                float actualScore = actual.GetScore();
+                Assert.AreEqual(expectedScore, actualScore, CheckHits.ExplainToleranceDelta(expectedScore, actualScore));
+            }
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, actual.NextDoc());
+        }
+
+        private void AssertAdvance(Scorer expected, Scorer actual, int amount)
+        {
+            if (actual == null)
+            {
+                Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, expected.NextDoc());
+                return;
+            }
+            int prevDoc = 0;
+            int doc;
+            while ((doc = expected.Advance(prevDoc + amount)) != DocIdSetIterator.NO_MORE_DOCS)
+            {
+                Assert.AreEqual(doc, actual.Advance(prevDoc + amount));
+                Assert.AreEqual(expected.Freq, actual.Freq);
+                float expectedScore = expected.GetScore();
+                float actualScore = actual.GetScore();
+                Assert.AreEqual(expectedScore, actualScore, CheckHits.ExplainToleranceDelta(expectedScore, actualScore));
+                prevDoc = doc;
+            }
+            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, actual.Advance(prevDoc + amount));
+        }
+
+        /// <summary>
+        /// simple test for next(): minShouldMatch=2 on 3 terms (one common, one medium, one rare) </summary>
+        [Test]
+        public virtual void TestNextCMR2()
+        {
+            for (int common = 0; common < CommonTerms.Length; common++)
+            {
+                for (int medium = 0; medium < MediumTerms.Length; medium++)
+                {
+                    for (int rare = 0; rare < RareTerms.Length; rare++)
+                    {
+                        Scorer expected = Scorer(new string[] { CommonTerms[common], MediumTerms[medium], RareTerms[rare] }, 2, true);
+                        Scorer actual = Scorer(new string[] { CommonTerms[common], MediumTerms[medium], RareTerms[rare] }, 2, false);
+                        AssertNext(expected, actual);
+                    }
+                }
+            }
+        }
+
+        /// <summary>
+        /// simple test for advance(): minShouldMatch=2 on 3 terms (one common, one medium, one rare) </summary>
+        [Test]
+        public virtual void TestAdvanceCMR2()
+        {
+            for (int amount = 25; amount < 200; amount += 25)
+            {
+                for (int common = 0; common < CommonTerms.Length; common++)
+                {
+                    for (int medium = 0; medium < MediumTerms.Length; medium++)
+                    {
+                        for (int rare = 0; rare < RareTerms.Length; rare++)
+                        {
+                            Scorer expected = Scorer(new string[] { CommonTerms[common], MediumTerms[medium], RareTerms[rare] }, 2, true);
+                            Scorer actual = Scorer(new string[] { CommonTerms[common], MediumTerms[medium], RareTerms[rare] }, 2, false);
+                            AssertAdvance(expected, actual, amount);
+                        }
+                    }
+                }
+            }
+        }
+
+        /// <summary>
+        /// test next with giant bq of all terms with varying minShouldMatch </summary>
+        [Test]
+        public virtual void TestNextAllTerms()
+        {
+            IList<string> termsList = new List<string>();
+            termsList.AddRange(Arrays.AsList(CommonTerms));
+            termsList.AddRange(Arrays.AsList(MediumTerms));
+            termsList.AddRange(Arrays.AsList(RareTerms));
+            string[] terms = termsList.ToArray();
+
+            for (int minNrShouldMatch = 1; minNrShouldMatch <= terms.Length; minNrShouldMatch++)
+            {
+                Scorer expected = Scorer(terms, minNrShouldMatch, true);
+                Scorer actual = Scorer(terms, minNrShouldMatch, false);
+                AssertNext(expected, actual);
+            }
+        }
+
+        /// <summary>
+        /// test advance with giant bq of all terms with varying minShouldMatch </summary>
+        [Test]
+        public virtual void TestAdvanceAllTerms()
+        {
+            IList<string> termsList = new List<string>();
+            termsList.AddRange(Arrays.AsList(CommonTerms));
+            termsList.AddRange(Arrays.AsList(MediumTerms));
+            termsList.AddRange(Arrays.AsList(RareTerms));
+            string[] terms = termsList.ToArray();
+
+            for (int amount = 25; amount < 200; amount += 25)
+            {
+                for (int minNrShouldMatch = 1; minNrShouldMatch <= terms.Length; minNrShouldMatch++)
+                {
+                    Scorer expected = Scorer(terms, minNrShouldMatch, true);
+                    Scorer actual = Scorer(terms, minNrShouldMatch, false);
+                    AssertAdvance(expected, actual, amount);
+                }
+            }
+        }
+
+        /// <summary>
+        /// test next with varying numbers of terms with varying minShouldMatch </summary>
+        [Test]
+        public virtual void TestNextVaryingNumberOfTerms()
+        {
+            IList<string> termsList = new List<string>();
+            termsList.AddRange(Arrays.AsList(CommonTerms));
+            termsList.AddRange(Arrays.AsList(MediumTerms));
+            termsList.AddRange(Arrays.AsList(RareTerms));
+            Collections.Shuffle(termsList);
+
+            for (int numTerms = 2; numTerms <= termsList.Count; numTerms++)
+            {
+                string[] terms = termsList.SubList(0, numTerms).ToArray(/*new string[0]*/);
+                for (int minNrShouldMatch = 1; minNrShouldMatch <= terms.Length; minNrShouldMatch++)
+                {
+                    Scorer expected = Scorer(terms, minNrShouldMatch, true);
+                    Scorer actual = Scorer(terms, minNrShouldMatch, false);
+                    AssertNext(expected, actual);
+                }
+            }
+        }
+
+        /// <summary>
+        /// test advance with varying numbers of terms with varying minShouldMatch </summary>
+        [Test]
+        public virtual void TestAdvanceVaryingNumberOfTerms()
+        {
+            IList<string> termsList = new List<string>();
+            termsList.AddRange(Arrays.AsList(CommonTerms));
+            termsList.AddRange(Arrays.AsList(MediumTerms));
+            termsList.AddRange(Arrays.AsList(RareTerms));
+            Collections.Shuffle(termsList);
+
+            for (int amount = 25; amount < 200; amount += 25)
+            {
+                for (int numTerms = 2; numTerms <= termsList.Count; numTerms++)
+                {
+                    string[] terms = termsList.SubList(0, numTerms).ToArray(/*new string[0]*/);
+                    for (int minNrShouldMatch = 1; minNrShouldMatch <= terms.Length; minNrShouldMatch++)
+                    {
+                        Scorer expected = Scorer(terms, minNrShouldMatch, true);
+                        Scorer actual = Scorer(terms, minNrShouldMatch, false);
+                        AssertAdvance(expected, actual, amount);
+                    }
+                }
+            }
+        }
+
+        // TODO: more tests
+
+        // a slow min-should-match scorer that uses a docvalues field.
+        // later, we can make debugging easier by having it record the set of
+        // ords it currently matched, e.g. to print out their values for the document
+        internal class SlowMinShouldMatchScorer : Scorer
+        {
+            internal int CurrentDoc = -1; // current docid
+            internal int CurrentMatched = -1; // current number of terms matched
+
+            internal readonly SortedSetDocValues Dv;
+            internal readonly int MaxDoc;
+
+            internal readonly HashSet<long?> Ords = new HashSet<long?>();
+            internal readonly SimScorer[] Sims;
+            internal readonly int MinNrShouldMatch;
+
+            internal double Score_Renamed = float.NaN;
+
+            internal SlowMinShouldMatchScorer(BooleanWeight weight, AtomicReader reader, IndexSearcher searcher)
+                : base(weight)
+            {
+                this.Dv = reader.GetSortedSetDocValues("dv");
+                this.MaxDoc = reader.MaxDoc;
+                BooleanQuery bq = (BooleanQuery)weight.Query;
+                this.MinNrShouldMatch = bq.MinimumNumberShouldMatch;
+                this.Sims = new SimScorer[(int)Dv.ValueCount];
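+                // Resolve each SHOULD clause's term to its ordinal in the "dv" field and
+                // precompute a SimScorer for it; terms not in the index (ord < 0) are skipped.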
+                foreach (BooleanClause clause in bq.GetClauses())
+                {
+                    Debug.Assert(!clause.IsProhibited);
+                    Debug.Assert(!clause.IsRequired);
+                    Term term = ((TermQuery)clause.Query).Term;
+                    long ord = Dv.LookupTerm(term.Bytes);
+                    if (ord >= 0)
+                    {
+                        bool success = Ords.Add(ord);
+                        Debug.Assert(success); // no dups
+                        TermContext context = TermContext.Build(reader.Context, term);
+                        SimWeight w = weight.Similarity.ComputeWeight(1f, searcher.CollectionStatistics("field"), searcher.TermStatistics(term, context));
+                        var dummy = w.GetValueForNormalization(); // ignored
+                        w.Normalize(1F, 1F);
+                        Sims[(int)ord] = weight.Similarity.GetSimScorer(w, (AtomicReaderContext)reader.Context);
+                    }
+                }
+            }
+
+            public override float GetScore()
+            {
+                Debug.Assert(Score_Renamed != 0, CurrentMatched.ToString());
+                return (float)Score_Renamed * ((BooleanWeight)m_weight).Coord(CurrentMatched, ((BooleanWeight)m_weight).MaxCoord);
+            }
+
+            public override int Freq
+            {
+                get { return CurrentMatched; }
+            }
+
+            public override int DocID
+            {
+                get { return CurrentDoc; }
+            }
+
+            public override int NextDoc()
+            {
+                Debug.Assert(CurrentDoc != NO_MORE_DOCS);
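+                // Brute force: visit every document in order and count how many of the
+                // query's term ordinals appear in its doc values set.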
+                for (CurrentDoc = CurrentDoc + 1; CurrentDoc < MaxDoc; CurrentDoc++)
+                {
+                    CurrentMatched = 0;
+                    Score_Renamed = 0;
+                    Dv.SetDocument(CurrentDoc);
+                    long ord;
+                    while ((ord = Dv.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
+                    {
+                        if (Ords.Contains(ord))
+                        {
+                            CurrentMatched++;
+                            Score_Renamed += Sims[(int)ord].Score(CurrentDoc, 1);
+                        }
+                    }
+                    if (CurrentMatched >= MinNrShouldMatch)
+                    {
+                        return CurrentDoc;
+                    }
+                }
+                return CurrentDoc = NO_MORE_DOCS;
+            }
+
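+            // Advance by linearly scanning with NextDoc(): O(maxDoc), but correctness is
+            // all that matters for this test-only reference scorer.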
+            public override int Advance(int target)
+            {
+                int doc;
+                while ((doc = NextDoc()) < target)
+                {
+                }
+                return doc;
+            }
+
+            public override long GetCost()
+            {
+                return MaxDoc;
+            }
+        }
+    }
+}
\ No newline at end of file

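The SlowMinShouldMatchScorer above is the brute-force oracle for BooleanQuery's
minimum-should-match mode, which the Test*VaryingNumberOfTerms tests compare
against the optimized scorer. A minimal sketch of that mode, using only API
members that appear in this diff (the field and term names are illustrative):

    BooleanQuery bq = new BooleanQuery();
    bq.Add(new TermQuery(new Term("field", "common")), Occur.SHOULD);
    bq.Add(new TermQuery(new Term("field", "medium")), Occur.SHOULD);
    bq.Add(new TermQuery(new Term("field", "rare")), Occur.SHOULD);
    bq.MinimumNumberShouldMatch = 2; // only docs matching >= 2 of the 3 terms are hits
    TopDocs hits = searcher.Search(bq, 10);
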
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestMultiPhraseQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestMultiPhraseQuery.cs b/src/Lucene.Net.Tests/Search/TestMultiPhraseQuery.cs
new file mode 100644
index 0000000..0995c13
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestMultiPhraseQuery.cs
@@ -0,0 +1,631 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using CannedTokenStream = Lucene.Net.Analysis.CannedTokenStream;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MultiFields = Lucene.Net.Index.MultiFields;
+    using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+    using TermsEnum = Lucene.Net.Index.TermsEnum;
+    using TextField = TextField;
+    using Token = Lucene.Net.Analysis.Token;
+
+    /// <summary>
+    /// Tests the <seealso cref="MultiPhraseQuery"/> class.
+    /// </summary>
+    [TestFixture]
+    public class TestMultiPhraseQuery : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestPhrasePrefix()
+        {
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Add("blueberry pie", writer);
+            Add("blueberry strudel", writer);
+            Add("blueberry pizza", writer);
+            Add("blueberry chewing gum", writer);
+            Add("bluebird pizza", writer);
+            Add("bluebird foobar pizza", writer);
+            Add("piccadilly circus", writer);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+
+            // search for "blueberry pi*":
+            MultiPhraseQuery query1 = new MultiPhraseQuery();
+            // search for "strawberry pi*":
+            MultiPhraseQuery query2 = new MultiPhraseQuery();
+            query1.Add(new Term("body", "blueberry"));
+            query2.Add(new Term("body", "strawberry"));
+
+            LinkedList<Term> termsWithPrefix = new LinkedList<Term>();
+
+            // this TermEnum gives "piccadilly", "pie" and "pizza".
+            string prefix = "pi";
+            TermsEnum te = MultiFields.GetFields(reader).GetTerms("body").GetIterator(null);
+            te.SeekCeil(new BytesRef(prefix));
+            do
+            {
+                string s = te.Term.Utf8ToString();
+                if (s.StartsWith(prefix))
+                {
+                    termsWithPrefix.AddLast(new Term("body", s));
+                }
+                else
+                {
+                    break;
+                }
+            } while (te.Next() != null);
+
+            query1.Add(termsWithPrefix.ToArray(/*new Term[0]*/));
+            Assert.AreEqual("body:\"blueberry (piccadilly pie pizza)\"", query1.ToString());
+            query2.Add(termsWithPrefix.ToArray(/*new Term[0]*/));
+            Assert.AreEqual("body:\"strawberry (piccadilly pie pizza)\"", query2.ToString());
+
+            ScoreDoc[] result;
+            result = searcher.Search(query1, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, result.Length);
+            result = searcher.Search(query2, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, result.Length);
+
+            // search for "blue* pizza":
+            MultiPhraseQuery query3 = new MultiPhraseQuery();
+            termsWithPrefix.Clear();
+            prefix = "blue";
+            te.SeekCeil(new BytesRef(prefix));
+
+            do
+            {
+                if (te.Term.Utf8ToString().StartsWith(prefix))
+                {
+                    termsWithPrefix.AddLast(new Term("body", te.Term.Utf8ToString()));
+                }
+            } while (te.Next() != null);
+
+            query3.Add(termsWithPrefix.ToArray(/*new Term[0]*/));
+            query3.Add(new Term("body", "pizza"));
+
+            result = searcher.Search(query3, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, result.Length); // blueberry pizza, bluebird pizza
+            Assert.AreEqual("body:\"(blueberry bluebird) pizza\"", query3.ToString());
+
+            // test slop:
+            query3.Slop = 1;
+            result = searcher.Search(query3, null, 1000).ScoreDocs;
+
+            // just make sure no exc:
+            searcher.Explain(query3, 0);
+
+            Assert.AreEqual(3, result.Length); // blueberry pizza, bluebird pizza,
+            // bluebird foobar pizza
+
+            MultiPhraseQuery query4 = new MultiPhraseQuery();
+            try
+            {
+                query4.Add(new Term("field1", "foo"));
+                query4.Add(new Term("field2", "foobar"));
+                Assert.Fail();
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                // okay, all terms must belong to the same field
+            }
+
+            writer.Dispose();
+            reader.Dispose();
+            indexStore.Dispose();
+        }
+
+        // LUCENE-2580
+        [Test]
+        public virtual void TestTall()
+        {
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Add("blueberry chocolate pie", writer);
+            Add("blueberry chocolate tart", writer);
+            IndexReader r = writer.Reader;
+            writer.Dispose();
+
+            IndexSearcher searcher = NewSearcher(r);
+            MultiPhraseQuery q = new MultiPhraseQuery();
+            q.Add(new Term("body", "blueberry"));
+            q.Add(new Term("body", "chocolate"));
+            q.Add(new Term[] { new Term("body", "pie"), new Term("body", "tart") });
+            Assert.AreEqual(2, searcher.Search(q, 1).TotalHits);
+            r.Dispose();
+            indexStore.Dispose();
+        }
+
+        [Test]
+        [Ignore("This appears to be a known issue")]
+        public virtual void TestMultiSloppyWithRepeats() //LUCENE-3821 fixes sloppy phrase scoring, except for this known problem
+        {
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Add("a b c d e f g h i k", writer);
+            IndexReader r = writer.Reader;
+            writer.Dispose();
+
+            IndexSearcher searcher = NewSearcher(r);
+
+            MultiPhraseQuery q = new MultiPhraseQuery();
+            // this will fail, when the scorer would propagate [a] rather than [a,b],
+            q.Add(new Term[] { new Term("body", "a"), new Term("body", "b") });
+            q.Add(new Term[] { new Term("body", "a") });
+            q.Slop = 6;
+            Assert.AreEqual(1, searcher.Search(q, 1).TotalHits); // should match on "a b"
+
+            r.Dispose();
+            indexStore.Dispose();
+        }
+
+        [Test]
+        public virtual void TestMultiExactWithRepeats()
+        {
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Add("a b c d e f g h i k", writer);
+            IndexReader r = writer.Reader;
+            writer.Dispose();
+
+            IndexSearcher searcher = NewSearcher(r);
+            MultiPhraseQuery q = new MultiPhraseQuery();
+            q.Add(new Term[] { new Term("body", "a"), new Term("body", "d") }, 0);
+            q.Add(new Term[] { new Term("body", "a"), new Term("body", "f") }, 2);
+            Assert.AreEqual(1, searcher.Search(q, 1).TotalHits); // should match on "d e f" (d at query position 0, f at +2)
+            r.Dispose();
+            indexStore.Dispose();
+        }
+
+        private void Add(string s, RandomIndexWriter writer)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("body", s, Field.Store.YES));
+            writer.AddDocument(doc);
+        }
+
+        [Test]
+        public virtual void TestBooleanQueryContainingSingleTermPrefixQuery()
+        {
+            // this tests against bug 33161 (now fixed)
+            // In order to cause the bug, the outer query must have more than one term
+            // and all terms must be required.
+            // The contained MultiPhraseQuery must contain exactly one term array.
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Add("blueberry pie", writer);
+            Add("blueberry chewing gum", writer);
+            Add("blue raspberry pie", writer);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+            // this query will be equivalent to +body:pie +body:"blue*"
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("body", "pie")), Occur.MUST);
+
+            MultiPhraseQuery trouble = new MultiPhraseQuery();
+            trouble.Add(new Term[] { new Term("body", "blueberry"), new Term("body", "blue") });
+            q.Add(trouble, Occur.MUST);
+
+            // exception will be thrown here without fix
+            ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
+
+            Assert.AreEqual(2, hits.Length, "Wrong number of hits");
+
+            // just make sure no exc:
+            searcher.Explain(q, 0);
+
+            writer.Dispose();
+            reader.Dispose();
+            indexStore.Dispose();
+        }
+
+        [Test]
+        public virtual void TestPhrasePrefixWithBooleanQuery()
+        {
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Add("this is a test", "object", writer);
+            Add("a note", "note", writer);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+
+            // this query will be equivalent to +type:note +body:"a t*"
+            BooleanQuery q = new BooleanQuery();
+            q.Add(new TermQuery(new Term("type", "note")), Occur.MUST);
+
+            MultiPhraseQuery trouble = new MultiPhraseQuery();
+            trouble.Add(new Term("body", "a"));
+            trouble.Add(new Term[] { new Term("body", "test"), new Term("body", "this") });
+            q.Add(trouble, Occur.MUST);
+
+            // exception will be thrown here without fix for #35626:
+            ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length, "Wrong number of hits");
+            writer.Dispose();
+            reader.Dispose();
+            indexStore.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNoDocs()
+        {
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Add("a note", "note", writer);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+
+            MultiPhraseQuery q = new MultiPhraseQuery();
+            q.Add(new Term("body", "a"));
+            q.Add(new Term[] { new Term("body", "nope"), new Term("body", "nope") });
+            Assert.AreEqual(0, searcher.Search(q, null, 1).TotalHits, "Wrong number of hits");
+
+            // just make sure no exc:
+            searcher.Explain(q, 0);
+
+            writer.Dispose();
+            reader.Dispose();
+            indexStore.Dispose();
+        }
+
+        [Test]
+        public virtual void TestHashCodeAndEquals()
+        {
+            MultiPhraseQuery query1 = new MultiPhraseQuery();
+            MultiPhraseQuery query2 = new MultiPhraseQuery();
+
+            Assert.AreEqual(query1.GetHashCode(), query2.GetHashCode());
+            Assert.IsTrue(query1.Equals(query2));
+            Assert.AreEqual(query1, query2);
+
+            Term term1 = new Term("someField", "someText");
+
+            query1.Add(term1);
+            query2.Add(term1);
+
+            Assert.AreEqual(query1.GetHashCode(), query2.GetHashCode());
+            Assert.AreEqual(query1, query2);
+
+            Term term2 = new Term("someField", "someMoreText");
+
+            query1.Add(term2);
+
+            Assert.IsFalse(query1.GetHashCode() == query2.GetHashCode());
+            Assert.IsFalse(query1.Equals(query2));
+
+            query2.Add(term2);
+
+            Assert.AreEqual(query1.GetHashCode(), query2.GetHashCode());
+            Assert.AreEqual(query1, query2);
+        }
+
+        private void Add(string s, string type, RandomIndexWriter writer)
+        {
+            Document doc = new Document();
+            doc.Add(NewTextField("body", s, Field.Store.YES));
+            doc.Add(NewStringField("type", type, Field.Store.NO));
+            writer.AddDocument(doc);
+        }
+
+        // LUCENE-2526
+        [Test]
+        public virtual void TestEmptyToString()
+        {
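+            // LUCENE-2526: ToString() on an empty MultiPhraseQuery must not throw.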
+            (new MultiPhraseQuery()).ToString();
+        }
+
+        [Test]
+        public virtual void TestCustomIDF()
+        {
+            Directory indexStore = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), indexStore, Similarity, TimeZone);
+            Add("this is a test", "object", writer);
+            Add("a note", "note", writer);
+
+            IndexReader reader = writer.Reader;
+            IndexSearcher searcher = NewSearcher(reader);
+            searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper(this);
+
+            MultiPhraseQuery query = new MultiPhraseQuery();
+            query.Add(new Term[] { new Term("body", "this"), new Term("body", "that") });
+            query.Add(new Term("body", "is"));
+            Weight weight = query.CreateWeight(searcher);
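+            // With idf stubbed to 10, the query weight is 10, so the value for
+            // normalization (the squared weight) is 10 * 10 = 100.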
+            Assert.AreEqual(10f * 10f, weight.GetValueForNormalization(), 0.001f);
+
+            writer.Dispose();
+            reader.Dispose();
+            indexStore.Dispose();
+        }
+
+        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        {
+            private readonly TestMultiPhraseQuery OuterInstance;
+
+            public DefaultSimilarityAnonymousInnerClassHelper(TestMultiPhraseQuery outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override Explanation IdfExplain(CollectionStatistics collectionStats, TermStatistics[] termStats)
+            {
+                return new Explanation(10f, "just a test");
+            }
+        }
+
+        [Test]
+        public virtual void TestZeroPosIncr()
+        {
+            Directory dir = new RAMDirectory();
+            Token[] tokens = new Token[3];
+            tokens[0] = new Token();
+            tokens[0].Append("a");
+            tokens[0].PositionIncrement = 1;
+            tokens[1] = new Token();
+            tokens[1].Append("b");
+            tokens[1].PositionIncrement = 0;
+            tokens[2] = new Token();
+            tokens[2].Append("c");
+            tokens[2].PositionIncrement = 0;
+
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(new TextField("field", new CannedTokenStream(tokens)));
+            writer.AddDocument(doc);
+            doc = new Document();
+            doc.Add(new TextField("field", new CannedTokenStream(tokens)));
+            writer.AddDocument(doc);
+            IndexReader r = writer.Reader;
+            writer.Dispose();
+            IndexSearcher s = NewSearcher(r);
+            MultiPhraseQuery mpq = new MultiPhraseQuery();
+            //mpq.setSlop(1);
+
+            // NOTE: not great that if we do the else clause here we
+            // get different scores!  MultiPhraseQuery counts that
+            // phrase as occurring twice per doc (it should be 1, I
+            // think?).  this is because MultipleTermPositions is able to
+            // return the same position more than once (0, in this
+            // case):
+            if (true)
+            {
+                mpq.Add(new Term[] { new Term("field", "b"), new Term("field", "c") }, 0);
+                mpq.Add(new Term[] { new Term("field", "a") }, 0);
+            }
+            else
+            {
+#pragma warning disable 162
+                mpq.Add(new Term[] { new Term("field", "a") }, 0);
+                mpq.Add(new Term[] { new Term("field", "b"), new Term("field", "c") }, 0);
+#pragma warning restore 162
+            }
+            TopDocs hits = s.Search(mpq, 2);
+            Assert.AreEqual(2, hits.TotalHits);
+            Assert.AreEqual(hits.ScoreDocs[0].Score, hits.ScoreDocs[1].Score, 1e-5);
+            /*
+            for(int hit=0;hit<hits.TotalHits;hit++) {
+              ScoreDoc sd = hits.ScoreDocs[hit];
+              System.out.println("  hit doc=" + sd.Doc + " score=" + sd.Score);
+            }
+            */
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        private static Token MakeToken(string text, int posIncr)
+        {
+            Token t = new Token();
+            t.Append(text);
+            t.PositionIncrement = posIncr;
+            return t;
+        }
+
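+        // A token with PositionIncrement == 0 stacks on the same position as the
+        // token before it - that is how each "1" below shares a position.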
+        private static readonly Token[] INCR_0_DOC_TOKENS = new Token[] {
+            MakeToken("x", 1), MakeToken("a", 1), MakeToken("1", 0), MakeToken("m", 1),
+            MakeToken("b", 1), MakeToken("1", 0), MakeToken("n", 1), MakeToken("c", 1), MakeToken("y", 1) };
+
+        private static readonly Token[] INCR_0_QUERY_TOKENS_AND = new Token[] {
+            MakeToken("a", 1), MakeToken("1", 0), MakeToken("b", 1), MakeToken("1", 0), MakeToken("c", 1) };
+
+        private static readonly Token[][] INCR_0_QUERY_TOKENS_AND_OR_MATCH = new Token[][] {
+            new Token[] { MakeToken("a", 1) }, new Token[] { MakeToken("x", 1), MakeToken("1", 0) },
+            new Token[] { MakeToken("b", 2) }, new Token[] { MakeToken("x", 2), MakeToken("1", 0) },
+            new Token[] { MakeToken("c", 3) } };
+
+        private static readonly Token[][] INCR_0_QUERY_TOKENS_AND_OR_NO_MATCH = new Token[][] {
+            new Token[] { MakeToken("x", 1) }, new Token[] { MakeToken("a", 1), MakeToken("1", 0) },
+            new Token[] { MakeToken("x", 2) }, new Token[] { MakeToken("b", 2), MakeToken("1", 0) },
+            new Token[] { MakeToken("c", 3) } };
+
+        /// <summary>
+        /// When created via the query parser, the MultiPhraseQuery is not strict about having
+        /// all query terms in each position - one term per position is sufficient (OR logic).
+        /// </summary>
+        [Test]
+        public virtual void TestZeroPosIncrSloppyParsedAnd()
+        {
+            MultiPhraseQuery q = new MultiPhraseQuery();
+            q.Add(new Term[] { new Term("field", "a"), new Term("field", "1") }, -1);
+            q.Add(new Term[] { new Term("field", "b"), new Term("field", "1") }, 0);
+            q.Add(new Term[] { new Term("field", "c") }, 1);
+            DoTestZeroPosIncrSloppy(q, 0);
+            q.Slop = 1;
+            DoTestZeroPosIncrSloppy(q, 0);
+            q.Slop = 2;
+            DoTestZeroPosIncrSloppy(q, 1);
+        }
+
+        private void DoTestZeroPosIncrSloppy(Query q, int nExpected)
+        {
+            Directory dir = NewDirectory(); // random dir
+            IndexWriterConfig cfg = NewIndexWriterConfig(TEST_VERSION_CURRENT, null);
+            IndexWriter writer = new IndexWriter(dir, cfg);
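+            // Index a single document whose tokens (including the zero position
+            // increments) come straight from the canned token stream.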
+            Document doc = new Document();
+            doc.Add(new TextField("field", new CannedTokenStream(INCR_0_DOC_TOKENS)));
+            writer.AddDocument(doc);
+            IndexReader r = DirectoryReader.Open(writer, false);
+            writer.Dispose();
+            IndexSearcher s = NewSearcher(r);
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("QUERY=" + q);
+            }
+
+            TopDocs hits = s.Search(q, 1);
+            Assert.AreEqual(nExpected, hits.TotalHits, "wrong number of results");
+
+            if (VERBOSE)
+            {
+                for (int hit = 0; hit < hits.TotalHits; hit++)
+                {
+                    ScoreDoc sd = hits.ScoreDocs[hit];
+                    Console.WriteLine("  hit doc=" + sd.Doc + " score=" + sd.Score);
+                }
+            }
+
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// PQ AND Mode - Manually creating a phrase query
+        /// </summary>
+        [Test]
+        public virtual void TestZeroPosIncrSloppyPqAnd()
+        {
+            PhraseQuery pq = new PhraseQuery();
+            int pos = -1;
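+            // Accumulate absolute positions from the increments; a zero-increment
+            // token lands on the same position as the token before it.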
+            foreach (Token tap in INCR_0_QUERY_TOKENS_AND)
+            {
+                pos += tap.PositionIncrement;
+                pq.Add(new Term("field", tap.ToString()), pos);
+            }
+            DoTestZeroPosIncrSloppy(pq, 0);
+            pq.Slop = 1;
+            DoTestZeroPosIncrSloppy(pq, 0);
+            pq.Slop = 2;
+            DoTestZeroPosIncrSloppy(pq, 1);
+        }
+
+        /// <summary>
+        /// MPQ AND Mode - Manually creating a multiple phrase query
+        /// </summary>
+        [Test]
+        public virtual void TestZeroPosIncrSloppyMpqAnd()
+        {
+            MultiPhraseQuery mpq = new MultiPhraseQuery();
+            int pos = -1;
+            foreach (Token tap in INCR_0_QUERY_TOKENS_AND)
+            {
+                pos += tap.PositionIncrement;
+                mpq.Add(new Term[] { new Term("field", tap.ToString()) }, pos); //AND logic
+            }
+            DoTestZeroPosIncrSloppy(mpq, 0);
+            mpq.Slop = 1;
+            DoTestZeroPosIncrSloppy(mpq, 0);
+            mpq.Slop = 2;
+            DoTestZeroPosIncrSloppy(mpq, 1);
+        }
+
+        /// <summary>
+        /// MPQ Combined AND OR Mode - Manually creating a multiple phrase query
+        /// </summary>
+        [Test]
+        public virtual void TestZeroPosIncrSloppyMpqAndOrMatch()
+        {
+            MultiPhraseQuery mpq = new MultiPhraseQuery();
+            foreach (Token[] tap in INCR_0_QUERY_TOKENS_AND_OR_MATCH)
+            {
+                Term[] terms = TapTerms(tap);
+                int pos = tap[0].PositionIncrement - 1;
+                mpq.Add(terms, pos); //AND logic in pos, OR across lines
+            }
+            DoTestZeroPosIncrSloppy(mpq, 0);
+            mpq.Slop = 1;
+            DoTestZeroPosIncrSloppy(mpq, 0);
+            mpq.Slop = 2;
+            DoTestZeroPosIncrSloppy(mpq, 1);
+        }
+
+        /// <summary>
+        /// MPQ Combined AND OR Mode - Manually creating a multiple phrase query - with no match
+        /// </summary>
+        [Test]
+        public virtual void TestZeroPosIncrSloppyMpqAndOrNoMatch()
+        {
+            MultiPhraseQuery mpq = new MultiPhraseQuery();
+            foreach (Token[] tap in INCR_0_QUERY_TOKENS_AND_OR_NO_MATCH)
+            {
+                Term[] terms = TapTerms(tap);
+                int pos = tap[0].PositionIncrement - 1;
+                mpq.Add(terms, pos); //AND logic in pos, OR across lines
+            }
+            DoTestZeroPosIncrSloppy(mpq, 0);
+            mpq.Slop = 2;
+            DoTestZeroPosIncrSloppy(mpq, 0);
+        }
+
+        private Term[] TapTerms(Token[] tap)
+        {
+            Term[] terms = new Term[tap.Length];
+            for (int i = 0; i < terms.Length; i++)
+            {
+                terms[i] = new Term("field", tap[i].ToString());
+            }
+            return terms;
+        }
+
+        [Test]
+        public virtual void TestNegativeSlop()
+        {
+            MultiPhraseQuery query = new MultiPhraseQuery();
+            query.Add(new Term("field", "two"));
+            query.Add(new Term("field", "one"));
+            try
+            {
+                query.Slop = -2;
+                Assert.Fail("didn't get expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected exception
+            }
+        }
+    }
+}
\ No newline at end of file
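
The recurring pattern in TestMultiPhraseQuery, reduced to a minimal sketch that
uses only API members visible in this diff (the terms are illustrative): each
Add call appends one position, and an array of terms makes that position a
disjunction.

    MultiPhraseQuery mpq = new MultiPhraseQuery();
    mpq.Add(new Term("body", "blueberry"));                                      // single term at position 0
    mpq.Add(new Term[] { new Term("body", "pie"), new Term("body", "pizza") });  // OR of terms at position 1
    mpq.Slop = 1;                                                                // allow one position of slack
    TopDocs hits = searcher.Search(mpq, 10);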