You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by ar...@apache.org on 2006/06/04 04:41:25 UTC
svn commit: r411501 [28/30] - in /incubator/lucene.net/trunk/C#/src: ./
Demo/DeleteFiles/ Demo/DemoLib/ Demo/DemoLib/HTML/ Demo/IndexFiles/
Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/
Lucene.Net/Analysis/Standard/ Lucene.Net/Docu...
Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestCustomSearcherSort.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestCustomSearcherSort.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestCustomSearcherSort.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestCustomSearcherSort.cs Sat Jun 3 19:41:13 2006
@@ -0,0 +1,323 @@
+/*
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using DateTools = Lucene.Net.Documents.DateTools;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
+namespace Lucene.Net.Search
+{
+
+ /// <summary> Unit test for sorting code.
+ ///
+ /// </summary>
+ /// <author> Martin Seitz (T-Systems)
+ /// </author>
+
+ [Serializable]
+ [TestFixture]
+ public class TestCustomSearcherSort
+ {
+
+ private Directory index = null;
+ private Query query = null;
+ // reduced from 20000 to 2000 to speed up test...
+ private const int INDEX_SIZE = 2000;
+
+
+ [STAThread]
+ public static void Main(System.String[] argv)
+ {
+ // TestRunner.run(Suite()); // {{Aroush}} how is this done in NUnit?
+ }
+
+ public static NUnit.Framework.TestCase Suite()
+ {
+ return null; // return new NUnit.Core.TestSuite(typeof(TestCustomSearcherSort)); {{Aroush}} how is this done in NUnit?
+ }
+
+
+ // create an index for testing
+ private Directory GetIndex()
+ {
+ RAMDirectory indexStore = new RAMDirectory();
+ IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(), true);
+ RandomGen random = new RandomGen(this);
+ for (int i = 0; i < INDEX_SIZE; ++i)
+ {
+ // don't decrease; if too low the problem doesn't show up
+ Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+ if ((i % 5) != 0)
+ {
+ // some documents must not have an entry in the first sort field
+ doc.Add(new Field("publicationDate_", random.GetLuceneDate(), Field.Store.YES, Field.Index.UN_TOKENIZED));
+ }
+ if ((i % 7) == 0)
+ {
+ // some documents to match the query (see below)
+ doc.Add(new Field("content", "test", Field.Store.YES, Field.Index.TOKENIZED));
+ }
+ // every document has a defined 'mandant' field
+ doc.Add(new Field("mandant", System.Convert.ToString(i % 3), Field.Store.YES, Field.Index.UN_TOKENIZED));
+ writer.AddDocument(doc);
+ }
+ writer.Optimize();
+ writer.Close();
+ return indexStore;
+ }
+
+ /// <summary> Create index and query for test cases. </summary>
+ [TestFixtureSetUp]
+ public virtual void SetUp()
+ {
+ index = GetIndex();
+ query = new TermQuery(new Term("content", "test"));
+ }
+
+ /// <summary> Run the test using two CustomSearcher instances. </summary>
+ [Test]
+ public virtual void TestFieldSortCustomSearcher()
+ {
+ // log("Run testFieldSortCustomSearcher");
+ // define the sort criteria
+ Sort custSort = new Sort(new SortField[]{new SortField("publicationDate_"), SortField.FIELD_SCORE});
+ Searcher searcher = new CustomSearcher(this, index, 2);
+ // search and check hits
+ MatchHits(searcher, custSort);
+ }
+ /// <summary> Run the test using one CustomSearcher wrapped by a MultiSearcher. </summary>
+ [Test]
+ public virtual void TestFieldSortSingleSearcher()
+ {
+ // log("Run testFieldSortSingleSearcher");
+ // define the sort criteria
+ Sort custSort = new Sort(new SortField[]{new SortField("publicationDate_"), SortField.FIELD_SCORE});
+ Searcher searcher = new MultiSearcher(new Lucene.Net.Search.Searchable[]{new CustomSearcher(this, index, 2)});
+ // search and check hits
+ MatchHits(searcher, custSort);
+ }
+ /// <summary> Run the test using two CustomSearcher instances. </summary>
+ [Test]
+ public virtual void TestFieldSortMultiCustomSearcher()
+ {
+ // log("Run testFieldSortMultiCustomSearcher");
+ // define the sort criteria
+ Sort custSort = new Sort(new SortField[]{new SortField("publicationDate_"), SortField.FIELD_SCORE});
+ Searcher searcher = new MultiSearcher(new Lucene.Net.Search.Searchable[]{new CustomSearcher(this, index, 0), new CustomSearcher(this, index, 2)});
+ // search and check hits
+ MatchHits(searcher, custSort);
+ }
+
+
+ // make sure the documents returned by the search match the expected list
+ private void MatchHits(Searcher searcher, Sort sort)
+ {
+ // make a query without sorting first
+ Hits hitsByRank = searcher.Search(query);
+ CheckHits(hitsByRank, "Sort by rank: "); // check for duplicates
+ System.Collections.IDictionary resultMap = new System.Collections.SortedList();
+ // store hits in TreeMap - TreeMap does not allow duplicates; existing entries are silently overwritten
+ for (int hitid = 0; hitid < hitsByRank.Length(); ++hitid)
+ {
+ resultMap[(System.Int32) hitsByRank.Id(hitid)] = (System.Int32) hitid; // value: index into the Hits object
+ }
+
+ // now make a query using the sort criteria
+ Hits resultSort = searcher.Search(query, sort);
+ CheckHits(resultSort, "Sort by custom criteria: "); // check for duplicates
+
+ System.String lf = SupportClass.AppSettings.Get("line.separator", "\n");
+ // besides the sorting both sets of hits must be identical
+ for (int hitid = 0; hitid < resultSort.Length(); ++hitid)
+ {
+ System.Int32 idHitDate = (System.Int32) resultSort.Id(hitid); // document ID from sorted search
+ if (!resultMap.Contains(idHitDate))
+ {
+ Log("ID " + idHitDate + " not found. Possibliy a duplicate.");
+ }
+ Assert.IsTrue(resultMap.Contains(idHitDate)); // same ID must be in the Map from the rank-sorted search
+ // every hit must appear once in both result sets --> remove it from the Map.
+ // At the end the Map must be empty!
+ resultMap.Remove(idHitDate);
+ }
+ if (resultMap.Count == 0)
+ {
+ // log("All hits matched");
+ }
+ else
+ {
+ Log("Couldn't match " + resultMap.Count + " hits.");
+ }
+ Assert.AreEqual(resultMap.Count, 0);
+ }
+
+ /// <summary> Check the hits for duplicates.</summary>
+ /// <param name="hits">
+ /// </param>
+ private void CheckHits(Hits hits, System.String prefix)
+ {
+ if (hits != null)
+ {
+ System.Collections.IDictionary idMap = new System.Collections.SortedList();
+ for (int docnum = 0; docnum < hits.Length(); ++docnum)
+ {
+ System.Int32 luceneId;
+ try
+ {
+ luceneId = (System.Int32) hits.Id(docnum);
+ if (idMap.Contains(luceneId))
+ {
+ System.Text.StringBuilder message = new System.Text.StringBuilder(prefix);
+ message.Append("Duplicate key for hit index = ");
+ message.Append(docnum);
+ message.Append(", previous index = ");
+ message.Append(((System.Int32) idMap[luceneId]).ToString());
+ message.Append(", Lucene ID = ");
+ message.Append(luceneId);
+ Log(message.ToString());
+ }
+ else
+ {
+ idMap[luceneId] = (System.Int32) docnum;
+ }
+ }
+ catch (System.IO.IOException ioe)
+ {
+ System.Text.StringBuilder message = new System.Text.StringBuilder(prefix);
+ message.Append("Error occurred for hit index = ");
+ message.Append(docnum);
+ message.Append(" (");
+ message.Append(ioe.Message);
+ message.Append(")");
+ Log(message.ToString());
+ }
+ }
+ }
+ }
+
+ // Simply write to console - chosen to be independent of log4j etc
+ private void Log(System.String message)
+ {
+ System.Console.Out.WriteLine(message);
+ }
+
+ public class CustomSearcher : IndexSearcher
+ {
+ private void InitBlock(TestCustomSearcherSort enclosingInstance)
+ {
+ this.enclosingInstance = enclosingInstance;
+ }
+ private TestCustomSearcherSort enclosingInstance;
+ public TestCustomSearcherSort Enclosing_Instance
+ {
+ get
+ {
+ return enclosingInstance;
+ }
+
+ }
+ private int switcher;
+ /// <param name="directory">
+ /// </param>
+ /// <throws> IOException </throws>
+ public CustomSearcher(TestCustomSearcherSort enclosingInstance, Directory directory, int switcher):base(directory)
+ {
+ InitBlock(enclosingInstance);
+ this.switcher = switcher;
+ }
+ /// <param name="r">
+ /// </param>
+ public CustomSearcher(TestCustomSearcherSort enclosingInstance, IndexReader r, int switcher):base(r)
+ {
+ InitBlock(enclosingInstance);
+ this.switcher = switcher;
+ }
+ /// <param name="path">
+ /// </param>
+ /// <throws> IOException </throws>
+ public CustomSearcher(TestCustomSearcherSort enclosingInstance, System.String path, int switcher):base(path)
+ {
+ InitBlock(enclosingInstance);
+ this.switcher = switcher;
+ }
+ /* (non-Javadoc)
+ * @see Lucene.Net.search.Searchable#search(Lucene.Net.search.Query, Lucene.Net.search.Filter, int, Lucene.Net.search.Sort)
+ */
+ public override TopFieldDocs Search(Query query, Filter filter, int nDocs, Sort sort)
+ {
+ BooleanQuery bq = new BooleanQuery();
+ bq.Add(query, BooleanClause.Occur.MUST);
+ bq.Add(new TermQuery(new Term("mandant", System.Convert.ToString(switcher))), BooleanClause.Occur.MUST);
+ return base.Search(bq, filter, nDocs, sort);
+ }
+ /* (non-Javadoc)
+ * @see Lucene.Net.search.Searchable#search(Lucene.Net.search.Query, Lucene.Net.search.Filter, int)
+ */
+ public override TopDocs Search(Query query, Filter filter, int nDocs)
+ {
+ BooleanQuery bq = new BooleanQuery();
+ bq.Add(query, BooleanClause.Occur.MUST);
+ bq.Add(new TermQuery(new Term("mandant", System.Convert.ToString(switcher))), BooleanClause.Occur.MUST);
+ return base.Search(bq, filter, nDocs);
+ }
+ }
+
+ private class RandomGen
+ {
+ public RandomGen(TestCustomSearcherSort enclosingInstance)
+ {
+ InitBlock(enclosingInstance);
+ }
+ private void InitBlock(TestCustomSearcherSort enclosingInstance)
+ {
+ this.enclosingInstance = enclosingInstance;
+ System.DateTime temp_calendar;
+ temp_calendar = new System.DateTime(1980, 1, 1, 0, 0, 0, 0, new System.Globalization.GregorianCalendar());
+ base_Renamed = temp_calendar;
+ }
+ private TestCustomSearcherSort enclosingInstance;
+ public TestCustomSearcherSort Enclosing_Instance
+ {
+ get
+ {
+ return enclosingInstance;
+ }
+
+ }
+
+ private System.Random random = new System.Random((System.Int32) 0); // to generate some arbitrary contents
+ // private System.Globalization.Calendar base_Renamed;
+ private System.DateTime base_Renamed;
+
+ // Just to generate some different Lucene Date strings
+ public System.String GetLuceneDate()
+ {
+ long v1 = base_Renamed.Millisecond;
+ long v2 = random.Next();
+ long v3 = System.Int32.MinValue;
+ return DateTools.TimeToString(v1 + v2 - v3, DateTools.Resolution.DAY);
+ }
+ }
+ }
+}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestDateFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDateFilter.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDateFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDateFilter.cs Sat Jun 3 19:41:13 2006
@@ -13,6 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
using System;
using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
using DateField = Lucene.Net.Documents.DateField;
@@ -22,6 +23,7 @@
using Term = Lucene.Net.Index.Term;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
using NUnit.Framework;
+
namespace Lucene.Net.Search
{
@@ -30,14 +32,15 @@
/// </summary>
/// <author> Otis Gospodnetic
/// </author>
- /// <version> $Revision: 1.5 $
+ /// <version> $Revision: 150487 $
/// </version>
[TestFixture]
- public class TestDateFilter
+ public class TestDateFilter
{
+
/// <summary> </summary>
[Test]
- public virtual void TestBefore()
+ public virtual void TestBefore()
{
// create an index
RAMDirectory indexStore = new RAMDirectory();
@@ -45,10 +48,10 @@
long now = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
- Document doc = new Document();
+ Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
// add time that is in the past
- doc.Add(Field.Keyword("datefield", DateField.TimeToString(now - 1000)));
- doc.Add(Field.Text("body", "Today is a very sunny day in New York City"));
+ doc.Add(new Field("datefield", DateField.TimeToString(now - 1000), Field.Store.YES, Field.Index.UN_TOKENIZED));
+ doc.Add(new Field("body", "Today is a very sunny day in New York City", Field.Store.YES, Field.Index.TOKENIZED));
writer.AddDocument(doc);
writer.Optimize();
writer.Close();
@@ -93,7 +96,7 @@
/// <summary> </summary>
[Test]
- public virtual void TestAfter()
+ public virtual void TestAfter()
{
// create an index
RAMDirectory indexStore = new RAMDirectory();
@@ -101,10 +104,10 @@
long now = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
- Document doc = new Document();
+ Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
// add time that is in the future
- doc.Add(Field.Keyword("datefield", DateField.TimeToString(now + 888888)));
- doc.Add(Field.Text("body", "Today is a very sunny day in New York City"));
+ doc.Add(new Field("datefield", DateField.TimeToString(now + 888888), Field.Store.YES, Field.Index.UN_TOKENIZED));
+ doc.Add(new Field("body", "Today is a very sunny day in New York City", Field.Store.YES, Field.Index.TOKENIZED));
writer.AddDocument(doc);
writer.Optimize();
writer.Close();
Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestDisjunctionMaxQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDisjunctionMaxQuery.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDisjunctionMaxQuery.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDisjunctionMaxQuery.cs Sat Jun 3 19:41:13 2006
@@ -0,0 +1,466 @@
+/*
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using NUnit.Framework;
+
+namespace Lucene.Net.Search
+{
+
+ /// <summary> Test of the DisjunctionMaxQuery.
+ ///
+ /// </summary>
+ [TestFixture]
+ public class TestDisjunctionMaxQuery
+ {
+ public TestDisjunctionMaxQuery()
+ {
+ InitBlock();
+ }
+ private void InitBlock()
+ {
+ sim = new TestSimilarity();
+ }
+
+ /// <summary>threshold for comparing floats </summary>
+ public const float SCORE_COMP_THRESH = 0.0000f;
+
+ /// <summary> Similarity to eliminate tf, idf and lengthNorm effects to
+ /// isolate test case.
+ ///
+ /// <p>
+ /// same as TestRankingSimilarity in TestRanking.zip from
+ /// http://issues.apache.org/jira/browse/LUCENE-323
+ /// </p>
+ /// </summary>
+ /// <author> Williams
+ /// </author>
+ [Serializable]
+ private class TestSimilarity:DefaultSimilarity
+ {
+
+ public TestSimilarity()
+ {
+ }
+ public override float Tf(float freq)
+ {
+ if (freq > 0.0f)
+ return 1.0f;
+ else
+ return 0.0f;
+ }
+ public override float LengthNorm(System.String fieldName, int numTerms)
+ {
+ return 1.0f;
+ }
+ public override float Idf(int docFreq, int numDocs)
+ {
+ return 1.0f;
+ }
+ }
+
+ public Similarity sim;
+ public Directory index;
+ public IndexReader r;
+ public IndexSearcher s;
+
+ [TestFixtureSetUp]
+ public virtual void SetUp()
+ {
+
+ index = new RAMDirectory();
+ IndexWriter writer = new IndexWriter(index, new WhitespaceAnalyzer(), true);
+ writer.SetSimilarity(sim);
+
+ // hed is the most important field, dek is secondary
+
+ // d1 is an "ok" match for: albino elephant
+ {
+ Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
+ d1.Add(Field.Keyword("id", "d1"));
+ d1.Add(Field.Text("hed", "elephant"));
+ d1.Add(Field.Text("dek", "elephant"));
+ writer.AddDocument(d1);
+ }
+
+ // d2 is a "good" match for: albino elephant
+ {
+ Lucene.Net.Documents.Document d2 = new Lucene.Net.Documents.Document();
+ d2.Add(Field.Keyword("id", "d2"));
+ d2.Add(Field.Text("hed", "elephant"));
+ d2.Add(Field.Text("dek", "albino"));
+ d2.Add(Field.Text("dek", "elephant"));
+ writer.AddDocument(d2);
+ }
+
+ // d3 is a "better" match for: albino elephant
+ {
+ Lucene.Net.Documents.Document d3 = new Lucene.Net.Documents.Document();
+ d3.Add(Field.Keyword("id", "d3"));
+ d3.Add(Field.Text("hed", "albino"));
+ d3.Add(Field.Text("hed", "elephant"));
+ writer.AddDocument(d3);
+ }
+
+ // d4 is the "best" match for: albino elephant
+ {
+ Lucene.Net.Documents.Document d4 = new Lucene.Net.Documents.Document();
+ d4.Add(Field.Keyword("id", "d4"));
+ d4.Add(Field.Text("hed", "albino"));
+ d4.Add(Field.Text("hed", "elephant"));
+ d4.Add(Field.Text("dek", "albino"));
+ writer.AddDocument(d4);
+ }
+
+ writer.Close();
+
+ r = IndexReader.Open(index);
+ s = new IndexSearcher(r);
+ s.SetSimilarity(sim);
+ }
+
+ [Test]
+ public virtual void TestSimpleEqualScores1()
+ {
+
+ DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.0f);
+ q.Add(Tq("hed", "albino"));
+ q.Add(Tq("hed", "elephant"));
+
+ Hits h = s.Search(q);
+
+ try
+ {
+ Assert.AreEqual(4, h.Length(), "all docs should match " + q.ToString());
+
+ float score = h.Score(0);
+ for (int i = 1; i < h.Length(); i++)
+ {
+ Assert.AreEqual(score, h.Score(i), SCORE_COMP_THRESH, "score #" + i + " is not the same");
+ }
+ }
+ catch (System.ApplicationException e)
+ {
+ PrintHits("testSimpleEqualScores1", h);
+ throw e;
+ }
+ }
+
+ [Test]
+ public virtual void TestSimpleEqualScores2()
+ {
+
+ DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.0f);
+ q.Add(Tq("dek", "albino"));
+ q.Add(Tq("dek", "elephant"));
+
+ Hits h = s.Search(q);
+
+ try
+ {
+ Assert.AreEqual(3, h.Length(), "3 docs should match " + q.ToString());
+ float score = h.Score(0);
+ for (int i = 1; i < h.Length(); i++)
+ {
+ Assert.AreEqual(score, h.Score(i), SCORE_COMP_THRESH, "score #" + i + " is not the same");
+ }
+ }
+ catch (System.ApplicationException e)
+ {
+ PrintHits("testSimpleEqualScores2", h);
+ throw e;
+ }
+ }
+
+ [Test]
+ public virtual void TestSimpleEqualScores3()
+ {
+
+ DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.0f);
+ q.Add(Tq("hed", "albino"));
+ q.Add(Tq("hed", "elephant"));
+ q.Add(Tq("dek", "albino"));
+ q.Add(Tq("dek", "elephant"));
+
+ Hits h = s.Search(q);
+
+ try
+ {
+ Assert.AreEqual(4, h.Length(), "all docs should match " + q.ToString());
+ float score = h.Score(0);
+ for (int i = 1; i < h.Length(); i++)
+ {
+ Assert.AreEqual(score, h.Score(i), SCORE_COMP_THRESH, "score #" + i + " is not the same");
+ }
+ }
+ catch (System.ApplicationException e)
+ {
+ PrintHits("testSimpleEqualScores3", h);
+ throw e;
+ }
+ }
+
+ [Test]
+ public virtual void TestSimpleTiebreaker()
+ {
+
+ DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.01f);
+ q.Add(Tq("dek", "albino"));
+ q.Add(Tq("dek", "elephant"));
+
+ Hits h = s.Search(q);
+
+ try
+ {
+ Assert.AreEqual(3, h.Length(), "3 docs should match " + q.ToString());
+ Assert.AreEqual("d2", h.Doc(0).Get("id"), "wrong first");
+ float score0 = h.Score(0);
+ float score1 = h.Score(1);
+ float score2 = h.Score(2);
+ Assert.IsTrue(score0 > score1, "d2 does not have better score then others: " + score0 + " >? " + score1);
+ Assert.AreEqual(score1, score2, SCORE_COMP_THRESH, "d4 and d1 don't have equal scores");
+ }
+ catch (System.ApplicationException e)
+ {
+ PrintHits("testSimpleTiebreaker", h);
+ throw e;
+ }
+ }
+
+ [Test]
+ public virtual void TestBooleanRequiredEqualScores()
+ {
+
+ BooleanQuery q = new BooleanQuery();
+ {
+ DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(0.0f);
+ q1.Add(Tq("hed", "albino"));
+ q1.Add(Tq("dek", "albino"));
+ q.Add(q1, true, false);
+ }
+ {
+ DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(0.0f);
+ q2.Add(Tq("hed", "elephant"));
+ q2.Add(Tq("dek", "elephant"));
+ q.Add(q2, true, false);
+ }
+
+
+ Hits h = s.Search(q);
+
+ try
+ {
+ Assert.AreEqual(3, h.Length(), "3 docs should match " + q.ToString());
+ float score = h.Score(0);
+ for (int i = 1; i < h.Length(); i++)
+ {
+ Assert.AreEqual(score, h.Score(i), SCORE_COMP_THRESH, "score #" + i + " is not the same");
+ }
+ }
+ catch (System.ApplicationException e)
+ {
+ PrintHits("testBooleanRequiredEqualScores1", h);
+ throw e;
+ }
+ }
+
+ [Test]
+ public virtual void TestBooleanOptionalNoTiebreaker()
+ {
+
+ BooleanQuery q = new BooleanQuery();
+ {
+ DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(0.0f);
+ q1.Add(Tq("hed", "albino"));
+ q1.Add(Tq("dek", "albino"));
+ q.Add(q1, false, false);
+ }
+ {
+ DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(0.0f);
+ q2.Add(Tq("hed", "elephant"));
+ q2.Add(Tq("dek", "elephant"));
+ q.Add(q2, false, false);
+ }
+
+
+ Hits h = s.Search(q);
+
+ try
+ {
+ Assert.AreEqual(4, h.Length(), "4 docs should match " + q.ToString());
+ float score = h.Score(0);
+ for (int i = 1; i < h.Length() - 1; i++)
+ {
+ /* note: -1 */
+ Assert.AreEqual(score, h.Score(i), SCORE_COMP_THRESH, "score #" + i + " is not the same");
+ }
+ Assert.AreEqual("d1", h.Doc(h.Length() - 1).Get("id"), "wrong last");
+ float score1 = h.Score(h.Length() - 1);
+ Assert.IsTrue(score > score1, "d1 does not have worse score then others: " + score + " >? " + score1);
+ }
+ catch (System.ApplicationException e)
+ {
+ PrintHits("testBooleanOptionalNoTiebreaker", h);
+ throw e;
+ }
+ }
+
+ [Test]
+ public virtual void TestBooleanOptionalWithTiebreaker()
+ {
+
+ BooleanQuery q = new BooleanQuery();
+ {
+ DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(0.01f);
+ q1.Add(Tq("hed", "albino"));
+ q1.Add(Tq("dek", "albino"));
+ q.Add(q1, false, false);
+ }
+ {
+ DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(0.01f);
+ q2.Add(Tq("hed", "elephant"));
+ q2.Add(Tq("dek", "elephant"));
+ q.Add(q2, false, false);
+ }
+
+
+ Hits h = s.Search(q);
+
+ try
+ {
+
+ Assert.AreEqual(4, h.Length(), "4 docs should match " + q.ToString());
+
+ float score0 = h.Score(0);
+ float score1 = h.Score(1);
+ float score2 = h.Score(2);
+ float score3 = h.Score(3);
+
+ System.String doc0 = h.Doc(0).Get("id");
+ System.String doc1 = h.Doc(1).Get("id");
+ System.String doc2 = h.Doc(2).Get("id");
+ System.String doc3 = h.Doc(3).Get("id");
+
+ Assert.IsTrue(doc0.Equals("d2") || doc0.Equals("d4"), "doc0 should be d2 or d4: " + doc0);
+ Assert.IsTrue(doc1.Equals("d2") || doc1.Equals("d4"), "doc1 should be d2 or d4: " + doc0);
+ Assert.AreEqual(score0, score1, SCORE_COMP_THRESH, "score0 and score1 should match");
+ Assert.AreEqual("d3", doc2, "wrong third");
+ Assert.IsTrue(score1 > score2, "d3 does not have worse score then d2 and d4: " + score1 + " >? " + score2);
+
+ Assert.AreEqual("d1", doc3, "wrong fourth");
+ Assert.IsTrue(score2 > score3, "d1 does not have worse score then d3: " + score2 + " >? " + score3);
+ }
+ catch (System.ApplicationException e)
+ {
+ PrintHits("testBooleanOptionalWithTiebreaker", h);
+ throw e;
+ }
+ }
+
+ [Test]
+ public virtual void TestBooleanOptionalWithTiebreakerAndBoost()
+ {
+
+ BooleanQuery q = new BooleanQuery();
+ {
+ DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(0.01f);
+ q1.Add(Tq("hed", "albino", 1.5f));
+ q1.Add(Tq("dek", "albino"));
+ q.Add(q1, false, false);
+ }
+ {
+ DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(0.01f);
+ q2.Add(Tq("hed", "elephant", 1.5f));
+ q2.Add(Tq("dek", "elephant"));
+ q.Add(q2, false, false);
+ }
+
+
+ Hits h = s.Search(q);
+
+ try
+ {
+
+ Assert.AreEqual(4, h.Length(), "4 docs should match " + q.ToString());
+
+ float score0 = h.Score(0);
+ float score1 = h.Score(1);
+ float score2 = h.Score(2);
+ float score3 = h.Score(3);
+
+ System.String doc0 = h.Doc(0).Get("id");
+ System.String doc1 = h.Doc(1).Get("id");
+ System.String doc2 = h.Doc(2).Get("id");
+ System.String doc3 = h.Doc(3).Get("id");
+
+ Assert.AreEqual("d4", doc0, "doc0 should be d4: ");
+ Assert.AreEqual("d3", doc1, "doc1 should be d3: ");
+ Assert.AreEqual("d2", doc2, "doc2 should be d2: ");
+ Assert.AreEqual("d1", doc3, "doc3 should be d1: ");
+
+ Assert.IsTrue(score0 > score1, "d4 does not have a better score then d3: " + score0 + " >? " + score1);
+ Assert.IsTrue(score1 > score2, "d3 does not have a better score then d2: " + score1 + " >? " + score2);
+ Assert.IsTrue(score2 > score3, "d3 does not have a better score then d1: " + score2 + " >? " + score3);
+ }
+ catch (System.ApplicationException e)
+ {
+ PrintHits("testBooleanOptionalWithTiebreakerAndBoost", h);
+ throw e;
+ }
+ }
+
+
+
+
+
+
+
+ /// <summary>macro </summary>
+ protected internal virtual Query Tq(System.String f, System.String t)
+ {
+ return new TermQuery(new Term(f, t));
+ }
+ /// <summary>macro </summary>
+ protected internal virtual Query Tq(System.String f, System.String t, float b)
+ {
+ Query q = Tq(f, t);
+ q.SetBoost(b);
+ return q;
+ }
+
+
+ protected internal virtual void PrintHits(System.String test, Hits h)
+ {
+
+ System.Console.Error.WriteLine("------- " + test + " -------");
+
+ for (int i = 0; i < h.Length(); i++)
+ {
+ Lucene.Net.Documents.Document d = h.Doc(i);
+ float score = h.Score(i);
+ System.Console.Error.WriteLine("#" + i + ": {0.000000000}" + score + " - " + d.Get("id"));
+ }
+ }
+ }
+}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestDocBoost.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDocBoost.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDocBoost.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDocBoost.cs Sat Jun 3 19:41:13 2006
@@ -1,5 +1,5 @@
/*
- * Copyright 2004 The Apache Software Foundation
+ * Copyright 2005 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,6 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
using System;
using NUnit.Framework;
using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
@@ -21,6 +22,7 @@
using IndexWriter = Lucene.Net.Index.IndexWriter;
using Term = Lucene.Net.Index.Term;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
namespace Lucene.Net.Search
{
@@ -29,12 +31,12 @@
/// </summary>
/// <author> Doug Cutting
/// </author>
- /// <version> $Revision: 1.4 $
+ /// <version> $Revision: 150492 $
/// </version>
[TestFixture]
public class TestDocBoost
{
- private class AnonymousClassHitCollector:HitCollector
+ private class AnonymousClassHitCollector : HitCollector
{
public AnonymousClassHitCollector(float[] scores, TestDocBoost enclosingInstance)
{
@@ -61,20 +63,20 @@
}
}
- [Test]
- public virtual void TestDocBoost_()
+ [Test]
+ public virtual void TestDocBoost_Renamed_Method()
{
RAMDirectory store = new RAMDirectory();
IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true);
- Field f1 = Field.Text("Field", "word");
- Field f2 = Field.Text("Field", "word");
+ Field f1 = new Field("field", "word", Field.Store.YES, Field.Index.TOKENIZED);
+ Field f2 = new Field("field", "word", Field.Store.YES, Field.Index.TOKENIZED);
f2.SetBoost(2.0f);
- Document d1 = new Document();
- Document d2 = new Document();
- Document d3 = new Document();
- Document d4 = new Document();
+ Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
+ Lucene.Net.Documents.Document d2 = new Lucene.Net.Documents.Document();
+ Lucene.Net.Documents.Document d3 = new Lucene.Net.Documents.Document();
+ Lucene.Net.Documents.Document d4 = new Lucene.Net.Documents.Document();
d3.SetBoost(3.0f);
d4.SetBoost(2.0f);
@@ -92,7 +94,7 @@
float[] scores = new float[4];
- new IndexSearcher(store).Search(new TermQuery(new Term("Field", "word")), new AnonymousClassHitCollector(scores, this));
+ new IndexSearcher(store).Search(new TermQuery(new Term("field", "word")), new AnonymousClassHitCollector(scores, this));
float lastScore = 0.0f;
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestFilteredQuery.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredQuery.cs Sat Jun 3 19:41:13 2006
@@ -13,6 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
using System;
using NUnit.Framework;
using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
@@ -22,8 +23,10 @@
using IndexWriter = Lucene.Net.Index.IndexWriter;
using Term = Lucene.Net.Index.Term;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
namespace Lucene.Net.Search
{
+
/// <summary> FilteredQuery JUnit tests.
///
/// <p>Created: Apr 21, 2004 1:21:46 PM
@@ -31,13 +34,14 @@
/// </summary>
/// <author> Tim Jones
/// </author>
- /// <version> $Id: TestFilteredQuery.java,v 1.5 2004/07/10 06:19:01 otis Exp $
+ /// <version> $Id: TestFilteredQuery.java 150585 2004-10-10 15:44:45Z dnaber $
/// </version>
/// <since> 1.4
/// </since>
[TestFixture]
public class TestFilteredQuery
{
+ //UPGRADE_NOTE: Field 'EnclosingInstance' was added to class 'AnonymousClassFilter' to access its enclosing instance. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1019'"
[Serializable]
private class AnonymousClassFilter : Filter
{
@@ -72,49 +76,49 @@
private Query query;
private Filter filter;
- [TestFixtureSetUp]
- public virtual void SetUp()
+ [TestFixtureSetUp]
+ public virtual void SetUp()
{
directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
- Document doc = new Document();
- doc.Add(Field.Text("Field", "one two three four five"));
- doc.Add(Field.Text("sorter", "b"));
+ Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+ doc.Add(new Field("field", "one two three four five", Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("sorter", "b", Field.Store.YES, Field.Index.TOKENIZED));
writer.AddDocument(doc);
- doc = new Document();
- doc.Add(Field.Text("Field", "one two three four"));
- doc.Add(Field.Text("sorter", "d"));
+ doc = new Lucene.Net.Documents.Document();
+ doc.Add(new Field("field", "one two three four", Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("sorter", "d", Field.Store.YES, Field.Index.TOKENIZED));
writer.AddDocument(doc);
- doc = new Document();
- doc.Add(Field.Text("Field", "one two three y"));
- doc.Add(Field.Text("sorter", "a"));
+ doc = new Lucene.Net.Documents.Document();
+ doc.Add(new Field("field", "one two three y", Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("sorter", "a", Field.Store.YES, Field.Index.TOKENIZED));
writer.AddDocument(doc);
- doc = new Document();
- doc.Add(Field.Text("Field", "one two x"));
- doc.Add(Field.Text("sorter", "c"));
+ doc = new Lucene.Net.Documents.Document();
+ doc.Add(new Field("field", "one two x", Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("sorter", "c", Field.Store.YES, Field.Index.TOKENIZED));
writer.AddDocument(doc);
writer.Optimize();
writer.Close();
searcher = new IndexSearcher(directory);
- query = new TermQuery(new Term("Field", "three"));
+ query = new TermQuery(new Term("field", "three"));
filter = new AnonymousClassFilter(this);
}
- [TestFixtureTearDown]
- public virtual void TearDown()
+ [TestFixtureTearDown]
+ public virtual void TearDown()
{
searcher.Close();
directory.Close();
}
- [Test]
- public virtual void TestFilteredQuery_()
+ [Test]
+ public virtual void TestFilteredQuery_Renamed_Method()
{
Query filteredquery = new FilteredQuery(query, filter);
Hits hits = searcher.Search(filteredquery);
@@ -125,23 +129,23 @@
Assert.AreEqual(1, hits.Length());
Assert.AreEqual(1, hits.Id(0));
- filteredquery = new FilteredQuery(new TermQuery(new Term("Field", "one")), filter);
+ filteredquery = new FilteredQuery(new TermQuery(new Term("field", "one")), filter);
hits = searcher.Search(filteredquery);
Assert.AreEqual(2, hits.Length());
- filteredquery = new FilteredQuery(new TermQuery(new Term("Field", "x")), filter);
+ filteredquery = new FilteredQuery(new TermQuery(new Term("field", "x")), filter);
hits = searcher.Search(filteredquery);
Assert.AreEqual(1, hits.Length());
Assert.AreEqual(3, hits.Id(0));
- filteredquery = new FilteredQuery(new TermQuery(new Term("Field", "y")), filter);
+ filteredquery = new FilteredQuery(new TermQuery(new Term("field", "y")), filter);
hits = searcher.Search(filteredquery);
Assert.AreEqual(0, hits.Length());
}
/// <summary> This tests FilteredQuery's rewrite correctness</summary>
[Test]
- public virtual void TestRangeQuery()
+ public virtual void TestRangeQuery()
{
RangeQuery rq = new RangeQuery(new Term("sorter", "b"), new Term("sorter", "d"), true);
Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestFuzzyQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestFuzzyQuery.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestFuzzyQuery.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestFuzzyQuery.cs Sat Jun 3 19:41:13 2006
@@ -0,0 +1,270 @@
+/*
+ * Copyright 2004 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
+namespace Lucene.Net.Search
+{
+
+ /// <summary> Tests {@link FuzzyQuery}.
+ ///
+ /// </summary>
+ /// <author> Daniel Naber
+ /// </author>
+ [TestFixture]
+ public class TestFuzzyQuery
+ {
+ [Test]
+ public virtual void TestFuzziness()
+ {
+ RAMDirectory directory = new RAMDirectory();
+ IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+ AddDoc("aaaaa", writer);
+ AddDoc("aaaab", writer);
+ AddDoc("aaabb", writer);
+ AddDoc("aabbb", writer);
+ AddDoc("abbbb", writer);
+ AddDoc("bbbbb", writer);
+ AddDoc("ddddd", writer);
+ writer.Optimize();
+ writer.Close();
+ IndexSearcher searcher = new IndexSearcher(directory);
+
+ FuzzyQuery query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 0);
+ Hits hits = searcher.Search(query);
+ Assert.AreEqual(3, hits.Length());
+
+ // same with prefix
+ query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 1);
+ hits = searcher.Search(query);
+ Assert.AreEqual(3, hits.Length());
+ query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 2);
+ hits = searcher.Search(query);
+ Assert.AreEqual(3, hits.Length());
+ query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 3);
+ hits = searcher.Search(query);
+ Assert.AreEqual(3, hits.Length());
+ query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 4);
+ hits = searcher.Search(query);
+ Assert.AreEqual(2, hits.Length());
+ query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 5);
+ hits = searcher.Search(query);
+ Assert.AreEqual(1, hits.Length());
+ query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 6);
+ hits = searcher.Search(query);
+ Assert.AreEqual(1, hits.Length());
+
+ // not similar enough:
+ query = new FuzzyQuery(new Term("field", "xxxxx"), FuzzyQuery.defaultMinSimilarity, 0);
+ hits = searcher.Search(query);
+ Assert.AreEqual(0, hits.Length());
+ query = new FuzzyQuery(new Term("field", "aaccc"), FuzzyQuery.defaultMinSimilarity, 0); // edit distance to "aaaaa" = 3
+ hits = searcher.Search(query);
+ Assert.AreEqual(0, hits.Length());
+
+ // query identical to a word in the index:
+ query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 0);
+ hits = searcher.Search(query);
+ Assert.AreEqual(3, hits.Length());
+ Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaa"));
+ // default allows for up to two edits:
+ Assert.AreEqual(hits.Doc(1).Get("field"), ("aaaab"));
+ Assert.AreEqual(hits.Doc(2).Get("field"), ("aaabb"));
+
+ // query similar to a word in the index:
+ query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 0);
+ hits = searcher.Search(query);
+ Assert.AreEqual(3, hits.Length());
+ Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaa"));
+ Assert.AreEqual(hits.Doc(1).Get("field"), ("aaaab"));
+ Assert.AreEqual(hits.Doc(2).Get("field"), ("aaabb"));
+
+ // now with prefix
+ query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 1);
+ hits = searcher.Search(query);
+ Assert.AreEqual(3, hits.Length());
+ Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaa"));
+ Assert.AreEqual(hits.Doc(1).Get("field"), ("aaaab"));
+ Assert.AreEqual(hits.Doc(2).Get("field"), ("aaabb"));
+ query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 2);
+ hits = searcher.Search(query);
+ Assert.AreEqual(3, hits.Length());
+ Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaa"));
+ Assert.AreEqual(hits.Doc(1).Get("field"), ("aaaab"));
+ Assert.AreEqual(hits.Doc(2).Get("field"), ("aaabb"));
+ query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 3);
+ hits = searcher.Search(query);
+ Assert.AreEqual(3, hits.Length());
+ Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaa"));
+ Assert.AreEqual(hits.Doc(1).Get("field"), ("aaaab"));
+ Assert.AreEqual(hits.Doc(2).Get("field"), ("aaabb"));
+ query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 4);
+ hits = searcher.Search(query);
+ Assert.AreEqual(2, hits.Length());
+ Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaa"));
+ Assert.AreEqual(hits.Doc(1).Get("field"), ("aaaab"));
+ query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 5);
+ hits = searcher.Search(query);
+ Assert.AreEqual(0, hits.Length());
+
+
+ query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 0);
+ hits = searcher.Search(query);
+ Assert.AreEqual(1, hits.Length());
+ Assert.AreEqual(hits.Doc(0).Get("field"), ("ddddd"));
+
+ // now with prefix
+ query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 1);
+ hits = searcher.Search(query);
+ Assert.AreEqual(1, hits.Length());
+ Assert.AreEqual(hits.Doc(0).Get("field"), ("ddddd"));
+ query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 2);
+ hits = searcher.Search(query);
+ Assert.AreEqual(1, hits.Length());
+ Assert.AreEqual(hits.Doc(0).Get("field"), ("ddddd"));
+ query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 3);
+ hits = searcher.Search(query);
+ Assert.AreEqual(1, hits.Length());
+ Assert.AreEqual(hits.Doc(0).Get("field"), ("ddddd"));
+ query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 4);
+ hits = searcher.Search(query);
+ Assert.AreEqual(1, hits.Length());
+ Assert.AreEqual(hits.Doc(0).Get("field"), ("ddddd"));
+ query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 5);
+ hits = searcher.Search(query);
+ Assert.AreEqual(0, hits.Length());
+
+
+ // different field = no match:
+ query = new FuzzyQuery(new Term("anotherfield", "ddddX"), FuzzyQuery.defaultMinSimilarity, 0);
+ hits = searcher.Search(query);
+ Assert.AreEqual(0, hits.Length());
+
+ searcher.Close();
+ directory.Close();
+ }
+
+ [Test]
+ public virtual void TestFuzzinessLong()
+ {
+ RAMDirectory directory = new RAMDirectory();
+ IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+ AddDoc("aaaaaaa", writer);
+ AddDoc("segment", writer);
+ writer.Optimize();
+ writer.Close();
+ IndexSearcher searcher = new IndexSearcher(directory);
+
+ FuzzyQuery query;
+ // not similar enough:
+ query = new FuzzyQuery(new Term("field", "xxxxx"), FuzzyQuery.defaultMinSimilarity, 0);
+ Hits hits = searcher.Search(query);
+ Assert.AreEqual(0, hits.Length());
+ // edit distance to "aaaaaaa" = 3, this matches because the string is longer than
+ // in testDefaultFuzziness so a bigger difference is allowed:
+ query = new FuzzyQuery(new Term("field", "aaaaccc"), FuzzyQuery.defaultMinSimilarity, 0);
+ hits = searcher.Search(query);
+ Assert.AreEqual(1, hits.Length());
+ Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaaaa"));
+
+ // now with prefix
+ query = new FuzzyQuery(new Term("field", "aaaaccc"), FuzzyQuery.defaultMinSimilarity, 1);
+ hits = searcher.Search(query);
+ Assert.AreEqual(1, hits.Length());
+ Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaaaa"));
+ query = new FuzzyQuery(new Term("field", "aaaaccc"), FuzzyQuery.defaultMinSimilarity, 4);
+ hits = searcher.Search(query);
+ Assert.AreEqual(1, hits.Length());
+ Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaaaa"));
+ query = new FuzzyQuery(new Term("field", "aaaaccc"), FuzzyQuery.defaultMinSimilarity, 5);
+ hits = searcher.Search(query);
+ Assert.AreEqual(0, hits.Length());
+
+ // no match, more than half of the characters is wrong:
+ query = new FuzzyQuery(new Term("field", "aaacccc"), FuzzyQuery.defaultMinSimilarity, 0);
+ hits = searcher.Search(query);
+ Assert.AreEqual(0, hits.Length());
+
+ // now with prefix
+ query = new FuzzyQuery(new Term("field", "aaacccc"), FuzzyQuery.defaultMinSimilarity, 2);
+ hits = searcher.Search(query);
+ Assert.AreEqual(0, hits.Length());
+
+ // "student" and "stellent" are indeed similar to "segment" by default:
+ query = new FuzzyQuery(new Term("field", "student"), FuzzyQuery.defaultMinSimilarity, 0);
+ hits = searcher.Search(query);
+ Assert.AreEqual(1, hits.Length());
+ query = new FuzzyQuery(new Term("field", "stellent"), FuzzyQuery.defaultMinSimilarity, 0);
+ hits = searcher.Search(query);
+ Assert.AreEqual(1, hits.Length());
+
+ // now with prefix
+ query = new FuzzyQuery(new Term("field", "student"), FuzzyQuery.defaultMinSimilarity, 1);
+ hits = searcher.Search(query);
+ Assert.AreEqual(1, hits.Length());
+ query = new FuzzyQuery(new Term("field", "stellent"), FuzzyQuery.defaultMinSimilarity, 1);
+ hits = searcher.Search(query);
+ Assert.AreEqual(1, hits.Length());
+ query = new FuzzyQuery(new Term("field", "student"), FuzzyQuery.defaultMinSimilarity, 2);
+ hits = searcher.Search(query);
+ Assert.AreEqual(0, hits.Length());
+ query = new FuzzyQuery(new Term("field", "stellent"), FuzzyQuery.defaultMinSimilarity, 2);
+ hits = searcher.Search(query);
+ Assert.AreEqual(0, hits.Length());
+
+ // "student" doesn't match anymore thanks to increased minimum similarity:
+ query = new FuzzyQuery(new Term("field", "student"), 0.6f, 0);
+ hits = searcher.Search(query);
+ Assert.AreEqual(0, hits.Length());
+
+ try
+ {
+ query = new FuzzyQuery(new Term("field", "student"), 1.1f);
+ Assert.Fail("Expected IllegalArgumentException");
+ }
+ catch (System.ArgumentException e)
+ {
+ // expecting exception
+ }
+ try
+ {
+ query = new FuzzyQuery(new Term("field", "student"), - 0.1f);
+ Assert.Fail("Expected IllegalArgumentException");
+ }
+ catch (System.ArgumentException e)
+ {
+ // expecting exception
+ }
+
+ searcher.Close();
+ directory.Close();
+ }
+
+ private void AddDoc(System.String text, IndexWriter writer)
+ {
+ Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+ doc.Add(new Field("field", text, Field.Store.YES, Field.Index.TOKENIZED));
+ writer.AddDocument(doc);
+ }
+ }
+}
\ No newline at end of file
Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestMatchAllDocsQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMatchAllDocsQuery.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMatchAllDocsQuery.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMatchAllDocsQuery.cs Sat Jun 3 19:41:13 2006
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using NUnit.Framework;
+
+namespace Lucene.Net.Search
+{
+
+ /// <summary> Tests MatchAllDocsQuery.
+ ///
+ /// </summary>
+ /// <author> Daniel Naber
+ /// </author>
+ [TestFixture]
+ public class TestMatchAllDocsQuery
+ {
+ [Test]
+ public virtual void TestQuery()
+ {
+ RAMDirectory dir = new RAMDirectory();
+ IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(), true);
+ AddDoc("one", iw);
+ AddDoc("two", iw);
+ AddDoc("three four", iw);
+ iw.Close();
+
+ IndexSearcher is_Renamed = new IndexSearcher(dir);
+ Hits hits = is_Renamed.Search(new MatchAllDocsQuery());
+ Assert.AreEqual(3, hits.Length());
+
+ // some artificial queries to trigger the use of skipTo():
+
+ BooleanQuery bq = new BooleanQuery();
+ bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
+ bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
+ hits = is_Renamed.Search(bq);
+ Assert.AreEqual(3, hits.Length());
+
+ bq = new BooleanQuery();
+ bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
+ bq.Add(new TermQuery(new Term("key", "three")), BooleanClause.Occur.MUST);
+ hits = is_Renamed.Search(bq);
+ Assert.AreEqual(1, hits.Length());
+
+ // delete a document:
+ is_Renamed.GetIndexReader().Delete(0);
+ hits = is_Renamed.Search(new MatchAllDocsQuery());
+ Assert.AreEqual(2, hits.Length());
+
+ is_Renamed.Close();
+ }
+
+ [Test]
+ public virtual void TestEquals()
+ {
+ Query q1 = new MatchAllDocsQuery();
+ Query q2 = new MatchAllDocsQuery();
+ Assert.IsTrue(q1.Equals(q2));
+ q1.SetBoost(1.5f);
+ Assert.IsFalse(q1.Equals(q2));
+ }
+
+ private void AddDoc(System.String text, IndexWriter iw)
+ {
+ Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+ doc.Add(new Field("key", text, Field.Store.YES, Field.Index.TOKENIZED));
+ iw.AddDocument(doc);
+ }
+ }
+}
\ No newline at end of file
Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiPhraseQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiPhraseQuery.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiPhraseQuery.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiPhraseQuery.cs Sat Jun 3 19:41:13 2006
@@ -0,0 +1,207 @@
+/*
+ * Copyright 2004 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using TermEnum = Lucene.Net.Index.TermEnum;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using NUnit.Framework;
+
+namespace Lucene.Net.Search
+{
+
+ /// <summary> This class tests the MultiPhraseQuery class.
+ ///
+ /// </summary>
+ /// <author> Otis Gospodnetic, Daniel Naber
+ /// </author>
+ /// <version> $Id: TestMultiPhraseQuery.java 219387 2005-07-17 10:47:14Z dnaber $
+ /// </version>
+ [TestFixture]
+ public class TestMultiPhraseQuery
+ {
+
+ [Test]
+ public virtual void TestPhrasePrefix()
+ {
+ RAMDirectory indexStore = new RAMDirectory();
+ IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
+ Add("blueberry pie", writer);
+ Add("blueberry strudel", writer);
+ Add("blueberry pizza", writer);
+ Add("blueberry chewing gum", writer);
+ Add("bluebird pizza", writer);
+ Add("bluebird foobar pizza", writer);
+ Add("piccadilly circus", writer);
+ writer.Optimize();
+ writer.Close();
+
+ IndexSearcher searcher = new IndexSearcher(indexStore);
+
+ // search for "blueberry pi*":
+ MultiPhraseQuery query1 = new MultiPhraseQuery();
+ // search for "strawberry pi*":
+ MultiPhraseQuery query2 = new MultiPhraseQuery();
+ query1.Add(new Term("body", "blueberry"));
+ query2.Add(new Term("body", "strawberry"));
+
+ System.Collections.ArrayList termsWithPrefix = new System.Collections.ArrayList();
+ IndexReader ir = IndexReader.Open(indexStore);
+
+ // this TermEnum gives "piccadilly", "pie" and "pizza".
+ System.String prefix = "pi";
+ TermEnum te = ir.Terms(new Term("body", prefix));
+ do
+ {
+ if (te.Term().Text().StartsWith(prefix))
+ {
+ termsWithPrefix.Add(te.Term());
+ }
+ }
+ while (te.Next());
+
+ query1.Add((Term[]) termsWithPrefix.ToArray(typeof(Term)));
+ Assert.AreEqual("body:\"blueberry (piccadilly pie pizza)\"", query1.ToString());
+ query2.Add((Term[]) termsWithPrefix.ToArray(typeof(Term)));
+ Assert.AreEqual("body:\"strawberry (piccadilly pie pizza)\"", query2.ToString());
+
+ Hits result;
+ result = searcher.Search(query1);
+ Assert.AreEqual(2, result.Length());
+ result = searcher.Search(query2);
+ Assert.AreEqual(0, result.Length());
+
+ // search for "blue* pizza":
+ MultiPhraseQuery query3 = new MultiPhraseQuery();
+ termsWithPrefix.Clear();
+ prefix = "blue";
+ te = ir.Terms(new Term("body", prefix));
+ do
+ {
+ if (te.Term().Text().StartsWith(prefix))
+ {
+ termsWithPrefix.Add(te.Term());
+ }
+ }
+ while (te.Next());
+ query3.Add((Term[]) termsWithPrefix.ToArray(typeof(Term)));
+ query3.Add(new Term("body", "pizza"));
+
+ result = searcher.Search(query3);
+ Assert.AreEqual(2, result.Length()); // blueberry pizza, bluebird pizza
+ Assert.AreEqual("body:\"(blueberry bluebird) pizza\"", query3.ToString());
+
+ // test slop:
+ query3.SetSlop(1);
+ result = searcher.Search(query3);
+ Assert.AreEqual(3, result.Length()); // blueberry pizza, bluebird pizza, bluebird foobar pizza
+
+ MultiPhraseQuery query4 = new MultiPhraseQuery();
+ try
+ {
+ query4.Add(new Term("field1", "foo"));
+ query4.Add(new Term("field2", "foobar"));
+ Assert.Fail();
+ }
+ catch (System.ArgumentException e)
+ {
+ // okay, all terms must belong to the same field
+ }
+
+ searcher.Close();
+ indexStore.Close();
+ }
+
+ private void Add(System.String s, IndexWriter writer)
+ {
+ Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+ doc.Add(new Field("body", s, Field.Store.YES, Field.Index.TOKENIZED));
+ writer.AddDocument(doc);
+ }
+
+ [Test]
+ public virtual void TestBooleanQueryContainingSingleTermPrefixQuery()
+ {
+ // this tests against bug 33161 (now fixed)
+ // In order to cause the bug, the outer query must have more than one term
+ // and all terms required.
+ // The contained PhraseMultiQuery must contain exactly one term array.
+
+ RAMDirectory indexStore = new RAMDirectory();
+ IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
+ Add("blueberry pie", writer);
+ Add("blueberry chewing gum", writer);
+ Add("blue raspberry pie", writer);
+ writer.Optimize();
+ writer.Close();
+
+ IndexSearcher searcher = new IndexSearcher(indexStore);
+ // This query will be equivalent to +body:pie +body:"blue*"
+ BooleanQuery q = new BooleanQuery();
+ q.Add(new TermQuery(new Term("body", "pie")), BooleanClause.Occur.MUST);
+
+ MultiPhraseQuery trouble = new MultiPhraseQuery();
+ trouble.Add(new Term[]{new Term("body", "blueberry"), new Term("body", "blue")});
+ q.Add(trouble, BooleanClause.Occur.MUST);
+
+ // exception will be thrown here without fix
+ Hits hits = searcher.Search(q);
+
+ Assert.AreEqual(2, hits.Length(), "Wrong number of hits");
+ searcher.Close();
+ }
+
+ [Test]
+ public virtual void TestPhrasePrefixWithBooleanQuery()
+ {
+ RAMDirectory indexStore = new RAMDirectory();
+ IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(new System.String[]{}), true);
+ Add("This is a test", "object", writer);
+ Add("a note", "note", writer);
+ writer.Close();
+
+ IndexSearcher searcher = new IndexSearcher(indexStore);
+
+ // This query will be equivalent to +type:note +body:"a t*"
+ BooleanQuery q = new BooleanQuery();
+ q.Add(new TermQuery(new Term("type", "note")), BooleanClause.Occur.MUST);
+
+ MultiPhraseQuery trouble = new MultiPhraseQuery();
+ trouble.Add(new Term("body", "a"));
+ trouble.Add(new Term[]{new Term("body", "test"), new Term("body", "this")});
+ q.Add(trouble, BooleanClause.Occur.MUST);
+
+ // exception will be thrown here without fix for #35626:
+ Hits hits = searcher.Search(q);
+ Assert.AreEqual(0, hits.Length(), "Wrong number of hits");
+ searcher.Close();
+ }
+
+ private void Add(System.String s, System.String type, IndexWriter writer)
+ {
+ Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+ doc.Add(new Field("body", s, Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("type", type, Field.Store.YES, Field.Index.UN_TOKENIZED));
+ writer.AddDocument(doc);
+ }
+ }
+}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcher.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiSearcher.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcher.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcher.cs Sat Jun 3 19:41:13 2006
@@ -13,7 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
using System;
+using KeywordAnalyzer = Lucene.Net.Analysis.KeywordAnalyzer;
using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
@@ -24,17 +26,19 @@
using Directory = Lucene.Net.Store.Directory;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
using NUnit.Framework;
+
namespace Lucene.Net.Search
{
/// <summary> Tests {@link MultiSearcher} class.
///
/// </summary>
- /// <version> $Id: TestMultiSearcher.java,v 1.6 2004/03/02 13:09:57 otis Exp $
+ /// <version> $Id: TestMultiSearcher.java 354819 2005-12-07 17:48:37Z yonik $
/// </version>
[TestFixture]
public class TestMultiSearcher
{
+
/// <summary> ReturnS a new instance of the concrete MultiSearcher class
/// used in this test.
/// </summary>
@@ -43,30 +47,30 @@
return new MultiSearcher(searchers);
}
- [Test]
- public virtual void TestEmptyIndex()
+ [Test]
+ public virtual void TestEmptyIndex()
{
// creating two directories for indices
Directory indexStoreA = new RAMDirectory();
Directory indexStoreB = new RAMDirectory();
// creating a document to store
- Document lDoc = new Document();
- lDoc.Add(Field.Text("fulltext", "Once upon a time....."));
- lDoc.Add(Field.Keyword("id", "doc1"));
- lDoc.Add(Field.Keyword("handle", "1"));
+ Lucene.Net.Documents.Document lDoc = new Lucene.Net.Documents.Document();
+ lDoc.Add(new Field("fulltext", "Once upon a time.....", Field.Store.YES, Field.Index.TOKENIZED));
+ lDoc.Add(new Field("id", "doc1", Field.Store.YES, Field.Index.UN_TOKENIZED));
+ lDoc.Add(new Field("handle", "1", Field.Store.YES, Field.Index.UN_TOKENIZED));
// creating a document to store
- Document lDoc2 = new Document();
- lDoc2.Add(Field.Text("fulltext", "in a galaxy far far away....."));
- lDoc2.Add(Field.Keyword("id", "doc2"));
- lDoc2.Add(Field.Keyword("handle", "1"));
+ Lucene.Net.Documents.Document lDoc2 = new Lucene.Net.Documents.Document();
+ lDoc2.Add(new Field("fulltext", "in a galaxy far far away.....", Field.Store.YES, Field.Index.TOKENIZED));
+ lDoc2.Add(new Field("id", "doc2", Field.Store.YES, Field.Index.UN_TOKENIZED));
+ lDoc2.Add(new Field("handle", "1", Field.Store.YES, Field.Index.UN_TOKENIZED));
// creating a document to store
- Document lDoc3 = new Document();
- lDoc3.Add(Field.Text("fulltext", "a bizarre bug manifested itself...."));
- lDoc3.Add(Field.Keyword("id", "doc3"));
- lDoc3.Add(Field.Keyword("handle", "1"));
+ Lucene.Net.Documents.Document lDoc3 = new Lucene.Net.Documents.Document();
+ lDoc3.Add(new Field("fulltext", "a bizarre bug manifested itself....", Field.Store.YES, Field.Index.TOKENIZED));
+ lDoc3.Add(new Field("id", "doc3", Field.Store.YES, Field.Index.UN_TOKENIZED));
+ lDoc3.Add(new Field("handle", "1", Field.Store.YES, Field.Index.UN_TOKENIZED));
// creating an index writer for the first index
IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(), true);
@@ -102,23 +106,12 @@
Assert.AreEqual(3, hits.Length());
- try
- {
- // iterating over the hit documents
- for (int i = 0; i < hits.Length(); i++)
- {
- Document d = hits.Doc(i);
- }
- }
- catch (System.IndexOutOfRangeException e)
- {
- Assert.Fail("ArrayIndexOutOfBoundsException thrown: " + e.Message);
- System.Console.Error.WriteLine(e.Source);
- }
- finally
+ // iterating over the hit documents
+ for (int i = 0; i < hits.Length(); i++)
{
- mSearcher.Close();
+ Lucene.Net.Documents.Document d = hits.Doc(i);
}
+ mSearcher.Close();
//--------------------------------------------------------------------
@@ -143,24 +136,13 @@
Assert.AreEqual(4, hits2.Length());
- try
- {
- // iterating over the hit documents
- for (int i = 0; i < hits2.Length(); i++)
- {
- // no exception should happen at this point
- Document d = hits2.Doc(i);
- }
- }
- catch (System.Exception e)
- {
- Assert.Fail("Exception thrown: " + e.Message);
- System.Console.Error.WriteLine(e.Source);
- }
- finally
+ // iterating over the hit documents
+ for (int i = 0; i < hits2.Length(); i++)
{
- mSearcher2.Close();
+ // no exception should happen at this point
+ Lucene.Net.Documents.Document d = hits2.Doc(i);
}
+ mSearcher2.Close();
//--------------------------------------------------------------------
// scenario 3
@@ -189,23 +171,132 @@
Assert.AreEqual(3, hits3.Length());
+ // iterating over the hit documents
+ for (int i = 0; i < hits3.Length(); i++)
+ {
+ Lucene.Net.Documents.Document d = hits3.Doc(i);
+ }
+ mSearcher3.Close();
+ }
+
+ private static Lucene.Net.Documents.Document CreateDocument(System.String contents1, System.String contents2)
+ {
+ Lucene.Net.Documents.Document document = new Lucene.Net.Documents.Document();
+
+ document.Add(new Field("contents", contents1, Field.Store.YES, Field.Index.UN_TOKENIZED));
+
+ if (contents2 != null)
+ {
+ document.Add(new Field("contents", contents2, Field.Store.YES, Field.Index.UN_TOKENIZED));
+ }
+
+ return document;
+ }
+
+ private static void InitIndex(Directory directory, int nDocs, bool create, System.String contents2)
+ {
+ IndexWriter indexWriter = null;
+
try
{
- // iterating over the hit documents
- for (int i = 0; i < hits3.Length(); i++)
+ indexWriter = new IndexWriter(directory, new KeywordAnalyzer(), create);
+
+ for (int i = 0; i < nDocs; i++)
{
- Document d = hits3.Doc(i);
+ indexWriter.AddDocument(CreateDocument("doc" + i, contents2));
}
}
- catch (System.IO.IOException e)
- {
- Assert.Fail("IOException thrown: " + e.Message);
- System.Console.Error.WriteLine(e.Source);
- }
finally
{
- mSearcher3.Close();
+ if (indexWriter != null)
+ {
+ indexWriter.Close();
+ }
}
+ }
+
+ /* uncomment this when the highest score is always normalized to 1.0, even when it was < 1.0
+ public void testNormalization1() throws IOException {
+ testNormalization(1, "Using 1 document per index:");
+ }
+ */
+
+ [Test]
+ public virtual void TestNormalization10()
+ {
+ _TestNormalization(10, "Using 10 documents per index:");
+ }
+
+ private void _TestNormalization(int nDocs, System.String message)
+ {
+ Query query = new TermQuery(new Term("contents", "doc0"));
+
+ RAMDirectory ramDirectory1;
+ IndexSearcher indexSearcher1;
+ Hits hits;
+
+ ramDirectory1 = new RAMDirectory();
+
+ // First put the documents in the same index
+ InitIndex(ramDirectory1, nDocs, true, null); // documents with a single token "doc0", "doc1", etc...
+ InitIndex(ramDirectory1, nDocs, false, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc...
+
+ indexSearcher1 = new IndexSearcher(ramDirectory1);
+
+ hits = indexSearcher1.Search(query);
+
+ Assert.AreEqual(2, hits.Length(), message);
+
+ Assert.AreEqual(1, hits.Score(0), 1e-6, message); // hits.score(0) is 0.594535 if only a single document is in first index
+
+ // Store the scores for use later
+ float[] scores = new float[]{hits.Score(0), hits.Score(1)};
+
+ Assert.IsTrue(scores[0] > scores[1], message);
+
+ indexSearcher1.Close();
+ ramDirectory1.Close();
+ hits = null;
+
+
+
+ RAMDirectory ramDirectory2;
+ IndexSearcher indexSearcher2;
+
+ ramDirectory1 = new RAMDirectory();
+ ramDirectory2 = new RAMDirectory();
+
+ // Now put the documents in a different index
+ InitIndex(ramDirectory1, nDocs, true, null); // documents with a single token "doc0", "doc1", etc...
+ InitIndex(ramDirectory2, nDocs, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc...
+
+ indexSearcher1 = new IndexSearcher(ramDirectory1);
+ indexSearcher2 = new IndexSearcher(ramDirectory2);
+
+ Searcher searcher = GetMultiSearcherInstance(new Searcher[]{indexSearcher1, indexSearcher2});
+
+ hits = searcher.Search(query);
+
+ Assert.AreEqual(2, hits.Length(), message);
+
+ // The scores should be the same (within reason)
+ Assert.AreEqual(scores[0], hits.Score(0), 1e-6, message); // This will be a document from ramDirectory1
+ Assert.AreEqual(scores[1], hits.Score(1), 1e-6, message); // This will be a document from ramDirectory2
+
+
+
+ // Adding a Sort.RELEVANCE object should not change anything
+ hits = searcher.Search(query, Sort.RELEVANCE);
+
+ Assert.AreEqual(2, hits.Length(), message);
+
+ Assert.AreEqual(scores[0], hits.Score(0), 1e-6, message); // This will be a document from ramDirectory1
+ Assert.AreEqual(scores[1], hits.Score(1), 1e-6, message); // This will be a document from ramDirectory2
+
+ searcher.Close();
+
+ ramDirectory1.Close();
+ ramDirectory2.Close();
}
}
}
Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcherRanking.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiSearcherRanking.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcherRanking.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcherRanking.cs Sat Jun 3 19:41:13 2006
@@ -0,0 +1,183 @@
+/*
+ * Copyright 2004 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using ParseException = Lucene.Net.QueryParsers.ParseException;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
+namespace Lucene.Net.Search
+{
+
+ /// <summary> Tests {@link MultiSearcher} ranking, i.e. makes sure this bug is fixed:
+ /// http://issues.apache.org/bugzilla/show_bug.cgi?id=31841
+ ///
+ /// </summary>
+ /// <version> $Id: TestMultiSearcher.java 150492 2004-09-06 22:01:49Z dnaber $
+ /// </version>
+ [TestFixture]
+ public class TestMultiSearcherRanking
+ {
+
+ private bool verbose = false; // set to true to output hits
+ private System.String FIELD_NAME = "body";
+ private Searcher multiSearcher;
+ private Searcher singleSearcher;
+
+ [Test]
+ public virtual void TestOneTermQuery()
+ {
+ CheckQuery("three");
+ }
+
+ [Test]
+ public virtual void TestTwoTermQuery()
+ {
+ CheckQuery("three foo");
+ }
+
+ [Test]
+ public virtual void TestPrefixQuery()
+ {
+ CheckQuery("multi*");
+ }
+
+ [Test]
+ public virtual void TestFuzzyQuery()
+ {
+ CheckQuery("multiThree~");
+ }
+
+ [Test]
+ public virtual void TestRangeQuery()
+ {
+ CheckQuery("{multiA TO multiP}");
+ }
+
+ [Test]
+ public virtual void TestMultiPhraseQuery()
+ {
+ CheckQuery("\"blueberry pi*\"");
+ }
+
+ [Test]
+ public virtual void TestNoMatchQuery()
+ {
+ CheckQuery("+three +nomatch");
+ }
+
+ /*
+ public void testTermRepeatedQuery() throws IOException, ParseException {
+ // TODO: this corner case yields different results.
+ checkQuery("multi* multi* foo");
+ }
+ */
+
+ /// <summary> checks if a query yields the same result when executed on
+ /// a single IndexSearcher containing all documents and on a
+ /// MultiSearcher aggregating sub-searchers
+ /// </summary>
+ /// <param name="queryStr"> the query to check.
+ /// </param>
+ /// <throws> IOException </throws>
+ /// <throws> ParseException </throws>
+ private void CheckQuery(System.String queryStr)
+ {
+ // check result hit ranking
+ if (verbose)
+ System.Console.Out.WriteLine("Query: " + queryStr);
+ Query query = Lucene.Net.QueryParsers.QueryParser.Parse(queryStr, FIELD_NAME, new StandardAnalyzer());
+ Hits multiSearcherHits = multiSearcher.Search(query);
+ Hits singleSearcherHits = singleSearcher.Search(query);
+ Assert.AreEqual(multiSearcherHits.Length(), singleSearcherHits.Length());
+ for (int i = 0; i < multiSearcherHits.Length(); i++)
+ {
+ Lucene.Net.Documents.Document docMulti = multiSearcherHits.Doc(i);
+ Lucene.Net.Documents.Document docSingle = singleSearcherHits.Doc(i);
+ if (verbose)
+ System.Console.Out.WriteLine("Multi: " + docMulti.Get(FIELD_NAME) + " score=" + multiSearcherHits.Score(i));
+ if (verbose)
+ System.Console.Out.WriteLine("Single: " + docSingle.Get(FIELD_NAME) + " score=" + singleSearcherHits.Score(i));
+ Assert.AreEqual(multiSearcherHits.Score(i), singleSearcherHits.Score(i), 0.001f);
+ Assert.AreEqual(docMulti.Get(FIELD_NAME), docSingle.Get(FIELD_NAME));
+ }
+ if (verbose)
+ System.Console.Out.WriteLine();
+ }
+
+ /// <summary> initializes multiSearcher and singleSearcher with the same document set</summary>
+ [TestFixtureSetUp]
+ public virtual void SetUp()
+ {
+ // create MultiSearcher from two separate searchers
+ Directory d1 = new RAMDirectory();
+ IndexWriter iw1 = new IndexWriter(d1, new StandardAnalyzer(), true);
+ AddCollection1(iw1);
+ iw1.Close();
+ Directory d2 = new RAMDirectory();
+ IndexWriter iw2 = new IndexWriter(d2, new StandardAnalyzer(), true);
+ AddCollection2(iw2);
+ iw2.Close();
+
+ Lucene.Net.Search.Searchable[] s = new Lucene.Net.Search.Searchable[2];
+ s[0] = new IndexSearcher(d1);
+ s[1] = new IndexSearcher(d2);
+ multiSearcher = new MultiSearcher(s);
+
+ // create IndexSearcher which contains all documents
+ Directory d = new RAMDirectory();
+ IndexWriter iw = new IndexWriter(d, new StandardAnalyzer(), true);
+ AddCollection1(iw);
+ AddCollection2(iw);
+ iw.Close();
+ singleSearcher = new IndexSearcher(d);
+ }
+
+ private void AddCollection1(IndexWriter iw)
+ {
+ Add("one blah three", iw);
+ Add("one foo three multiOne", iw);
+ Add("one foobar three multiThree", iw);
+ Add("blueberry pie", iw);
+ Add("blueberry strudel", iw);
+ Add("blueberry pizza", iw);
+ }
+
+ private void AddCollection2(IndexWriter iw)
+ {
+ Add("two blah three", iw);
+ Add("two foo xxx multiTwo", iw);
+ Add("two foobar xxx multiThreee", iw);
+ Add("blueberry chewing gum", iw);
+ Add("bluebird pizza", iw);
+ Add("bluebird foobar pizza", iw);
+ Add("piccadilly circus", iw);
+ }
+
+ private void Add(System.String value_Renamed, IndexWriter iw)
+ {
+ Lucene.Net.Documents.Document d = new Lucene.Net.Documents.Document();
+ d.Add(new Field(FIELD_NAME, value_Renamed, Field.Store.YES, Field.Index.TOKENIZED));
+ iw.AddDocument(d);
+ }
+ }
+}
\ No newline at end of file
Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiThreadTermVectors.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiThreadTermVectors.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiThreadTermVectors.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiThreadTermVectors.cs Sat Jun 3 19:41:13 2006
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2004 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using TermFreqVector = Lucene.Net.Index.TermFreqVector;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using English = Lucene.Net.Util.English;
+
+namespace Lucene.Net.Search
+{
+
+ /// <author> Bernhard Messer
+ /// </author>
+ /// <version> $rcs = ' $Id: TestMultiThreadTermVectors.java 150569 2004-10-06 10:40:23Z goller $ ' ;
+ /// </version>
+ [TestFixture]
+ public class TestMultiThreadTermVectors
+ {
+ private RAMDirectory directory = new RAMDirectory();
+ public int numDocs = 100;
+ public int numThreads = 3;
+
+
+ [TestFixtureSetUp]
+ public virtual void SetUp()
+ {
+ IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true);
+ //writer.setUseCompoundFile(false);
+ //writer.infoStream = System.out;
+ for (int i = 0; i < numDocs; i++)
+ {
+ Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+ Field fld = new Field("field", English.IntToEnglish(i), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.YES);
+ doc.Add(fld);
+ writer.AddDocument(doc);
+ }
+ writer.Close();
+ }
+
+ [Test]
+ public virtual void Test()
+ {
+
+ IndexReader reader = null;
+
+ try
+ {
+ reader = IndexReader.Open(directory);
+ for (int i = 1; i <= numThreads; i++)
+ _TestTermPositionVectors(reader, i);
+ }
+ catch (System.IO.IOException ioe)
+ {
+ Assert.Fail(ioe.Message);
+ }
+ finally
+ {
+ if (reader != null)
+ {
+ try
+ {
+ /** close the opened reader */
+ reader.Close();
+ }
+ catch (System.IO.IOException ioe)
+ {
+ System.Console.Error.WriteLine(ioe.StackTrace);
+ }
+ }
+ }
+ }
+
+ public virtual void _TestTermPositionVectors(IndexReader reader, int threadCount)
+ {
+ MultiThreadTermVectorsReader[] mtr = new MultiThreadTermVectorsReader[threadCount];
+ for (int i = 0; i < threadCount; i++)
+ {
+ mtr[i] = new MultiThreadTermVectorsReader();
+ mtr[i].Init(reader);
+ }
+
+
+ /** run until all threads finished */
+ int threadsAlive = mtr.Length;
+ while (threadsAlive > 0)
+ {
+ try
+ {
+ //System.out.println("Threads alive");
+ System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 10));
+ threadsAlive = mtr.Length;
+ for (int i = 0; i < mtr.Length; i++)
+ {
+ if (mtr[i].IsAlive() == true)
+ {
+ break;
+ }
+
+ threadsAlive--;
+ }
+ }
+ catch (System.Threading.ThreadInterruptedException ie)
+ {
+ }
+ }
+
+ long totalTime = 0L;
+ for (int i = 0; i < mtr.Length; i++)
+ {
+ totalTime += mtr[i].timeElapsed;
+ mtr[i] = null;
+ }
+
+ //System.out.println("threadcount: " + mtr.length + " average term vector time: " + totalTime/mtr.length);
+ }
+ }
+
+ class MultiThreadTermVectorsReader : IThreadRunnable
+ {
+
+ private IndexReader reader = null;
+ private SupportClass.ThreadClass t = null;
+
+ private int runsToDo = 100;
+ internal long timeElapsed = 0;
+
+
+ public virtual void Init(IndexReader reader)
+ {
+ this.reader = reader;
+ timeElapsed = 0;
+ t = new SupportClass.ThreadClass(new System.Threading.ThreadStart(this.Run));
+ t.Start();
+ }
+
+ public virtual bool IsAlive()
+ {
+ if (t == null)
+ return false;
+
+ return t.IsAlive;
+ }
+
+ public virtual void Run()
+ {
+ try
+ {
+ // run the test 100 times
+ for (int i = 0; i < runsToDo; i++)
+ TestTermVectors();
+ }
+ catch (System.Exception e)
+ {
+ System.Console.Error.WriteLine(e.StackTrace);
+ }
+ return ;
+ }
+
+ [Test]
+ private void TestTermVectors()
+ {
+ // check:
+ int numDocs = reader.NumDocs();
+ long start = 0L;
+ for (int docId = 0; docId < numDocs; docId++)
+ {
+ start = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
+ TermFreqVector[] vectors = reader.GetTermFreqVectors(docId);
+ timeElapsed += (System.DateTime.Now.Ticks - 621355968000000000) / 10000 - start;
+
+ // verify vectors result
+ VerifyVectors(vectors, docId);
+
+ start = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
+ TermFreqVector vector = reader.GetTermFreqVector(docId, "field");
+ timeElapsed += (System.DateTime.Now.Ticks - 621355968000000000) / 10000 - start;
+
+ vectors = new TermFreqVector[1];
+ vectors[0] = vector;
+
+ VerifyVectors(vectors, docId);
+ }
+ }
+
+ private void VerifyVectors(TermFreqVector[] vectors, int num)
+ {
+ System.Text.StringBuilder temp = new System.Text.StringBuilder();
+ System.String[] terms = null;
+ for (int i = 0; i < vectors.Length; i++)
+ {
+ terms = vectors[i].GetTerms();
+ for (int z = 0; z < terms.Length; z++)
+ {
+ temp.Append(terms[z]);
+ }
+ }
+
+ if (!English.IntToEnglish(num).Trim().Equals(temp.ToString().Trim()))
+ System.Console.Out.WriteLine("worng term result");
+ }
+ }
+}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestNot.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestNot.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestNot.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestNot.cs Sat Jun 3 19:41:13 2006
@@ -13,15 +13,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
using System;
using NUnit.Framework;
using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
using IndexWriter = Lucene.Net.Index.IndexWriter;
-using Term = Lucene.Net.Index.Term;
using QueryParser = Lucene.Net.QueryParsers.QueryParser;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
namespace Lucene.Net.Search
{
@@ -30,26 +31,27 @@
/// </summary>
/// <author> Doug Cutting
/// </author>
- /// <version> $Revision: 1.3 $
+ /// <version> $Revision: 150497 $
/// </version>
[TestFixture]
public class TestNot
{
- [Test]
- public virtual void TestNot_()
+
+ [Test]
+ public virtual void TestNot_Renamed_Method()
{
RAMDirectory store = new RAMDirectory();
IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true);
- Document d1 = new Document();
- d1.Add(Field.Text("Field", "a b"));
+ Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
+ d1.Add(new Field("field", "a b", Field.Store.YES, Field.Index.TOKENIZED));
writer.AddDocument(d1);
writer.Optimize();
writer.Close();
Searcher searcher = new IndexSearcher(store);
- Query query = Lucene.Net.QueryParsers.QueryParser.Parse("a NOT b", "Field", new SimpleAnalyzer());
+ Query query = Lucene.Net.QueryParsers.QueryParser.Parse("a NOT b", "field", new SimpleAnalyzer());
//System.out.println(query);
Hits hits = searcher.Search(query);
Assert.AreEqual(0, hits.Length());
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestParallelMultiSearcher.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestParallelMultiSearcher.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestParallelMultiSearcher.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestParallelMultiSearcher.cs Sat Jun 3 19:41:13 2006
@@ -13,12 +13,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
using System;
+
namespace Lucene.Net.Search
{
+
/// <summary> Unit tests for the ParallelMultiSearcher </summary>
- public class TestParallelMultiSearcher : TestMultiSearcher
+ public class TestParallelMultiSearcher:TestMultiSearcher
{
+
protected internal override MultiSearcher GetMultiSearcherInstance(Searcher[] searchers)
{
return new ParallelMultiSearcher(searchers);