You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by do...@apache.org on 2009/07/29 20:04:24 UTC
svn commit: r798995 [32/35] - in /incubator/lucene.net/trunk/C#/src:
Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Analysis/Standard/
Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/QueryParser/
Lucene.Net/Search/ Lucene.Net/Search/Function/ Lucene.Net...
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestFuzzyQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestFuzzyQuery.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestFuzzyQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestFuzzyQuery.cs Wed Jul 29 18:04:12 2009
@@ -33,8 +33,6 @@
/// <summary> Tests {@link FuzzyQuery}.
///
/// </summary>
- /// <author> Daniel Naber
- /// </author>
[TestFixture]
public class TestFuzzyQuery : LuceneTestCase
{
@@ -42,7 +40,7 @@
public virtual void TestFuzziness()
{
RAMDirectory directory = new RAMDirectory();
- IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+ IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
AddDoc("aaaaa", writer);
AddDoc("aaaab", writer);
AddDoc("aaabb", writer);
@@ -55,114 +53,114 @@
IndexSearcher searcher = new IndexSearcher(directory);
FuzzyQuery query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 0);
- Hits hits = searcher.Search(query);
- Assert.AreEqual(3, hits.Length());
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(3, hits.Length);
// same with prefix
query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 1);
- hits = searcher.Search(query);
- Assert.AreEqual(3, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(3, hits.Length);
query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 2);
- hits = searcher.Search(query);
- Assert.AreEqual(3, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(3, hits.Length);
query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 3);
- hits = searcher.Search(query);
- Assert.AreEqual(3, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(3, hits.Length);
query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 4);
- hits = searcher.Search(query);
- Assert.AreEqual(2, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(2, hits.Length);
query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 5);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 6);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
// not similar enough:
query = new FuzzyQuery(new Term("field", "xxxxx"), FuzzyQuery.defaultMinSimilarity, 0);
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
query = new FuzzyQuery(new Term("field", "aaccc"), FuzzyQuery.defaultMinSimilarity, 0); // edit distance to "aaaaa" = 3
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// query identical to a word in the index:
query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 0);
- hits = searcher.Search(query);
- Assert.AreEqual(3, hits.Length());
- Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaa"));
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(3, hits.Length);
+ Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
// default allows for up to two edits:
- Assert.AreEqual(hits.Doc(1).Get("field"), ("aaaab"));
- Assert.AreEqual(hits.Doc(2).Get("field"), ("aaabb"));
+ Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
+ Assert.AreEqual(searcher.Doc(hits[2].doc).Get("field"), ("aaabb"));
// query similar to a word in the index:
query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 0);
- hits = searcher.Search(query);
- Assert.AreEqual(3, hits.Length());
- Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaa"));
- Assert.AreEqual(hits.Doc(1).Get("field"), ("aaaab"));
- Assert.AreEqual(hits.Doc(2).Get("field"), ("aaabb"));
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(3, hits.Length);
+ Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
+ Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
+ Assert.AreEqual(searcher.Doc(hits[2].doc).Get("field"), ("aaabb"));
// now with prefix
query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 1);
- hits = searcher.Search(query);
- Assert.AreEqual(3, hits.Length());
- Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaa"));
- Assert.AreEqual(hits.Doc(1).Get("field"), ("aaaab"));
- Assert.AreEqual(hits.Doc(2).Get("field"), ("aaabb"));
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(3, hits.Length);
+ Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
+ Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
+ Assert.AreEqual(searcher.Doc(hits[2].doc).Get("field"), ("aaabb"));
query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 2);
- hits = searcher.Search(query);
- Assert.AreEqual(3, hits.Length());
- Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaa"));
- Assert.AreEqual(hits.Doc(1).Get("field"), ("aaaab"));
- Assert.AreEqual(hits.Doc(2).Get("field"), ("aaabb"));
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(3, hits.Length);
+ Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
+ Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
+ Assert.AreEqual(searcher.Doc(hits[2].doc).Get("field"), ("aaabb"));
query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 3);
- hits = searcher.Search(query);
- Assert.AreEqual(3, hits.Length());
- Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaa"));
- Assert.AreEqual(hits.Doc(1).Get("field"), ("aaaab"));
- Assert.AreEqual(hits.Doc(2).Get("field"), ("aaabb"));
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(3, hits.Length);
+ Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
+ Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
+ Assert.AreEqual(searcher.Doc(hits[2].doc).Get("field"), ("aaabb"));
query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 4);
- hits = searcher.Search(query);
- Assert.AreEqual(2, hits.Length());
- Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaa"));
- Assert.AreEqual(hits.Doc(1).Get("field"), ("aaaab"));
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(2, hits.Length);
+ Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
+ Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 5);
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 0);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
- Assert.AreEqual(hits.Doc(0).Get("field"), ("ddddd"));
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
+ Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("ddddd"));
// now with prefix
query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 1);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
- Assert.AreEqual(hits.Doc(0).Get("field"), ("ddddd"));
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
+ Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("ddddd"));
query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 2);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
- Assert.AreEqual(hits.Doc(0).Get("field"), ("ddddd"));
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
+ Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("ddddd"));
query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 3);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
- Assert.AreEqual(hits.Doc(0).Get("field"), ("ddddd"));
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
+ Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("ddddd"));
query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 4);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
- Assert.AreEqual(hits.Doc(0).Get("field"), ("ddddd"));
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
+ Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("ddddd"));
query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 5);
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// different field = no match:
query = new FuzzyQuery(new Term("anotherfield", "ddddX"), FuzzyQuery.defaultMinSimilarity, 0);
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
searcher.Close();
directory.Close();
@@ -172,7 +170,7 @@
public virtual void TestFuzzinessLong()
{
RAMDirectory directory = new RAMDirectory();
- IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+ IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
AddDoc("aaaaaaa", writer);
AddDoc("segment", writer);
writer.Optimize();
@@ -182,64 +180,64 @@
FuzzyQuery query;
// not similar enough:
query = new FuzzyQuery(new Term("field", "xxxxx"), FuzzyQuery.defaultMinSimilarity, 0);
- Hits hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// edit distance to "aaaaaaa" = 3, this matches because the string is longer than
// in testDefaultFuzziness so a bigger difference is allowed:
query = new FuzzyQuery(new Term("field", "aaaaccc"), FuzzyQuery.defaultMinSimilarity, 0);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
- Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaaaa"));
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
+ Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaaaa"));
// now with prefix
query = new FuzzyQuery(new Term("field", "aaaaccc"), FuzzyQuery.defaultMinSimilarity, 1);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
- Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaaaa"));
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
+ Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaaaa"));
query = new FuzzyQuery(new Term("field", "aaaaccc"), FuzzyQuery.defaultMinSimilarity, 4);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
- Assert.AreEqual(hits.Doc(0).Get("field"), ("aaaaaaa"));
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
+ Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaaaa"));
query = new FuzzyQuery(new Term("field", "aaaaccc"), FuzzyQuery.defaultMinSimilarity, 5);
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// no match, more than half of the characters is wrong:
query = new FuzzyQuery(new Term("field", "aaacccc"), FuzzyQuery.defaultMinSimilarity, 0);
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// now with prefix
query = new FuzzyQuery(new Term("field", "aaacccc"), FuzzyQuery.defaultMinSimilarity, 2);
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// "student" and "stellent" are indeed similar to "segment" by default:
query = new FuzzyQuery(new Term("field", "student"), FuzzyQuery.defaultMinSimilarity, 0);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
query = new FuzzyQuery(new Term("field", "stellent"), FuzzyQuery.defaultMinSimilarity, 0);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
// now with prefix
query = new FuzzyQuery(new Term("field", "student"), FuzzyQuery.defaultMinSimilarity, 1);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
query = new FuzzyQuery(new Term("field", "stellent"), FuzzyQuery.defaultMinSimilarity, 1);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
query = new FuzzyQuery(new Term("field", "student"), FuzzyQuery.defaultMinSimilarity, 2);
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
query = new FuzzyQuery(new Term("field", "stellent"), FuzzyQuery.defaultMinSimilarity, 2);
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// "student" doesn't match anymore thanks to increased minimum similarity:
query = new FuzzyQuery(new Term("field", "student"), 0.6f, 0);
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
try
{
@@ -267,7 +265,7 @@
private void AddDoc(System.String text, IndexWriter writer)
{
Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
- doc.Add(new Field("field", text, Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("field", text, Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
}
}
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMatchAllDocsQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMatchAllDocsQuery.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMatchAllDocsQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMatchAllDocsQuery.cs Wed Jul 29 18:04:12 2009
@@ -33,8 +33,6 @@
/// <summary> Tests MatchAllDocsQuery.
///
/// </summary>
- /// <author> Daniel Naber
- /// </author>
[TestFixture]
public class TestMatchAllDocsQuery : LuceneTestCase
{
@@ -42,34 +40,34 @@
public virtual void TestQuery()
{
RAMDirectory dir = new RAMDirectory();
- IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(), true);
+ IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
AddDoc("one", iw);
AddDoc("two", iw);
AddDoc("three four", iw);
iw.Close();
IndexSearcher is_Renamed = new IndexSearcher(dir);
- Hits hits = is_Renamed.Search(new MatchAllDocsQuery());
- Assert.AreEqual(3, hits.Length());
+ ScoreDoc[] hits = is_Renamed.Search(new MatchAllDocsQuery(), null, 1000).scoreDocs;
+ Assert.AreEqual(3, hits.Length);
// some artificial queries to trigger the use of skipTo():
BooleanQuery bq = new BooleanQuery();
bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
- hits = is_Renamed.Search(bq);
- Assert.AreEqual(3, hits.Length());
+ hits = is_Renamed.Search(bq, null, 1000).scoreDocs;
+ Assert.AreEqual(3, hits.Length);
bq = new BooleanQuery();
bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
bq.Add(new TermQuery(new Term("key", "three")), BooleanClause.Occur.MUST);
- hits = is_Renamed.Search(bq);
- Assert.AreEqual(1, hits.Length());
+ hits = is_Renamed.Search(bq, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
// delete a document:
is_Renamed.GetIndexReader().DeleteDocument(0);
- hits = is_Renamed.Search(new MatchAllDocsQuery());
- Assert.AreEqual(2, hits.Length());
+ hits = is_Renamed.Search(new MatchAllDocsQuery(), null, 1000).scoreDocs;
+ Assert.AreEqual(2, hits.Length);
is_Renamed.Close();
}
@@ -87,7 +85,7 @@
private void AddDoc(System.String text, IndexWriter iw)
{
Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
- doc.Add(new Field("key", text, Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("key", text, Field.Store.YES, Field.Index.ANALYZED));
iw.AddDocument(doc);
}
}
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiPhraseQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiPhraseQuery.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiPhraseQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiPhraseQuery.cs Wed Jul 29 18:04:12 2009
@@ -47,7 +47,7 @@
public virtual void TestPhrasePrefix()
{
RAMDirectory indexStore = new RAMDirectory();
- IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
+ IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
Add("blueberry pie", writer);
Add("blueberry strudel", writer);
Add("blueberry pizza", writer);
@@ -87,11 +87,11 @@
query2.Add((Term[]) termsWithPrefix.ToArray(typeof(Term)));
Assert.AreEqual("body:\"strawberry (piccadilly pie pizza)\"", query2.ToString());
- Hits result;
- result = searcher.Search(query1);
- Assert.AreEqual(2, result.Length());
- result = searcher.Search(query2);
- Assert.AreEqual(0, result.Length());
+ ScoreDoc[] result;
+ result = searcher.Search(query1, null, 1000).scoreDocs;
+ Assert.AreEqual(2, result.Length);
+ result = searcher.Search(query2, null, 1000).scoreDocs;
+ Assert.AreEqual(0, result.Length);
// search for "blue* pizza":
MultiPhraseQuery query3 = new MultiPhraseQuery();
@@ -109,14 +109,14 @@
query3.Add((Term[]) termsWithPrefix.ToArray(typeof(Term)));
query3.Add(new Term("body", "pizza"));
- result = searcher.Search(query3);
- Assert.AreEqual(2, result.Length()); // blueberry pizza, bluebird pizza
+ result = searcher.Search(query3, null, 1000).scoreDocs;
+ Assert.AreEqual(2, result.Length); // blueberry pizza, bluebird pizza
Assert.AreEqual("body:\"(blueberry bluebird) pizza\"", query3.ToString());
// test slop:
query3.SetSlop(1);
- result = searcher.Search(query3);
- Assert.AreEqual(3, result.Length()); // blueberry pizza, bluebird pizza, bluebird foobar pizza
+ result = searcher.Search(query3, null, 1000).scoreDocs;
+ Assert.AreEqual(3, result.Length); // blueberry pizza, bluebird pizza, bluebird foobar pizza
MultiPhraseQuery query4 = new MultiPhraseQuery();
try
@@ -137,7 +137,7 @@
private void Add(System.String s, IndexWriter writer)
{
Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
- doc.Add(new Field("body", s, Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("body", s, Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
}
@@ -150,7 +150,7 @@
// The contained PhraseMultiQuery must contain exactly one term array.
RAMDirectory indexStore = new RAMDirectory();
- IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
+ IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
Add("blueberry pie", writer);
Add("blueberry chewing gum", writer);
Add("blue raspberry pie", writer);
@@ -167,9 +167,9 @@
q.Add(trouble, BooleanClause.Occur.MUST);
// exception will be thrown here without fix
- Hits hits = searcher.Search(q);
+ ScoreDoc[] hits = searcher.Search(q, null, 1000).scoreDocs;
- Assert.AreEqual(2, hits.Length(), "Wrong number of hits");
+ Assert.AreEqual(2, hits.Length, "Wrong number of hits");
searcher.Close();
}
@@ -177,7 +177,7 @@
public virtual void TestPhrasePrefixWithBooleanQuery()
{
RAMDirectory indexStore = new RAMDirectory();
- IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(new System.String[]{}), true);
+ IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(new System.String[] { }), true, IndexWriter.MaxFieldLength.LIMITED);
Add("This is a test", "object", writer);
Add("a note", "note", writer);
writer.Close();
@@ -194,16 +194,16 @@
q.Add(trouble, BooleanClause.Occur.MUST);
// exception will be thrown here without fix for #35626:
- Hits hits = searcher.Search(q);
- Assert.AreEqual(0, hits.Length(), "Wrong number of hits");
+ ScoreDoc[] hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length, "Wrong number of hits");
searcher.Close();
}
private void Add(System.String s, System.String type, IndexWriter writer)
{
Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
- doc.Add(new Field("body", s, Field.Store.YES, Field.Index.TOKENIZED));
- doc.Add(new Field("type", type, Field.Store.YES, Field.Index.UN_TOKENIZED));
+ doc.Add(new Field("body", s, Field.Store.YES, Field.Index.ANALYZED));
+ doc.Add(new Field("type", type, Field.Store.YES, Field.Index.NOT_ANALYZED));
writer.AddDocument(doc);
}
}
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcher.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiSearcher.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcher.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcher.cs Wed Jul 29 18:04:12 2009
@@ -112,26 +112,26 @@
// creating a document to store
Document lDoc = new Document();
- lDoc.Add(new Field("fulltext", "Once upon a time.....", Field.Store.YES, Field.Index.TOKENIZED));
- lDoc.Add(new Field("id", "doc1", Field.Store.YES, Field.Index.UN_TOKENIZED));
- lDoc.Add(new Field("handle", "1", Field.Store.YES, Field.Index.UN_TOKENIZED));
+ lDoc.Add(new Field("fulltext", "Once upon a time.....", Field.Store.YES, Field.Index.ANALYZED));
+ lDoc.Add(new Field("id", "doc1", Field.Store.YES, Field.Index.NOT_ANALYZED));
+ lDoc.Add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
// creating a document to store
Document lDoc2 = new Document();
- lDoc2.Add(new Field("fulltext", "in a galaxy far far away.....", Field.Store.YES, Field.Index.TOKENIZED));
- lDoc2.Add(new Field("id", "doc2", Field.Store.YES, Field.Index.UN_TOKENIZED));
- lDoc2.Add(new Field("handle", "1", Field.Store.YES, Field.Index.UN_TOKENIZED));
+ lDoc2.Add(new Field("fulltext", "in a galaxy far far away.....", Field.Store.YES, Field.Index.ANALYZED));
+ lDoc2.Add(new Field("id", "doc2", Field.Store.YES, Field.Index.NOT_ANALYZED));
+ lDoc2.Add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
// creating a document to store
Document lDoc3 = new Document();
- lDoc3.Add(new Field("fulltext", "a bizarre bug manifested itself....", Field.Store.YES, Field.Index.TOKENIZED));
- lDoc3.Add(new Field("id", "doc3", Field.Store.YES, Field.Index.UN_TOKENIZED));
- lDoc3.Add(new Field("handle", "1", Field.Store.YES, Field.Index.UN_TOKENIZED));
+ lDoc3.Add(new Field("fulltext", "a bizarre bug manifested itself....", Field.Store.YES, Field.Index.ANALYZED));
+ lDoc3.Add(new Field("id", "doc3", Field.Store.YES, Field.Index.NOT_ANALYZED));
+ lDoc3.Add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
// creating an index writer for the first index
- IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(), true);
+ IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
// creating an index writer for the second index, but writing nothing
- IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), true);
+ IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
//--------------------------------------------------------------------
// scenario 1
@@ -159,14 +159,14 @@
// creating the multiSearcher
Searcher mSearcher = GetMultiSearcherInstance(searchers);
// performing the search
- Hits hits = mSearcher.Search(query);
+ ScoreDoc[] hits = mSearcher.Search(query, null, 1000).scoreDocs;
- Assert.AreEqual(3, hits.Length());
+ Assert.AreEqual(3, hits.Length);
// iterating over the hit documents
- for (int i = 0; i < hits.Length(); i++)
+ for (int i = 0; i < hits.Length; i++)
{
- Document d = hits.Doc(i);
+ Document d = mSearcher.Doc(hits[i].doc);
}
mSearcher.Close();
@@ -176,7 +176,7 @@
//--------------------------------------------------------------------
// adding one document to the empty index
- writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), false);
+ writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
writerB.AddDocument(lDoc);
writerB.Optimize();
writerB.Close();
@@ -189,27 +189,27 @@
// creating the mulitSearcher
MultiSearcher mSearcher2 = GetMultiSearcherInstance(searchers2);
// performing the same search
- Hits hits2 = mSearcher2.Search(query);
+ ScoreDoc[] hits2 = mSearcher2.Search(query, null, 1000).scoreDocs;
- Assert.AreEqual(4, hits2.Length());
+ Assert.AreEqual(4, hits2.Length);
// iterating over the hit documents
- for (int i = 0; i < hits2.Length(); i++)
+ for (int i = 0; i < hits2.Length; i++)
{
// no exception should happen at this point
- Document d = hits2.Doc(i);
+ Document d = mSearcher2.Doc(hits2[i].doc);
}
// test the subSearcher() method:
Query subSearcherQuery = parser.Parse("id:doc1");
- hits2 = mSearcher2.Search(subSearcherQuery);
- Assert.AreEqual(2, hits2.Length());
- Assert.AreEqual(0, mSearcher2.SubSearcher(hits2.Id(0))); // hit from searchers2[0]
- Assert.AreEqual(1, mSearcher2.SubSearcher(hits2.Id(1))); // hit from searchers2[1]
+ hits2 = mSearcher2.Search(subSearcherQuery, null, 1000).scoreDocs;
+ Assert.AreEqual(2, hits2.Length);
+ Assert.AreEqual(0, mSearcher2.SubSearcher(hits2[0].doc)); // hit from searchers2[0]
+ Assert.AreEqual(1, mSearcher2.SubSearcher(hits2[1].doc)); // hit from searchers2[1]
subSearcherQuery = parser.Parse("id:doc2");
- hits2 = mSearcher2.Search(subSearcherQuery);
- Assert.AreEqual(1, hits2.Length());
- Assert.AreEqual(1, mSearcher2.SubSearcher(hits2.Id(0))); // hit from searchers2[1]
+ hits2 = mSearcher2.Search(subSearcherQuery, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits2.Length);
+ Assert.AreEqual(1, mSearcher2.SubSearcher(hits2[0].doc)); // hit from searchers2[1]
mSearcher2.Close();
//--------------------------------------------------------------------
@@ -223,7 +223,7 @@
readerB.Close();
// optimizing the index with the writer
- writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), false);
+ writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
writerB.Optimize();
writerB.Close();
@@ -235,14 +235,14 @@
// creating the mulitSearcher
Searcher mSearcher3 = GetMultiSearcherInstance(searchers3);
// performing the same search
- Hits hits3 = mSearcher3.Search(query);
+ ScoreDoc[] hits3 = mSearcher3.Search(query, null, 1000).scoreDocs;
- Assert.AreEqual(3, hits3.Length());
+ Assert.AreEqual(3, hits3.Length);
// iterating over the hit documents
- for (int i = 0; i < hits3.Length(); i++)
+ for (int i = 0; i < hits3.Length; i++)
{
- Document d = hits3.Doc(i);
+ Document d = mSearcher3.Doc(hits3[i].doc);
}
mSearcher3.Close();
indexStoreA.Close();
@@ -253,11 +253,11 @@
{
Document document = new Document();
- document.Add(new Field("contents", contents1, Field.Store.YES, Field.Index.UN_TOKENIZED));
- document.Add(new Field("other", "other contents", Field.Store.YES, Field.Index.UN_TOKENIZED));
+ document.Add(new Field("contents", contents1, Field.Store.YES, Field.Index.NOT_ANALYZED));
+ document.Add(new Field("other", "other contents", Field.Store.YES, Field.Index.NOT_ANALYZED));
if (contents2 != null)
{
- document.Add(new Field("contents", contents2, Field.Store.YES, Field.Index.UN_TOKENIZED));
+ document.Add(new Field("contents", contents2, Field.Store.YES, Field.Index.NOT_ANALYZED));
}
return document;
@@ -269,7 +269,7 @@
try
{
- indexWriter = new IndexWriter(directory, new KeywordAnalyzer(), create);
+ indexWriter = new IndexWriter(directory, new KeywordAnalyzer(), create, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < nDocs; i++)
{
@@ -304,10 +304,10 @@
MultiSearcher searcher = GetMultiSearcherInstance(new Searcher[]{indexSearcher1, indexSearcher2});
Assert.IsTrue(searcher != null, "searcher is null and it shouldn't be");
- Hits hits = searcher.Search(query);
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
- Assert.IsTrue(hits.Length() == 2, hits.Length() + " does not equal: " + 2);
- Document document = searcher.Doc(hits.Id(0));
+ Assert.IsTrue(hits.Length == 2, hits.Length + " does not equal: " + 2);
+ Document document = searcher.Doc(hits[0].doc);
Assert.IsTrue(document != null, "document is null and it shouldn't be");
Assert.IsTrue(document.GetFields().Count == 2, "document.getFields() Size: " + document.GetFields().Count + " is not: " + 2);
//Should be one document from each directory
@@ -315,7 +315,7 @@
System.Collections.Hashtable ftl = new System.Collections.Hashtable();
ftl.Add("other", "other");
SetBasedFieldSelector fs = new SetBasedFieldSelector(ftl, new System.Collections.Hashtable());
- document = searcher.Doc(hits.Id(0), fs);
+ document = searcher.Doc(hits[0].doc, fs);
Assert.IsTrue(document != null, "document is null and it shouldn't be");
Assert.IsTrue(document.GetFields().Count == 1, "document.getFields() Size: " + document.GetFields().Count + " is not: " + 1);
System.String value_Renamed = document.Get("contents");
@@ -325,7 +325,7 @@
ftl.Clear();
ftl.Add("contents", "contents");
fs = new SetBasedFieldSelector(ftl, new System.Collections.Hashtable());
- document = searcher.Doc(hits.Id(1), fs);
+ document = searcher.Doc(hits[1].doc, fs);
value_Renamed = document.Get("contents");
Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be");
value_Renamed = document.Get("other");
@@ -350,7 +350,7 @@
RAMDirectory ramDirectory1;
IndexSearcher indexSearcher1;
- Hits hits;
+ ScoreDoc[] hits;
ramDirectory1 = new MockRAMDirectory();
@@ -359,15 +359,13 @@
InitIndex(ramDirectory1, nDocs, false, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc...
indexSearcher1 = new IndexSearcher(ramDirectory1);
+
+ hits = indexSearcher1.Search(query, null, 1000).scoreDocs;
- hits = indexSearcher1.Search(query);
-
- Assert.AreEqual(2, hits.Length(), message);
-
- Assert.AreEqual(1, hits.Score(0), 1e-6, message); // hits.score(0) is 0.594535 if only a single document is in first index
+ Assert.AreEqual(2, hits.Length, message);
// Store the scores for use later
- float[] scores = new float[]{hits.Score(0), hits.Score(1)};
+ float[] scores = new float[]{hits[0].score, hits[1].score};
Assert.IsTrue(scores[0] > scores[1], message);
@@ -391,24 +389,24 @@
indexSearcher2 = new IndexSearcher(ramDirectory2);
Searcher searcher = GetMultiSearcherInstance(new Searcher[]{indexSearcher1, indexSearcher2});
+
+ hits = searcher.Search(query, null, 1000).scoreDocs;
- hits = searcher.Search(query);
-
- Assert.AreEqual(2, hits.Length(), message);
+ Assert.AreEqual(2, hits.Length, message);
// The scores should be the same (within reason)
- Assert.AreEqual(scores[0], hits.Score(0), 1e-6, message); // This will a document from ramDirectory1
- Assert.AreEqual(scores[1], hits.Score(1), 1e-6, message); // This will a document from ramDirectory2
+ Assert.AreEqual(scores[0], hits[0].score, 1e-6, message); // This will be a document from ramDirectory1
+ Assert.AreEqual(scores[1], hits[1].score, 1e-6, message); // This will be a document from ramDirectory2
// Adding a Sort.RELEVANCE object should not change anything
- hits = searcher.Search(query, Sort.RELEVANCE);
+ hits = searcher.Search(query, null, 1000, Sort.RELEVANCE).scoreDocs;
- Assert.AreEqual(2, hits.Length(), message);
+ Assert.AreEqual(2, hits.Length, message);
- Assert.AreEqual(scores[0], hits.Score(0), 1e-6, message); // This will a document from ramDirectory1
- Assert.AreEqual(scores[1], hits.Score(1), 1e-6, message); // This will a document from ramDirectory2
+ Assert.AreEqual(scores[0], hits[0].score, 1e-6, message); // This will be a document from ramDirectory1
+ Assert.AreEqual(scores[1], hits[1].score, 1e-6, message); // This will be a document from ramDirectory2
searcher.Close();
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcherRanking.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiSearcherRanking.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcherRanking.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcherRanking.cs Wed Jul 29 18:04:12 2009
@@ -111,18 +111,18 @@
System.Console.Out.WriteLine("Query: " + queryStr);
Lucene.Net.QueryParsers.QueryParser queryParser = new Lucene.Net.QueryParsers.QueryParser(FIELD_NAME, new StandardAnalyzer());
Lucene.Net.Search.Query query = queryParser.Parse(queryStr);
- Hits multiSearcherHits = multiSearcher.Search(query);
- Hits singleSearcherHits = singleSearcher.Search(query);
- Assert.AreEqual(multiSearcherHits.Length(), singleSearcherHits.Length());
- for (int i = 0; i < multiSearcherHits.Length(); i++)
+ ScoreDoc[] multiSearcherHits = multiSearcher.Search(query, null, 1000).scoreDocs;
+ ScoreDoc[] singleSearcherHits = singleSearcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(multiSearcherHits.Length, singleSearcherHits.Length);
+ for (int i = 0; i < multiSearcherHits.Length; i++)
{
- Lucene.Net.Documents.Document docMulti = multiSearcherHits.Doc(i);
- Lucene.Net.Documents.Document docSingle = singleSearcherHits.Doc(i);
+ Lucene.Net.Documents.Document docMulti = multiSearcher.Doc(multiSearcherHits[i].doc);
+ Lucene.Net.Documents.Document docSingle = singleSearcher.Doc(singleSearcherHits[i].doc);
if (verbose)
- System.Console.Out.WriteLine("Multi: " + docMulti.Get(FIELD_NAME) + " score=" + multiSearcherHits.Score(i));
+ System.Console.Out.WriteLine("Multi: " + docMulti.Get(FIELD_NAME) + " score=" + multiSearcherHits[i].score);
if (verbose)
- System.Console.Out.WriteLine("Single: " + docSingle.Get(FIELD_NAME) + " score=" + singleSearcherHits.Score(i));
- Assert.AreEqual(multiSearcherHits.Score(i), singleSearcherHits.Score(i), 0.001f);
+ System.Console.Out.WriteLine("Single: " + docSingle.Get(FIELD_NAME) + " score=" + singleSearcherHits[i].score);
+ Assert.AreEqual(multiSearcherHits[i].score, singleSearcherHits[i].score, 0.001f);
Assert.AreEqual(docMulti.Get(FIELD_NAME), docSingle.Get(FIELD_NAME));
}
if (verbose)
@@ -136,11 +136,11 @@
base.SetUp();
// create MultiSearcher from two separate searchers
Directory d1 = new RAMDirectory();
- IndexWriter iw1 = new IndexWriter(d1, new StandardAnalyzer(), true);
+ IndexWriter iw1 = new IndexWriter(d1, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
AddCollection1(iw1);
iw1.Close();
Directory d2 = new RAMDirectory();
- IndexWriter iw2 = new IndexWriter(d2, new StandardAnalyzer(), true);
+ IndexWriter iw2 = new IndexWriter(d2, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
AddCollection2(iw2);
iw2.Close();
@@ -151,7 +151,7 @@
// create IndexSearcher which contains all documents
Directory d = new RAMDirectory();
- IndexWriter iw = new IndexWriter(d, new StandardAnalyzer(), true);
+ IndexWriter iw = new IndexWriter(d, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
AddCollection1(iw);
AddCollection2(iw);
iw.Close();
@@ -182,7 +182,7 @@
private void Add(System.String value_Renamed, IndexWriter iw)
{
Lucene.Net.Documents.Document d = new Lucene.Net.Documents.Document();
- d.Add(new Field(FIELD_NAME, value_Renamed, Field.Store.YES, Field.Index.TOKENIZED));
+ d.Add(new Field(FIELD_NAME, value_Renamed, Field.Store.YES, Field.Index.ANALYZED));
iw.AddDocument(d);
}
}
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiThreadTermVectors.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiThreadTermVectors.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiThreadTermVectors.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiThreadTermVectors.cs Wed Jul 29 18:04:12 2009
@@ -46,13 +46,13 @@
public override void SetUp()
{
base.SetUp();
- IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true);
+ IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
//writer.setUseCompoundFile(false);
//writer.infoStream = System.out;
for (int i = 0; i < numDocs; i++)
{
Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
- Fieldable fld = new Field("field", English.IntToEnglish(i), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.YES);
+ Fieldable fld = new Field("field", English.IntToEnglish(i), Field.Store.YES, Field.Index.NOT_ANALYZED, Field.TermVector.YES);
doc.Add(fld);
writer.AddDocument(doc);
}
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestNot.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestNot.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestNot.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestNot.cs Wed Jul 29 18:04:12 2009
@@ -44,10 +44,10 @@
public virtual void TestNot_Renamed_Method()
{
RAMDirectory store = new RAMDirectory();
- IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true);
+ IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
- d1.Add(new Field("field", "a b", Field.Store.YES, Field.Index.TOKENIZED));
+ d1.Add(new Field("field", "a b", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(d1);
writer.Optimize();
@@ -57,8 +57,8 @@
Lucene.Net.QueryParsers.QueryParser parser = new Lucene.Net.QueryParsers.QueryParser("field", new SimpleAnalyzer());
Lucene.Net.Search.Query query = parser.Parse("a NOT b");
//System.out.println(query);
- Hits hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
}
}
}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPhrasePrefixQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPhrasePrefixQuery.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPhrasePrefixQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPhrasePrefixQuery.cs Wed Jul 29 18:04:12 2009
@@ -46,17 +46,17 @@
public virtual void TestPhrasePrefix()
{
RAMDirectory indexStore = new RAMDirectory();
- IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
+ IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
Lucene.Net.Documents.Document doc1 = new Lucene.Net.Documents.Document();
Lucene.Net.Documents.Document doc2 = new Lucene.Net.Documents.Document();
Lucene.Net.Documents.Document doc3 = new Lucene.Net.Documents.Document();
Lucene.Net.Documents.Document doc4 = new Lucene.Net.Documents.Document();
Lucene.Net.Documents.Document doc5 = new Lucene.Net.Documents.Document();
- doc1.Add(new Field("body", "blueberry pie", Field.Store.YES, Field.Index.TOKENIZED));
- doc2.Add(new Field("body", "blueberry strudel", Field.Store.YES, Field.Index.TOKENIZED));
- doc3.Add(new Field("body", "blueberry pizza", Field.Store.YES, Field.Index.TOKENIZED));
- doc4.Add(new Field("body", "blueberry chewing gum", Field.Store.YES, Field.Index.TOKENIZED));
- doc5.Add(new Field("body", "piccadilly circus", Field.Store.YES, Field.Index.TOKENIZED));
+ doc1.Add(new Field("body", "blueberry pie", Field.Store.YES, Field.Index.ANALYZED));
+ doc2.Add(new Field("body", "blueberry strudel", Field.Store.YES, Field.Index.ANALYZED));
+ doc3.Add(new Field("body", "blueberry pizza", Field.Store.YES, Field.Index.ANALYZED));
+ doc4.Add(new Field("body", "blueberry chewing gum", Field.Store.YES, Field.Index.ANALYZED));
+ doc5.Add(new Field("body", "piccadilly circus", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc1);
writer.AddDocument(doc2);
writer.AddDocument(doc3);
@@ -92,12 +92,12 @@
query1.Add((Term[]) termsWithPrefix.ToArray(typeof(Term)));
query2.Add((Term[]) termsWithPrefix.ToArray(typeof(Term)));
- Hits result;
- result = searcher.Search(query1);
- Assert.AreEqual(2, result.Length());
-
- result = searcher.Search(query2);
- Assert.AreEqual(0, result.Length());
+ ScoreDoc[] result;
+ result = searcher.Search(query1, null, 1000).scoreDocs;
+ Assert.AreEqual(2, result.Length);
+
+ result = searcher.Search(query2, null, 1000).scoreDocs;
+ Assert.AreEqual(0, result.Length);
}
}
}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPhraseQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPhraseQuery.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPhraseQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPhraseQuery.cs Wed Jul 29 18:04:12 2009
@@ -19,13 +19,14 @@
using NUnit.Framework;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using Lucene.Net.Analysis;
using Lucene.Net.Documents;
using IndexWriter = Lucene.Net.Index.IndexWriter;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
using Term = Lucene.Net.Index.Term;
using Directory = Lucene.Net.Store.Directory;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using Lucene.Net.Analysis;
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
namespace Lucene.Net.Search
{
@@ -35,8 +36,6 @@
/// </summary>
/// <seealso cref="TestPositionIncrement">
/// </seealso>
- /// <author> Erik Hatcher
- /// </author>
[TestFixture]
public class TestPhraseQuery : LuceneTestCase
{
@@ -57,7 +56,6 @@
{
return enclosingInstance;
}
-
}
public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
@@ -84,22 +82,22 @@
base.SetUp();
directory = new RAMDirectory();
Analyzer analyzer = new AnonymousClassAnalyzer(this);
- IndexWriter writer = new IndexWriter(directory, analyzer, true);
+ IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
- doc.Add(new Field("field", "one two three four five", Field.Store.YES, Field.Index.TOKENIZED));
- doc.Add(new Field("repeated", "this is a repeated field - first part", Field.Store.YES, Field.Index.TOKENIZED));
- Fieldable repeatedField = new Field("repeated", "second part of a repeated field", Field.Store.YES, Field.Index.TOKENIZED);
+ doc.Add(new Field("field", "one two three four five", Field.Store.YES, Field.Index.ANALYZED));
+ doc.Add(new Field("repeated", "this is a repeated field - first part", Field.Store.YES, Field.Index.ANALYZED));
+ Fieldable repeatedField = new Field("repeated", "second part of a repeated field", Field.Store.YES, Field.Index.ANALYZED);
doc.Add(repeatedField);
- doc.Add(new Field("palindrome", "one two three two one", Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("palindrome", "one two three two one", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
doc = new Document();
- doc.Add(new Field("nonexist", "phrase exist notexist exist found", Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("nonexist", "phrase exist notexist exist found", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
doc = new Document();
- doc.Add(new Field("nonexist", "phrase exist notexist exist found", Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("nonexist", "phrase exist notexist exist found", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
writer.Optimize();
@@ -123,8 +121,8 @@
query.SetSlop(2);
query.Add(new Term("field", "one"));
query.Add(new Term("field", "five"));
- Hits hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
QueryUtils.Check(query, searcher);
}
@@ -134,8 +132,8 @@
query.SetSlop(3);
query.Add(new Term("field", "one"));
query.Add(new Term("field", "five"));
- Hits hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
QueryUtils.Check(query, searcher);
}
@@ -146,16 +144,16 @@
// slop is zero by default
query.Add(new Term("field", "four"));
query.Add(new Term("field", "five"));
- Hits hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length(), "exact match");
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length, "exact match");
QueryUtils.Check(query, searcher);
query = new PhraseQuery();
query.Add(new Term("field", "two"));
query.Add(new Term("field", "one"));
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length(), "reverse not exact");
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length, "reverse not exact");
QueryUtils.Check(query, searcher);
}
@@ -166,8 +164,8 @@
query.SetSlop(1);
query.Add(new Term("field", "one"));
query.Add(new Term("field", "two"));
- Hits hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length(), "in order");
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length, "in order");
QueryUtils.Check(query, searcher);
@@ -177,8 +175,8 @@
query.SetSlop(1);
query.Add(new Term("field", "two"));
query.Add(new Term("field", "one"));
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length(), "reversed, slop not 2 or more");
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length, "reversed, slop not 2 or more");
QueryUtils.Check(query, searcher);
}
@@ -189,8 +187,8 @@
query.SetSlop(2); // must be at least two for reverse order match
query.Add(new Term("field", "two"));
query.Add(new Term("field", "one"));
- Hits hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length(), "just sloppy enough");
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length, "just sloppy enough");
QueryUtils.Check(query, searcher);
@@ -198,8 +196,8 @@
query.SetSlop(2);
query.Add(new Term("field", "three"));
query.Add(new Term("field", "one"));
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length(), "not sloppy enough");
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length, "not sloppy enough");
QueryUtils.Check(query, searcher);
}
@@ -213,8 +211,8 @@
query.Add(new Term("field", "one"));
query.Add(new Term("field", "three"));
query.Add(new Term("field", "five"));
- Hits hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length(), "two total moves");
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length, "two total moves");
QueryUtils.Check(query, searcher);
@@ -223,14 +221,14 @@
query.Add(new Term("field", "five"));
query.Add(new Term("field", "three"));
query.Add(new Term("field", "one"));
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length(), "slop of 5 not close enough");
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length, "slop of 5 not close enough");
QueryUtils.Check(query, searcher);
query.SetSlop(6);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length(), "slop of 6 just right");
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length, "slop of 6 just right");
QueryUtils.Check(query, searcher);
}
@@ -239,9 +237,9 @@
{
RAMDirectory directory = new RAMDirectory();
StopAnalyzer stopAnalyzer = new StopAnalyzer();
- IndexWriter writer = new IndexWriter(directory, stopAnalyzer, true);
+ IndexWriter writer = new IndexWriter(directory, stopAnalyzer, true, IndexWriter.MaxFieldLength.LIMITED);
Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
- doc.Add(new Field("field", "the stop words are here", Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("field", "the stop words are here", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
writer.Close();
@@ -251,8 +249,8 @@
PhraseQuery query = new PhraseQuery();
query.Add(new Term("field", "stop"));
query.Add(new Term("field", "words"));
- Hits hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
QueryUtils.Check(query, searcher);
@@ -260,8 +258,8 @@
query = new PhraseQuery();
query.Add(new Term("field", "words"));
query.Add(new Term("field", "here"));
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
QueryUtils.Check(query, searcher);
@@ -272,15 +270,15 @@
public virtual void TestPhraseQueryInConjunctionScorer()
{
RAMDirectory directory = new RAMDirectory();
- IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+ IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
- doc.Add(new Field("source", "marketing info", Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("source", "marketing info", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
doc = new Lucene.Net.Documents.Document();
- doc.Add(new Field("contents", "foobar", Field.Store.YES, Field.Index.TOKENIZED));
- doc.Add(new Field("source", "marketing info", Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("contents", "foobar", Field.Store.YES, Field.Index.ANALYZED));
+ doc.Add(new Field("source", "marketing info", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
writer.Optimize();
@@ -291,33 +289,33 @@
PhraseQuery phraseQuery = new PhraseQuery();
phraseQuery.Add(new Term("source", "marketing"));
phraseQuery.Add(new Term("source", "info"));
- Hits hits = searcher.Search(phraseQuery);
- Assert.AreEqual(2, hits.Length());
+ ScoreDoc[] hits = searcher.Search(phraseQuery, null, 1000).scoreDocs;
+ Assert.AreEqual(2, hits.Length);
QueryUtils.Check(phraseQuery, searcher);
TermQuery termQuery = new TermQuery(new Term("contents", "foobar"));
- BooleanQuery booleanQuery = new BooleanQuery();
- booleanQuery.Add(termQuery, BooleanClause.Occur.MUST);
- booleanQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
- hits = searcher.Search(booleanQuery);
- Assert.AreEqual(1, hits.Length());
+ BooleanQuery boolQuery = new BooleanQuery();
+ boolQuery.Add(termQuery, BooleanClause.Occur.MUST);
+ boolQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
+ hits = searcher.Search(boolQuery, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
QueryUtils.Check(termQuery, searcher);
searcher.Close();
- writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+ writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
doc = new Lucene.Net.Documents.Document();
- doc.Add(new Field("contents", "map entry woo", Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("contents", "map entry woo", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
doc = new Lucene.Net.Documents.Document();
- doc.Add(new Field("contents", "woo map entry", Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("contents", "woo map entry", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
doc = new Lucene.Net.Documents.Document();
- doc.Add(new Field("contents", "map foobarword entry woo", Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("contents", "map foobarword entry woo", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
writer.Optimize();
@@ -330,24 +328,24 @@
phraseQuery.Add(new Term("contents", "map"));
phraseQuery.Add(new Term("contents", "entry"));
- hits = searcher.Search(termQuery);
- Assert.AreEqual(3, hits.Length());
- hits = searcher.Search(phraseQuery);
- Assert.AreEqual(2, hits.Length());
-
-
- booleanQuery = new BooleanQuery();
- booleanQuery.Add(termQuery, BooleanClause.Occur.MUST);
- booleanQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
- hits = searcher.Search(booleanQuery);
- Assert.AreEqual(2, hits.Length());
-
- booleanQuery = new BooleanQuery();
- booleanQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
- booleanQuery.Add(termQuery, BooleanClause.Occur.MUST);
- hits = searcher.Search(booleanQuery);
- Assert.AreEqual(2, hits.Length());
- QueryUtils.Check(booleanQuery, searcher);
+ hits = searcher.Search(termQuery, null, 1000).scoreDocs;
+ Assert.AreEqual(3, hits.Length);
+ hits = searcher.Search(phraseQuery, null, 1000).scoreDocs;
+ Assert.AreEqual(2, hits.Length);
+
+
+ boolQuery = new BooleanQuery();
+ boolQuery.Add(termQuery, BooleanClause.Occur.MUST);
+ boolQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
+ hits = searcher.Search(boolQuery, null, 1000).scoreDocs;
+ Assert.AreEqual(2, hits.Length);
+
+ boolQuery = new BooleanQuery();
+ boolQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
+ boolQuery.Add(termQuery, BooleanClause.Occur.MUST);
+ hits = searcher.Search(boolQuery, null, 1000).scoreDocs;
+ Assert.AreEqual(2, hits.Length);
+ QueryUtils.Check(boolQuery, searcher);
searcher.Close();
@@ -358,18 +356,18 @@
public virtual void TestSlopScoring()
{
Directory directory = new RAMDirectory();
- IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+ IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
- doc.Add(new Field("field", "foo firstname lastname foo", Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("field", "foo firstname lastname foo", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
Lucene.Net.Documents.Document doc2 = new Lucene.Net.Documents.Document();
- doc2.Add(new Field("field", "foo firstname xxx lastname foo", Field.Store.YES, Field.Index.TOKENIZED));
+ doc2.Add(new Field("field", "foo firstname xxx lastname foo", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc2);
Lucene.Net.Documents.Document doc3 = new Lucene.Net.Documents.Document();
- doc3.Add(new Field("field", "foo firstname xxx yyy lastname foo", Field.Store.YES, Field.Index.TOKENIZED));
+ doc3.Add(new Field("field", "foo firstname xxx yyy lastname foo", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc3);
writer.Optimize();
@@ -380,20 +378,33 @@
query.Add(new Term("field", "firstname"));
query.Add(new Term("field", "lastname"));
query.SetSlop(System.Int32.MaxValue);
- Hits hits = searcher.Search(query);
- Assert.AreEqual(3, hits.Length());
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(3, hits.Length);
// Make sure that those matches where the terms appear closer to
// each other get a higher score:
- Assert.AreEqual(0.71, hits.Score(0), 0.01);
- Assert.AreEqual(0, hits.Id(0));
- Assert.AreEqual(0.44, hits.Score(1), 0.01);
- Assert.AreEqual(1, hits.Id(1));
- Assert.AreEqual(0.31, hits.Score(2), 0.01);
- Assert.AreEqual(2, hits.Id(2));
+ Assert.AreEqual(0.71, hits[0].score, 0.01);
+ Assert.AreEqual(0, hits[0].doc);
+ Assert.AreEqual(0.44, hits[1].score, 0.01);
+ Assert.AreEqual(1, hits[1].doc);
+ Assert.AreEqual(0.31, hits[2].score, 0.01);
+ Assert.AreEqual(2, hits[2].doc);
QueryUtils.Check(query, searcher);
}
-
- [Test]
+
+ [Test]
+ public void TestToString()
+ {
+ StopAnalyzer analyzer = new StopAnalyzer();
+ StopFilter.SetEnablePositionIncrementsDefault(true);
+ QueryParser qp = new QueryParser("field", analyzer);
+ qp.SetEnablePositionIncrements(true);
+ PhraseQuery q = (PhraseQuery)qp.Parse("\"this hi this is a test is\"");
+ Assert.AreEqual("field:\"? hi ? ? ? test\"", q.ToString());
+ q.Add(new Term("field", "hello"), 1);
+ Assert.AreEqual("field:\"? hi|hello ? ? ? test\"", q.ToString());
+ }
+
+ [Test]
public virtual void TestWrappedPhrase()
{
query.Add(new Term("repeated", "first"));
@@ -402,14 +413,14 @@
query.Add(new Term("repeated", "part"));
query.SetSlop(100);
- Hits hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length(), "slop of 100 just right");
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length, "slop of 100 just right");
QueryUtils.Check(query, searcher);
query.SetSlop(99);
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length(), "slop of 99 not enough");
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length, "slop of 99 not enough");
QueryUtils.Check(query, searcher);
}
@@ -423,8 +434,8 @@
query.Add(new Term("nonexist", "found"));
query.SetSlop(2); // would be found this way
- Hits hits = searcher.Search(query);
- Assert.AreEqual(2, hits.Length(), "phrase without repetitions exists in 2 docs");
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(2, hits.Length, "phrase without repetitions exists in 2 docs");
QueryUtils.Check(query, searcher);
// phrase with repetitions that exists in 2 docs
@@ -434,8 +445,8 @@
query.Add(new Term("nonexist", "exist"));
query.SetSlop(1); // would be found
- hits = searcher.Search(query);
- Assert.AreEqual(2, hits.Length(), "phrase with repetitions exists in two docs");
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(2, hits.Length, "phrase with repetitions exists in two docs");
QueryUtils.Check(query, searcher);
// phrase I with repetitions that does not exist in any doc
@@ -445,8 +456,8 @@
query.Add(new Term("nonexist", "phrase"));
query.SetSlop(1000); // would not be found no matter how high the slop is
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length(), "nonexisting phrase with repetitions does not exist in any doc");
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length, "nonexisting phrase with repetitions does not exist in any doc");
QueryUtils.Check(query, searcher);
// phrase II with repetitions that does not exist in any doc
@@ -457,8 +468,8 @@
query.Add(new Term("nonexist", "exist"));
query.SetSlop(1000); // would not be found no matter how high the slop is
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length(), "nonexisting phrase with repetitions does not exist in any doc");
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length, "nonexisting phrase with repetitions does not exist in any doc");
QueryUtils.Check(query, searcher);
}
@@ -477,17 +488,17 @@
query.SetSlop(0); // to use exact phrase scorer
query.Add(new Term("field", "two"));
query.Add(new Term("field", "three"));
- Hits hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length(), "phrase found with exact phrase scorer");
- float score0 = hits.Score(0);
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length, "phrase found with exact phrase scorer");
+ float score0 = hits[0].score;
//System.out.println("(exact) field: two three: "+score0);
QueryUtils.Check(query, searcher);
// search on non palindrome, find phrase with slop 2, though no slop required here.
query.SetSlop(2); // to use sloppy scorer
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length(), "just sloppy enough");
- float score1 = hits.Score(0);
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length, "just sloppy enough");
+ float score1 = hits[0].score;
//System.out.println("(sloppy) field: two three: "+score1);
Assert.AreEqual(score0, score1, SCORE_COMP_THRESH, "exact scorer and sloppy scorer score the same when slop does not matter");
QueryUtils.Check(query, searcher);
@@ -497,9 +508,9 @@
query.SetSlop(2); // must be at least two for both ordered and reversed to match
query.Add(new Term("palindrome", "two"));
query.Add(new Term("palindrome", "three"));
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length(), "just sloppy enough");
- float score2 = hits.Score(0);
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length, "just sloppy enough");
+ float score2 = hits[0].score;
//System.out.println("palindrome: two three: "+score2);
QueryUtils.Check(query, searcher);
@@ -511,9 +522,9 @@
query.SetSlop(2); // must be at least two for both ordered and reversed to match
query.Add(new Term("palindrome", "three"));
query.Add(new Term("palindrome", "two"));
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length(), "just sloppy enough");
- float score3 = hits.Score(0);
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length, "just sloppy enough");
+ float score3 = hits[0].score;
//System.out.println("palindrome: three two: "+score3);
QueryUtils.Check(query, searcher);
@@ -539,17 +550,17 @@
query.Add(new Term("field", "one"));
query.Add(new Term("field", "two"));
query.Add(new Term("field", "three"));
- Hits hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length(), "phrase found with exact phrase scorer");
- float score0 = hits.Score(0);
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length, "phrase found with exact phrase scorer");
+ float score0 = hits[0].score;
//System.out.println("(exact) field: one two three: "+score0);
QueryUtils.Check(query, searcher);
// search on non palyndrome, find phrase with slop 3, though no slop required here.
query.SetSlop(4); // to use sloppy scorer
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length(), "just sloppy enough");
- float score1 = hits.Score(0);
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length, "just sloppy enough");
+ float score1 = hits[0].score;
//System.out.println("(sloppy) field: one two three: "+score1);
Assert.AreEqual(score0, score1, SCORE_COMP_THRESH, "exact scorer and sloppy scorer score the same when slop does not matter");
QueryUtils.Check(query, searcher);
@@ -560,9 +571,9 @@
query.Add(new Term("palindrome", "one"));
query.Add(new Term("palindrome", "two"));
query.Add(new Term("palindrome", "three"));
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length(), "just sloppy enough");
- float score2 = hits.Score(0);
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length, "just sloppy enough");
+ float score2 = hits[0].score;
//System.out.println("palindrome: one two three: "+score2);
QueryUtils.Check(query, searcher);
@@ -575,9 +586,9 @@
query.Add(new Term("palindrome", "three"));
query.Add(new Term("palindrome", "two"));
query.Add(new Term("palindrome", "one"));
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length(), "just sloppy enough");
- float score3 = hits.Score(0);
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length, "just sloppy enough");
+ float score3 = hits[0].score;
//System.out.println("palindrome: three two one: "+score3);
QueryUtils.Check(query, searcher);
@@ -585,5 +596,15 @@
//assertTrue("reversed scores higher in palindrome",score1+SCORE_COMP_THRESH<score3);
//assertEquals("ordered or reversed does not matter",score2, score3, SCORE_COMP_THRESH);
}
+
+ // LUCENE-1280
+ [Test]
+ public void TestEmptyPhraseQuery()
+ {
+ PhraseQuery q1 = new PhraseQuery();
+ BooleanQuery q2 = new BooleanQuery();
+ q2.Add(new PhraseQuery(), BooleanClause.Occur.MUST);
+ q2.ToString();
+ }
}
}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPositionIncrement.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPositionIncrement.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPositionIncrement.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPositionIncrement.cs Wed Jul 29 18:04:12 2009
@@ -76,14 +76,15 @@
private int[] INCREMENTS = new int[]{1, 2, 1, 0, 1};
private int i = 0;
- public override Token Next()
+ public override Token Next(Token reusableToken)
{
+ System.Diagnostics.Debug.Assert(reusableToken != null);
if (i == TOKENS.Length)
return null;
- Token t = new Token(TOKENS[i], i, i);
- t.SetPositionIncrement(INCREMENTS[i]);
+ reusableToken.Reinit(TOKENS[i], i, i);
+ reusableToken.SetPositionIncrement(INCREMENTS[i]);
i++;
- return t;
+ return reusableToken;
}
}
private void InitBlock(TestPositionIncrement enclosingInstance)
@@ -137,94 +138,94 @@
{
Analyzer analyzer = new AnonymousClassAnalyzer(this);
RAMDirectory store = new RAMDirectory();
- IndexWriter writer = new IndexWriter(store, analyzer, true);
+ IndexWriter writer = new IndexWriter(store, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
Document d = new Document();
- d.Add(new Field("field", "bogus", Field.Store.YES, Field.Index.TOKENIZED));
+ d.Add(new Field("field", "bogus", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(d);
writer.Optimize();
writer.Close();
IndexSearcher searcher = new IndexSearcher(store);
PhraseQuery q;
- Hits hits;
+ ScoreDoc[] hits;
q = new PhraseQuery();
q.Add(new Term("field", "1"));
q.Add(new Term("field", "2"));
- hits = searcher.Search(q);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// same as previous, just specify positions explicitely.
q = new PhraseQuery();
q.Add(new Term("field", "1"), 0);
q.Add(new Term("field", "2"), 1);
- hits = searcher.Search(q);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// specifying correct positions should find the phrase.
q = new PhraseQuery();
q.Add(new Term("field", "1"), 0);
q.Add(new Term("field", "2"), 2);
- hits = searcher.Search(q);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
q = new PhraseQuery();
q.Add(new Term("field", "2"));
q.Add(new Term("field", "3"));
- hits = searcher.Search(q);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
q = new PhraseQuery();
q.Add(new Term("field", "3"));
q.Add(new Term("field", "4"));
- hits = searcher.Search(q);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// phrase query would find it when correct positions are specified.
q = new PhraseQuery();
q.Add(new Term("field", "3"), 0);
q.Add(new Term("field", "4"), 0);
- hits = searcher.Search(q);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
// phrase query should fail for non existing searched term
// even if there exist another searched terms in the same searched position.
q = new PhraseQuery();
q.Add(new Term("field", "3"), 0);
q.Add(new Term("field", "9"), 0);
- hits = searcher.Search(q);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// multi-phrase query should succed for non existing searched term
// because there exist another searched terms in the same searched position.
MultiPhraseQuery mq = new MultiPhraseQuery();
mq.Add(new Term[]{new Term("field", "3"), new Term("field", "9")}, 0);
- hits = searcher.Search(mq);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(mq, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
q = new PhraseQuery();
q.Add(new Term("field", "2"));
q.Add(new Term("field", "4"));
- hits = searcher.Search(q);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
q = new PhraseQuery();
q.Add(new Term("field", "3"));
q.Add(new Term("field", "5"));
- hits = searcher.Search(q);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
q = new PhraseQuery();
q.Add(new Term("field", "4"));
q.Add(new Term("field", "5"));
- hits = searcher.Search(q);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
q = new PhraseQuery();
q.Add(new Term("field", "2"));
q.Add(new Term("field", "5"));
- hits = searcher.Search(q);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// analyzer to introduce stopwords and increment gaps
Analyzer stpa = new AnonymousClassAnalyzer1(this);
@@ -232,35 +233,36 @@
// should not find "1 2" because there is a gap of 1 in the index
Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", stpa);
q = (PhraseQuery) qp.Parse("\"1 2\"");
- hits = searcher.Search(q);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// omitted stop word cannot help because stop filter swallows the increments.
q = (PhraseQuery) qp.Parse("\"1 stop 2\"");
- hits = searcher.Search(q);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// query parser alone won't help, because stop filter swallows the increments.
- qp.SetEnablePositionIncrements(true);
+ bool dflt = StopFilter.GetEnablePositionIncrementsDefault();
+ StopFilter.SetEnablePositionIncrementsDefault(false);
+ qp.SetEnablePositionIncrements(true);
q = (PhraseQuery) qp.Parse("\"1 stop 2\"");
- hits = searcher.Search(q);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
- bool dflt = StopFilter.GetEnablePositionIncrementsDefault();
try
{
// stop filter alone won't help, because query parser swallows the increments.
qp.SetEnablePositionIncrements(false);
StopFilter.SetEnablePositionIncrementsDefault(true);
q = (PhraseQuery) qp.Parse("\"1 stop 2\"");
- hits = searcher.Search(q);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// when both qp qnd stopFilter propagate increments, we should find the doc.
qp.SetEnablePositionIncrements(true);
q = (PhraseQuery) qp.Parse("\"1 stop 2\"");
- hits = searcher.Search(q);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(q, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
}
finally
{
@@ -276,13 +278,11 @@
{
Analyzer analyzer = new WhitespaceAnalyzer();
TokenStream ts = analyzer.TokenStream("field", new System.IO.StringReader("one two three four five"));
-
- while (true)
+
+ Token reusableToken = new Token();
+ for (Token nextToken = ts.Next(reusableToken); nextToken != null; nextToken = ts.Next(reusableToken))
{
- Token token = ts.Next();
- if (token == null)
- break;
- Assert.AreEqual(1, token.GetPositionIncrement(), token.TermText());
+ Assert.AreEqual(1, nextToken.GetPositionIncrement(), nextToken.TermText());
}
}
}
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPrefixFilter.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixFilter.cs Wed Jul 29 18:04:12 2009
@@ -33,10 +33,6 @@
/// <summary> Tests {@link PrefixFilter} class.
///
/// </summary>
- /// <author> Yura Smolsky
- /// </author>
- /// <author> yonik
- /// </author>
[TestFixture]
public class TestPrefixFilter : LuceneTestCase
{
@@ -46,11 +42,11 @@
RAMDirectory directory = new RAMDirectory();
System.String[] categories = new System.String[]{"/Computers/Linux", "/Computers/Mac/One", "/Computers/Mac/Two", "/Computers/Windows"};
- IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+ IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < categories.Length; i++)
{
Document doc = new Document();
- doc.Add(new Field("category", categories[i], Field.Store.YES, Field.Index.UN_TOKENIZED));
+ doc.Add(new Field("category", categories[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
writer.AddDocument(doc);
}
writer.Close();
@@ -59,56 +55,56 @@
PrefixFilter filter = new PrefixFilter(new Term("category", "/Computers"));
Query query = new ConstantScoreQuery(filter);
IndexSearcher searcher = new IndexSearcher(directory);
- Hits hits = searcher.Search(query);
- Assert.AreEqual(4, hits.Length());
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(4, hits.Length);
// test middle of values
filter = new PrefixFilter(new Term("category", "/Computers/Mac"));
query = new ConstantScoreQuery(filter);
- hits = searcher.Search(query);
- Assert.AreEqual(2, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(2, hits.Length);
// test start of values
filter = new PrefixFilter(new Term("category", "/Computers/Linux"));
query = new ConstantScoreQuery(filter);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
// test end of values
filter = new PrefixFilter(new Term("category", "/Computers/Windows"));
query = new ConstantScoreQuery(filter);
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length);
// test non-existant
filter = new PrefixFilter(new Term("category", "/Computers/ObsoleteOS"));
query = new ConstantScoreQuery(filter);
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// test non-existant, before values
filter = new PrefixFilter(new Term("category", "/Computers/AAA"));
query = new ConstantScoreQuery(filter);
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// test non-existant, after values
filter = new PrefixFilter(new Term("category", "/Computers/ZZZ"));
query = new ConstantScoreQuery(filter);
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
// test zero length prefix
filter = new PrefixFilter(new Term("category", ""));
query = new ConstantScoreQuery(filter);
- hits = searcher.Search(query);
- Assert.AreEqual(4, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(4, hits.Length);
// test non existent field
filter = new PrefixFilter(new Term("nonexistantfield", "/Computers"));
query = new ConstantScoreQuery(filter);
- hits = searcher.Search(query);
- Assert.AreEqual(0, hits.Length());
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(0, hits.Length);
}
}
}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPrefixQuery.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixQuery.cs Wed Jul 29 18:04:12 2009
@@ -44,23 +44,23 @@
RAMDirectory directory = new RAMDirectory();
System.String[] categories = new System.String[]{"/Computers", "/Computers/Mac", "/Computers/Windows"};
- IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+ IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < categories.Length; i++)
{
Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
- doc.Add(new Field("category", categories[i], Field.Store.YES, Field.Index.UN_TOKENIZED));
+ doc.Add(new Field("category", categories[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
writer.AddDocument(doc);
}
writer.Close();
PrefixQuery query = new PrefixQuery(new Term("category", "/Computers"));
IndexSearcher searcher = new IndexSearcher(directory);
- Hits hits = searcher.Search(query);
- Assert.AreEqual(3, hits.Length(), "All documents in /Computers category and below");
+ ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(3, hits.Length, "All documents in /Computers category and below");
query = new PrefixQuery(new Term("category", "/Computers/Mac"));
- hits = searcher.Search(query);
- Assert.AreEqual(1, hits.Length(), "One in /Computers/Mac");
+ hits = searcher.Search(query, null, 1000).scoreDocs;
+ Assert.AreEqual(1, hits.Length, "One in /Computers/Mac");
}
}
}
\ No newline at end of file