You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by ar...@apache.org on 2006/06/04 04:41:25 UTC
svn commit: r411501 [26/30] - in /incubator/lucene.net/trunk/C#/src: ./
Demo/DeleteFiles/ Demo/DemoLib/ Demo/DemoLib/HTML/ Demo/IndexFiles/
Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/
Lucene.Net/Analysis/Standard/ Lucene.Net/Docu...
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestTermVectorsWriter.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsWriter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsWriter.cs Sat Jun 3 19:41:13 2006
@@ -13,25 +13,35 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
using System;
using NUnit.Framework;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
namespace Lucene.Net.Index
{
[TestFixture]
public class TestTermVectorsWriter
{
+ private void InitBlock()
+ {
+ positions = new int[testTerms.Length][];
+ }
+
private System.String[] testTerms = new System.String[]{"this", "is", "a", "test"};
private System.String[] testFields = new System.String[]{"f1", "f2", "f3"};
- private int[][] positions = new int[3][];
+ private int[][] positions;
private RAMDirectory dir = new RAMDirectory();
private System.String seg = "testSegment";
private FieldInfos fieldInfos = new FieldInfos();
- [TestFixtureSetUp]
- protected virtual void SetUp()
+
+ [TestFixtureSetUp]
+ public virtual void SetUp()
{
- positions = new int[testTerms.Length][];
for (int i = 0; i < testFields.Length; i++)
{
@@ -44,18 +54,18 @@
positions[i] = new int[5];
for (int j = 0; j < positions[i].Length; j++)
{
- positions[i][j] = i * 100;
+ positions[i][j] = j * 10;
}
}
}
- [TestFixtureTearDown]
- protected virtual void TearDown()
+ [TestFixtureTearDown]
+ public virtual void TearDown()
{
}
- [Test]
- public virtual void Test()
+ [Test]
+ public virtual void Test()
{
Assert.IsTrue(dir != null);
Assert.IsTrue(positions != null);
@@ -82,39 +92,32 @@
//Now read it back in
TermVectorsReader reader = new TermVectorsReader(dir, seg);
Assert.IsTrue(reader != null);
- CheckTermVector(reader, 0, 0);
+ checkTermVector(reader, 0, 0);
} catch (IOException e) {
e.printStackTrace();
Assert.IsTrue(false);
}
} */
- [Test]
- public virtual void TestWriter()
+ [Test]
+ public virtual void TestWriter()
{
- try
- {
- TermVectorsWriter writer = new TermVectorsWriter(dir, seg, fieldInfos);
- writer.OpenDocument();
- Assert.IsTrue(writer.IsDocumentOpen() == true);
- WriteField(writer, testFields[0]);
- writer.CloseDocument();
- writer.Close();
- Assert.IsTrue(writer.IsDocumentOpen() == false);
- //Check to see the files were created
- Assert.IsTrue(dir.FileExists(seg + TermVectorsWriter.TVD_EXTENSION));
- Assert.IsTrue(dir.FileExists(seg + TermVectorsWriter.TVX_EXTENSION));
- //Now read it back in
- TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
- Assert.IsTrue(reader != null);
- CheckTermVector(reader, 0, testFields[0]);
- }
- catch (System.IO.IOException e)
- {
- System.Console.Error.WriteLine(e.StackTrace);
- Assert.IsTrue(false);
- }
+ TermVectorsWriter writer = new TermVectorsWriter(dir, seg, fieldInfos);
+ writer.OpenDocument();
+ Assert.IsTrue(writer.IsDocumentOpen() == true);
+ WriteField(writer, testFields[0]);
+ writer.CloseDocument();
+ writer.Close();
+ Assert.IsTrue(writer.IsDocumentOpen() == false);
+ //Check to see the files were created
+ Assert.IsTrue(dir.FileExists(seg + TermVectorsWriter.TvdExtension));
+ Assert.IsTrue(dir.FileExists(seg + TermVectorsWriter.TvxExtension));
+ //Now read it back in
+ TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
+ Assert.IsTrue(reader != null);
+ CheckTermVector(reader, 0, testFields[0]);
}
+
private void CheckTermVector(TermVectorsReader reader, int docNum, System.String field)
{
TermFreqVector vector = reader.Get(docNum, field);
@@ -130,33 +133,26 @@
}
/// <summary> Test one document, multiple fields</summary>
+ /// <throws> IOException </throws>
[Test]
- public virtual void TestMultipleFields()
+ public virtual void TestMultipleFields()
{
- try
- {
- TermVectorsWriter writer = new TermVectorsWriter(dir, seg, fieldInfos);
- WriteDocument(writer, testFields.Length);
-
- writer.Close();
-
- Assert.IsTrue(writer.IsDocumentOpen() == false);
- //Check to see the files were created
- Assert.IsTrue(dir.FileExists(seg + TermVectorsWriter.TVD_EXTENSION));
- Assert.IsTrue(dir.FileExists(seg + TermVectorsWriter.TVX_EXTENSION));
- //Now read it back in
- TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
- Assert.IsTrue(reader != null);
-
- for (int j = 0; j < testFields.Length; j++)
- {
- CheckTermVector(reader, 0, testFields[j]);
- }
- }
- catch (System.IO.IOException e)
+ TermVectorsWriter writer = new TermVectorsWriter(dir, seg, fieldInfos);
+ WriteDocument(writer, testFields.Length);
+
+ writer.Close();
+
+ Assert.IsTrue(writer.IsDocumentOpen() == false);
+ //Check to see the files were created
+ Assert.IsTrue(dir.FileExists(seg + TermVectorsWriter.TvdExtension));
+ Assert.IsTrue(dir.FileExists(seg + TermVectorsWriter.TvxExtension));
+ //Now read it back in
+ TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
+ Assert.IsTrue(reader != null);
+
+ for (int j = 0; j < testFields.Length; j++)
{
- System.Console.Error.WriteLine(e.StackTrace);
- Assert.IsTrue(false);
+ CheckTermVector(reader, 0, testFields[j]);
}
}
@@ -176,7 +172,7 @@
/// <summary> </summary>
/// <param name="writer">The writer to write to
/// </param>
- /// <param name="j">The Field number
+ /// <param name="f">The field name
/// </param>
/// <throws> IOException </throws>
private void WriteField(TermVectorsWriter writer, System.String f)
@@ -193,38 +189,41 @@
[Test]
public virtual void TestMultipleDocuments()
{
-
- try
+ TermVectorsWriter writer = new TermVectorsWriter(dir, seg, fieldInfos);
+ Assert.IsTrue(writer != null);
+ for (int i = 0; i < 10; i++)
{
- TermVectorsWriter writer = new TermVectorsWriter(dir, seg, fieldInfos);
- Assert.IsTrue(writer != null);
- for (int i = 0; i < 10; i++)
- {
- WriteDocument(writer, testFields.Length);
- }
- writer.Close();
- }
- catch (System.IO.IOException e)
- {
- System.Console.Error.WriteLine(e.StackTrace);
- Assert.IsTrue(false);
+ WriteDocument(writer, testFields.Length);
}
+ writer.Close();
//Do some arbitrary tests
- try
+ TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
+ for (int i = 0; i < 10; i++)
{
- TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
- for (int i = 0; i < 10; i++)
- {
- Assert.IsTrue(reader != null);
- CheckTermVector(reader, 5, testFields[0]);
- CheckTermVector(reader, 2, testFields[2]);
- }
- }
- catch (System.IO.IOException e)
- {
- System.Console.Error.WriteLine(e.StackTrace);
- Assert.IsTrue(false);
+ Assert.IsTrue(reader != null);
+ CheckTermVector(reader, 5, testFields[0]);
+ CheckTermVector(reader, 2, testFields[2]);
}
+ }
+
+ /// <summary> Test that no NullPointerException will be raised,
+ /// when adding one document with a single, empty field
+ /// and term vectors enabled.
+ /// </summary>
+ /// <throws> IOException </throws>
+ /// <summary>
+ /// </summary>
+ [Test]
+ public virtual void TestBadSegment()
+ {
+ dir = new RAMDirectory();
+ IndexWriter ir = new IndexWriter(dir, new StandardAnalyzer(), true);
+
+ Lucene.Net.Documents.Document document = new Lucene.Net.Documents.Document();
+ document.Add(new Field("tvtest", "", Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.YES));
+ ir.AddDocument(document);
+ ir.Close();
+ dir.Close();
}
}
}
Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestWordlistLoader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestWordlistLoader.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestWordlistLoader.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestWordlistLoader.cs Sat Jun 3 19:41:13 2006
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+using WordlistLoader = Lucene.Net.Analysis.WordlistLoader;
+
+namespace Lucene.Net.Index
+{
+ [TestFixture]
+ public class TestWordlistLoader
+ {
+ [Test]
+ public virtual void TestWordlistLoading()
+ {
+ System.String s = "ONE\n two \nthree";
+ System.Collections.Hashtable wordSet1 = WordlistLoader.GetWordSet(new System.IO.StringReader(s));
+ CheckSet(wordSet1);
+ //UPGRADE_ISSUE: Constructor 'java.io.BufferedReader.BufferedReader' was not converted. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1000_javaioBufferedReaderBufferedReader_javaioReader'"
+ System.Collections.Hashtable wordSet2 = WordlistLoader.GetWordSet(new System.IO.StringReader(s));
+ CheckSet(wordSet2);
+ }
+
+ private void CheckSet(System.Collections.Hashtable wordset)
+ {
+ Assert.AreEqual(3, wordset.Count);
+ Assert.IsTrue(wordset.Contains("ONE")); // case is not modified
+ Assert.IsTrue(wordset.Contains("two")); // surrounding whitespace is removed
+ Assert.IsTrue(wordset.Contains("three"));
+ Assert.IsFalse(wordset.Contains("four"));
+ }
+ }
+}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/IndexTest.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/IndexTest.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/IndexTest.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/IndexTest.cs Sat Jun 3 19:41:13 2006
@@ -13,10 +13,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
using System;
using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
using FileDocument = Lucene.Net.Demo.FileDocument;
using IndexWriter = Lucene.Net.Index.IndexWriter;
+
namespace Lucene.Net
{
@@ -28,13 +30,14 @@
try
{
System.DateTime start = System.DateTime.Now;
- // FIXME: OG: what's with this hard-coded dirs??
- IndexWriter writer = new IndexWriter("F:\\test", new SimpleAnalyzer(), true);
+ System.String tempFileName = System.IO.Path.GetTempFileName();
+ System.String tempDirectory = System.IO.Path.GetDirectoryName(tempFileName);
+ tempFileName = System.IO.Path.GetFileName(tempFileName);
+ IndexWriter writer = new IndexWriter(System.IO.Path.Combine(tempDirectory, "luceneTest") + tempFileName + ".idx", new SimpleAnalyzer(), true);
+
+ writer.SetMergeFactor(20);
- writer.mergeFactor = 20;
-
- // FIXME: OG: what's with this hard-coded dirs??
- IndexDocs(writer, new System.IO.FileInfo("F:\\recipes"));
+ IndexDocs(writer, new System.IO.FileInfo("/tmp"));
writer.Optimize();
writer.Close();
@@ -46,17 +49,17 @@
System.Diagnostics.Process runtime = System.Diagnostics.Process.GetCurrentProcess();
- // System.Console.Out.Write(runtime.freeMemory()); // {{Aroush}} -- need to report how much free memory we have
+ // System.Console.Out.Write(java.lang.Runtime.freeMemory()); // {{Aroush}} how do we get freeMemory() in .NET?
System.Console.Out.WriteLine(" free memory before gc");
- System.Console.Out.Write(System.GC.GetTotalMemory(true));
+ // System.Console.Out.Write(java.lang.Runtime.totalMemory()); // {{Aroush}} how do we get totalMemory() in .NET?
System.Console.Out.WriteLine(" total memory before gc");
System.GC.Collect();
- // System.Console.Out.Write(runtime.freeMemory()); // {{Aroush}} -- need to report how much free memory we have
- System.Console.Out.WriteLine(" free memory after gc");
- System.Console.Out.Write(System.GC.GetTotalMemory(true));
- System.Console.Out.WriteLine(" total memory after gc");
+ // System.Console.Out.Write(java.lang.Runtime.freeMemory()); // {{Aroush}} how do we get freeMemory() in .NET?
+ System.Console.Out.WriteLine(" free memory after gc");
+ // System.Console.Out.Write(java.lang.Runtime.totalMemory()); // {{Aroush}} how do we get totalMemory() in .NET?
+ System.Console.Out.WriteLine(" total memory after gc");
}
catch (System.Exception e)
{
Added: incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/QueryParser/TestMultiAnalyzer.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiAnalyzer.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiAnalyzer.cs Sat Jun 3 19:41:13 2006
@@ -0,0 +1,268 @@
+/*
+ * Copyright 2004 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using LowerCaseFilter = Lucene.Net.Analysis.LowerCaseFilter;
+using Token = Lucene.Net.Analysis.Token;
+using TokenFilter = Lucene.Net.Analysis.TokenFilter;
+using TokenStream = Lucene.Net.Analysis.TokenStream;
+using StandardTokenizer = Lucene.Net.Analysis.Standard.StandardTokenizer;
+using ParseException = Lucene.Net.QueryParsers.ParseException;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
+
+namespace Lucene.Net.QueryParser
+{
+
+ /// <summary> Test QueryParser's ability to deal with Analyzers that return more
+ /// than one token per position or that return tokens with a position
+ /// increment > 1.
+ ///
+ /// </summary>
+ /// <author> Daniel Naber
+ /// </author>
+ [TestFixture]
+ public class TestMultiAnalyzer
+ {
+
+ private static int multiToken = 0;
+
+ [Test]
+ public virtual void TestMultiAnalyzer_Renamed_Method()
+ {
+
+ Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("", new MultiAnalyzer(this));
+
+ // trivial, no multiple tokens:
+ Assert.AreEqual("foo", qp.Parse("foo").ToString());
+ Assert.AreEqual("foo", qp.Parse("\"foo\"").ToString());
+ Assert.AreEqual("foo foobar", qp.Parse("foo foobar").ToString());
+ Assert.AreEqual("\"foo foobar\"", qp.Parse("\"foo foobar\"").ToString());
+ Assert.AreEqual("\"foo foobar blah\"", qp.Parse("\"foo foobar blah\"").ToString());
+
+ // two tokens at the same position:
+ Assert.AreEqual("(multi multi2) foo", qp.Parse("multi foo").ToString());
+ Assert.AreEqual("foo (multi multi2)", qp.Parse("foo multi").ToString());
+ Assert.AreEqual("(multi multi2) (multi multi2)", qp.Parse("multi multi").ToString());
+ Assert.AreEqual("+(foo (multi multi2)) +(bar (multi multi2))", qp.Parse("+(foo multi) +(bar multi)").ToString());
+ Assert.AreEqual("+(foo (multi multi2)) field:\"bar (multi multi2)\"", qp.Parse("+(foo multi) field:\"bar multi\"").ToString());
+
+ // phrases:
+ Assert.AreEqual("\"(multi multi2) foo\"", qp.Parse("\"multi foo\"").ToString());
+ Assert.AreEqual("\"foo (multi multi2)\"", qp.Parse("\"foo multi\"").ToString());
+ Assert.AreEqual("\"foo (multi multi2) foobar (multi multi2)\"", qp.Parse("\"foo multi foobar multi\"").ToString());
+
+ // fields:
+ Assert.AreEqual("(field:multi field:multi2) field:foo", qp.Parse("field:multi field:foo").ToString());
+ Assert.AreEqual("field:\"(multi multi2) foo\"", qp.Parse("field:\"multi foo\"").ToString());
+
+ // three tokens at one position:
+ Assert.AreEqual("triplemulti multi3 multi2", qp.Parse("triplemulti").ToString());
+ Assert.AreEqual("foo (triplemulti multi3 multi2) foobar", qp.Parse("foo triplemulti foobar").ToString());
+
+ // phrase with non-default slop:
+ Assert.AreEqual("\"(multi multi2) foo\"~10", qp.Parse("\"multi foo\"~10").ToString());
+
+ // phrase with non-default boost:
+ Assert.AreEqual("\"(multi multi2) foo\"^2.0", qp.Parse("\"multi foo\"^2").ToString());
+
+ // non-default operator:
+ qp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.AND_OPERATOR);
+ Assert.AreEqual("+(multi multi2) +foo", qp.Parse("multi foo").ToString());
+ }
+
+ [Test]
+ public virtual void TestPosIncrementAnalyzer()
+ {
+ Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("", new PosIncrementAnalyzer(this));
+ Assert.AreEqual("quick brown", qp.Parse("the quick brown").ToString());
+ Assert.AreEqual("\"quick brown\"", qp.Parse("\"the quick brown\"").ToString());
+ Assert.AreEqual("quick brown fox", qp.Parse("the quick brown fox").ToString());
+ Assert.AreEqual("\"quick brown fox\"", qp.Parse("\"the quick brown fox\"").ToString());
+ }
+
+ /// <summary> Expands "multi" to "multi" and "multi2", both at the same position,
+ /// and expands "triplemulti" to "triplemulti", "multi3", and "multi2".
+ /// </summary>
+ private class MultiAnalyzer : Analyzer
+ {
+ private void InitBlock(TestMultiAnalyzer enclosingInstance)
+ {
+ this.enclosingInstance = enclosingInstance;
+ }
+ private TestMultiAnalyzer enclosingInstance;
+
+ public TestMultiAnalyzer Enclosing_Instance
+ {
+ get
+ {
+ return enclosingInstance;
+ }
+
+ }
+
+ public MultiAnalyzer(TestMultiAnalyzer enclosingInstance)
+ {
+ InitBlock(enclosingInstance);
+ }
+
+ public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
+ {
+ TokenStream result = new StandardTokenizer(reader);
+ result = new TestFilter(enclosingInstance, result);
+ result = new LowerCaseFilter(result);
+ return result;
+ }
+ }
+
+ private sealed class TestFilter : TokenFilter
+ {
+ private void InitBlock(TestMultiAnalyzer enclosingInstance)
+ {
+ this.enclosingInstance = enclosingInstance;
+ }
+ private TestMultiAnalyzer enclosingInstance;
+ public TestMultiAnalyzer Enclosing_Instance
+ {
+ get
+ {
+ return enclosingInstance;
+ }
+
+ }
+
+ private Lucene.Net.Analysis.Token prevToken;
+
+ public TestFilter(TestMultiAnalyzer enclosingInstance, TokenStream in_Renamed) : base(in_Renamed)
+ {
+ InitBlock(enclosingInstance);
+ }
+
+ public override Lucene.Net.Analysis.Token Next()
+ {
+ if (Lucene.Net.QueryParser.TestMultiAnalyzer.multiToken > 0)
+ {
+ Lucene.Net.Analysis.Token token = new Lucene.Net.Analysis.Token("multi" + (Lucene.Net.QueryParser.TestMultiAnalyzer.multiToken + 1), prevToken.StartOffset(), prevToken.EndOffset(), prevToken.Type());
+ token.SetPositionIncrement(0);
+ Lucene.Net.QueryParser.TestMultiAnalyzer.multiToken--;
+ return token;
+ }
+ else
+ {
+ Lucene.Net.Analysis.Token t = input.Next();
+ prevToken = t;
+ if (t == null)
+ return null;
+ System.String text = t.TermText();
+ if (text.Equals("triplemulti"))
+ {
+ Lucene.Net.QueryParser.TestMultiAnalyzer.multiToken = 2;
+ return t;
+ }
+ else if (text.Equals("multi"))
+ {
+ Lucene.Net.QueryParser.TestMultiAnalyzer.multiToken = 1;
+ return t;
+ }
+ else
+ {
+ return t;
+ }
+ }
+ }
+ }
+
+ /// <summary> Analyzes "the quick brown" as: quick(incr=2) brown(incr=1).
+ /// Does not work correctly for input other than "the quick brown ...".
+ /// </summary>
+ private class PosIncrementAnalyzer : Analyzer
+ {
+ private void InitBlock(TestMultiAnalyzer enclosingInstance)
+ {
+ this.enclosingInstance = enclosingInstance;
+ }
+ private TestMultiAnalyzer enclosingInstance;
+ public TestMultiAnalyzer Enclosing_Instance
+ {
+ get
+ {
+ return enclosingInstance;
+ }
+
+ }
+
+ public PosIncrementAnalyzer(TestMultiAnalyzer enclosingInstance)
+ {
+ InitBlock(enclosingInstance);
+ }
+
+ public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
+ {
+ TokenStream result = new StandardTokenizer(reader);
+ result = new TestPosIncrementFilter(enclosingInstance, result);
+ result = new LowerCaseFilter(result);
+ return result;
+ }
+ }
+
+ private sealed class TestPosIncrementFilter : TokenFilter
+ {
+ private void InitBlock(TestMultiAnalyzer enclosingInstance)
+ {
+ this.enclosingInstance = enclosingInstance;
+ }
+ private TestMultiAnalyzer enclosingInstance;
+ public TestMultiAnalyzer Enclosing_Instance
+ {
+ get
+ {
+ return enclosingInstance;
+ }
+
+ }
+
+ public TestPosIncrementFilter(TestMultiAnalyzer enclosingInstance, TokenStream in_Renamed) : base(in_Renamed)
+ {
+ InitBlock(enclosingInstance);
+ }
+
+ public override Lucene.Net.Analysis.Token Next()
+ {
+ for (Token t = input.Next(); t != null; t = input.Next())
+ {
+ if (t.TermText().Equals("the"))
+ {
+ // stopword, do nothing
+ }
+ else if (t.TermText().Equals("quick"))
+ {
+ Lucene.Net.Analysis.Token token = new Lucene.Net.Analysis.Token(t.TermText(), t.StartOffset(), t.EndOffset(), t.Type());
+ token.SetPositionIncrement(2);
+ return token;
+ }
+ else
+ {
+ Lucene.Net.Analysis.Token token = new Lucene.Net.Analysis.Token(t.TermText(), t.StartOffset(), t.EndOffset(), t.Type());
+ token.SetPositionIncrement(1);
+ return token;
+ }
+ }
+ return null;
+ }
+ }
+ }
+}
\ No newline at end of file
Added: incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiFieldQueryParser.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/QueryParser/TestMultiFieldQueryParser.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiFieldQueryParser.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiFieldQueryParser.cs Sat Jun 3 19:41:13 2006
@@ -0,0 +1,309 @@
+/*
+ * Copyright 2004 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using Token = Lucene.Net.Analysis.Token;
+using TokenStream = Lucene.Net.Analysis.TokenStream;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using MultiFieldQueryParser = Lucene.Net.QueryParsers.MultiFieldQueryParser;
+using ParseException = Lucene.Net.QueryParsers.ParseException;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
+using BooleanClause = Lucene.Net.Search.BooleanClause;
+using Hits = Lucene.Net.Search.Hits;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using Query = Lucene.Net.Search.Query;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
+namespace Lucene.Net.QueryParser
+{
+
+ /// <summary> Tests QueryParser.</summary>
+ /// <author> Daniel Naber
+ /// </author>
+ [TestFixture]
+ public class TestMultiFieldQueryParser
+ {
+ [Test]
+ public virtual void TestSimple()
+ {
+ System.String[] fields = new System.String[]{"b", "t"};
+ MultiFieldQueryParser mfqp = new MultiFieldQueryParser(fields, new StandardAnalyzer());
+
+ Query q = mfqp.Parse("one");
+ Assert.AreEqual("b:one t:one", q.ToString());
+
+ q = mfqp.Parse("one two");
+ Assert.AreEqual("(b:one t:one) (b:two t:two)", q.ToString());
+
+ q = mfqp.Parse("+one +two");
+ Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());
+
+ q = mfqp.Parse("+one -two -three)");
+ Assert.AreEqual("+(b:one t:one) -(b:two t:two) -(b:three t:three)", q.ToString());
+
+ q = mfqp.Parse("one^2 two");
+ Assert.AreEqual("((b:one t:one)^2.0) (b:two t:two)", q.ToString());
+
+ q = mfqp.Parse("one~ two");
+ Assert.AreEqual("(b:one~0.5 t:one~0.5) (b:two t:two)", q.ToString());
+
+ q = mfqp.Parse("one~0.8 two^2");
+ Assert.AreEqual("(b:one~0.8 t:one~0.8) ((b:two t:two)^2.0)", q.ToString());
+
+ q = mfqp.Parse("one* two*");
+ Assert.AreEqual("(b:one* t:one*) (b:two* t:two*)", q.ToString());
+
+ q = mfqp.Parse("[a TO c] two");
+ Assert.AreEqual("(b:[a TO c] t:[a TO c]) (b:two t:two)", q.ToString());
+
+ q = mfqp.Parse("w?ldcard");
+ Assert.AreEqual("b:w?ldcard t:w?ldcard", q.ToString());
+
+ q = mfqp.Parse("\"foo bar\"");
+ Assert.AreEqual("b:\"foo bar\" t:\"foo bar\"", q.ToString());
+
+ q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
+ Assert.AreEqual("(b:\"aa bb cc\" t:\"aa bb cc\") (b:\"dd ee\" t:\"dd ee\")", q.ToString());
+
+ q = mfqp.Parse("\"foo bar\"~4");
+ Assert.AreEqual("b:\"foo bar\"~4 t:\"foo bar\"~4", q.ToString());
+
+ // make sure that terms which have a field are not touched:
+ q = mfqp.Parse("one f:two");
+ Assert.AreEqual("(b:one t:one) f:two", q.ToString());
+
+ // AND mode:
+ mfqp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.AND_OPERATOR);
+ q = mfqp.Parse("one two");
+ Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());
+ q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
+ Assert.AreEqual("+(b:\"aa bb cc\" t:\"aa bb cc\") +(b:\"dd ee\" t:\"dd ee\")", q.ToString());
+ }
+
+ // TODO: remove this for Lucene 2.0
+ [Test]
+ public virtual void TestOldMethods()
+ {
+ // testing the old static calls that are now deprecated:
+ AssertQueryEquals("b:one t:one", "one");
+ AssertQueryEquals("(b:one b:two) (t:one t:two)", "one two");
+ AssertQueryEquals("(b:one -b:two) (t:one -t:two)", "one -two");
+ AssertQueryEquals("(b:one -(b:two b:three)) (t:one -(t:two t:three))", "one -(two three)");
+ AssertQueryEquals("(+b:one +b:two) (+t:one +t:two)", "+one +two");
+ }
+
+ // TODO: remove this for Lucene 2.0
+ private void AssertQueryEquals(System.String expected, System.String query)
+ {
+ System.String[] fields = new System.String[]{"b", "t"};
+ Query q = MultiFieldQueryParser.Parse(query, fields, new StandardAnalyzer());
+ System.String s = q.ToString();
+ Assert.AreEqual(expected, s);
+ }
+
+ [Test]
+ public virtual void TestStaticMethod1()
+ {
+ System.String[] fields = new System.String[]{"b", "t"};
+ System.String[] queries = new System.String[]{"one", "two"};
+ Query q = MultiFieldQueryParser.Parse(queries, fields, new StandardAnalyzer());
+ Assert.AreEqual("b:one t:two", q.ToString());
+
+ System.String[] queries2 = new System.String[]{"+one", "+two"};
+ q = MultiFieldQueryParser.Parse(queries2, fields, new StandardAnalyzer());
+ Assert.AreEqual("(+b:one) (+t:two)", q.ToString());
+
+ System.String[] queries3 = new System.String[]{"one", "+two"};
+ q = MultiFieldQueryParser.Parse(queries3, fields, new StandardAnalyzer());
+ Assert.AreEqual("b:one (+t:two)", q.ToString());
+
+ System.String[] queries4 = new System.String[]{"one +more", "+two"};
+ q = MultiFieldQueryParser.Parse(queries4, fields, new StandardAnalyzer());
+ Assert.AreEqual("(b:one +b:more) (+t:two)", q.ToString());
+
+ System.String[] queries5 = new System.String[]{"blah"};
+ try
+ {
+ q = MultiFieldQueryParser.Parse(queries5, fields, new StandardAnalyzer());
+ Assert.Fail();
+ }
+ catch (System.ArgumentException e)
+ {
+ // expected exception, array length differs
+ }
+ }
+
+ [Test]
+ public virtual void TestStaticMethod2()
+ {
+ System.String[] fields = new System.String[]{"b", "t"};
+ BooleanClause.Occur[] flags = new BooleanClause.Occur[]{BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
+ Query q = MultiFieldQueryParser.Parse("one", fields, flags, new StandardAnalyzer());
+ Assert.AreEqual("+b:one -t:one", q.ToString());
+
+ q = MultiFieldQueryParser.Parse("one two", fields, flags, new StandardAnalyzer());
+ Assert.AreEqual("+(b:one b:two) -(t:one t:two)", q.ToString());
+
+ try
+ {
+ BooleanClause.Occur[] flags2 = new BooleanClause.Occur[]{BooleanClause.Occur.MUST};
+ q = MultiFieldQueryParser.Parse("blah", fields, flags2, new StandardAnalyzer());
+ Assert.Fail();
+ }
+ catch (System.ArgumentException e)
+ {
+ // expected exception, array length differs
+ }
+ }
+
+ [Test]
+ public virtual void TestStaticMethod2Old()
+ {
+ System.String[] fields = new System.String[]{"b", "t"};
+ int[] flags = new int[]{MultiFieldQueryParser.REQUIRED_FIELD, MultiFieldQueryParser.PROHIBITED_FIELD};
+ Query q = MultiFieldQueryParser.Parse("one", fields, flags, new StandardAnalyzer());
+ Assert.AreEqual("+b:one -t:one", q.ToString());
+
+ q = MultiFieldQueryParser.Parse("one two", fields, flags, new StandardAnalyzer());
+ Assert.AreEqual("+(b:one b:two) -(t:one t:two)", q.ToString());
+
+ try
+ {
+ int[] flags2 = new int[]{MultiFieldQueryParser.REQUIRED_FIELD};
+ q = MultiFieldQueryParser.Parse("blah", fields, flags2, new StandardAnalyzer());
+ Assert.Fail();
+ }
+ catch (System.ArgumentException e)
+ {
+ // expected exception, array length differs
+ }
+ }
+
+ [Test]
+ public virtual void TestStaticMethod3()
+ {
+ System.String[] queries = new System.String[]{"one", "two", "three"};
+ System.String[] fields = new System.String[]{"f1", "f2", "f3"};
+ BooleanClause.Occur[] flags = new BooleanClause.Occur[]{BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT, BooleanClause.Occur.SHOULD};
+ Query q = MultiFieldQueryParser.Parse(queries, fields, flags, new StandardAnalyzer());
+ Assert.AreEqual("+f1:one -f2:two f3:three", q.ToString());
+
+ try
+ {
+ BooleanClause.Occur[] flags2 = new BooleanClause.Occur[]{BooleanClause.Occur.MUST};
+ q = MultiFieldQueryParser.Parse(queries, fields, flags2, new StandardAnalyzer());
+ Assert.Fail();
+ }
+ catch (System.ArgumentException e)
+ {
+ // expected exception, array length differs
+ }
+ }
+
+ [Test]
+ public virtual void TestStaticMethod3Old()
+ {
+ System.String[] queries = new System.String[]{"one", "two"};
+ System.String[] fields = new System.String[]{"b", "t"};
+ int[] flags = new int[]{MultiFieldQueryParser.REQUIRED_FIELD, MultiFieldQueryParser.PROHIBITED_FIELD};
+ Query q = MultiFieldQueryParser.Parse(queries, fields, flags, new StandardAnalyzer());
+ Assert.AreEqual("+b:one -t:two", q.ToString());
+
+ try
+ {
+ int[] flags2 = new int[]{MultiFieldQueryParser.REQUIRED_FIELD};
+ q = MultiFieldQueryParser.Parse(queries, fields, flags2, new StandardAnalyzer());
+ Assert.Fail();
+ }
+ catch (System.ArgumentException e)
+ {
+ // expected exception, array length differs
+ }
+ }
+
+ [Test]
+ public virtual void TestAnalyzerReturningNull()
+ {
+ System.String[] fields = new System.String[]{"f1", "f2", "f3"};
+ MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, new AnalyzerReturningNull());
+ Query q = parser.Parse("bla AND blo");
+ Assert.AreEqual("+(f2:bla f3:bla) +(f2:blo f3:blo)", q.ToString());
+ // the following queries are not affected as their terms are not analyzed anyway:
+ q = parser.Parse("bla*");
+ Assert.AreEqual("f1:bla* f2:bla* f3:bla*", q.ToString());
+ q = parser.Parse("bla~");
+ Assert.AreEqual("f1:bla~0.5 f2:bla~0.5 f3:bla~0.5", q.ToString());
+ q = parser.Parse("[a TO c]");
+ Assert.AreEqual("f1:[a TO c] f2:[a TO c] f3:[a TO c]", q.ToString());
+ }
+
+ [Test]
+ public virtual void TestStopWordSearching()
+ {
+ Analyzer analyzer = new StandardAnalyzer();
+ Directory ramDir = new RAMDirectory();
+ IndexWriter iw = new IndexWriter(ramDir, analyzer, true);
+ Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+ doc.Add(new Field("body", "blah the footest blah", Field.Store.NO, Field.Index.TOKENIZED));
+ iw.AddDocument(doc);
+ iw.Close();
+
+ MultiFieldQueryParser mfqp = new MultiFieldQueryParser(new System.String[]{"body"}, analyzer);
+ mfqp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.Operator.AND);
+ Query q = mfqp.Parse("the footest");
+ IndexSearcher is_Renamed = new IndexSearcher(ramDir);
+ Hits hits = is_Renamed.Search(q);
+ Assert.AreEqual(1, hits.Length());
+ is_Renamed.Close();
+ }
+
+ /// <summary> Return empty tokens for field "f1".</summary>
+ private class AnalyzerReturningNull:Analyzer
+ {
+ internal StandardAnalyzer stdAnalyzer = new StandardAnalyzer();
+
+ public AnalyzerReturningNull()
+ {
+ }
+
+ public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
+ {
+ if ("f1".Equals(fieldName))
+ {
+ return new EmptyTokenStream();
+ }
+ else
+ {
+ return stdAnalyzer.TokenStream(fieldName, reader);
+ }
+ }
+
+ private class EmptyTokenStream:TokenStream
+ {
+ public override Token Next()
+ {
+ return null;
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestQueryParser.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/QueryParser/TestQueryParser.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestQueryParser.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestQueryParser.cs Sat Jun 3 19:41:13 2006
@@ -13,6 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
using System;
using NUnit.Framework;
using Analyzer = Lucene.Net.Analysis.Analyzer;
@@ -24,6 +25,8 @@
using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
using DateField = Lucene.Net.Documents.DateField;
+using ParseException = Lucene.Net.QueryParsers.ParseException;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
using BooleanQuery = Lucene.Net.Search.BooleanQuery;
using FuzzyQuery = Lucene.Net.Search.FuzzyQuery;
using PhraseQuery = Lucene.Net.Search.PhraseQuery;
@@ -32,12 +35,13 @@
using RangeQuery = Lucene.Net.Search.RangeQuery;
using TermQuery = Lucene.Net.Search.TermQuery;
using WildcardQuery = Lucene.Net.Search.WildcardQuery;
+
namespace Lucene.Net.QueryParser
{
/// <summary> Tests QueryParser.</summary>
[TestFixture]
- public class TestQueryParser
+ public class TestQueryParser
{
public static Analyzer qpAnalyzer = new QPTestAnalyzer();
@@ -47,7 +51,7 @@
/// <summary> Filter which discards the token 'stop' and which expands the
/// token 'phrase' into 'phrase1 phrase2'
/// </summary>
- public QPTestFilter(TokenStream in_Renamed) : base(in_Renamed)
+ public QPTestFilter(TokenStream in_Renamed):base(in_Renamed)
{
}
@@ -82,43 +86,43 @@
{
/// <summary>Filters LowerCaseTokenizer with StopFilter. </summary>
- public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
+ public TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
{
return new QPTestFilter(new LowerCaseTokenizer(reader));
}
}
- public class QPTestParser : QueryParsers.QueryParser
+ public class QPTestParser : Lucene.Net.QueryParsers.QueryParser
{
- public QPTestParser(System.String f, Analyzer a) : base(f, a)
+ public QPTestParser(System.String f, Analyzer a):base(f, a)
{
}
- protected /*internal*/ override Query GetFuzzyQuery(System.String field, System.String termStr)
+ protected override Query GetFuzzyQuery(System.String field, System.String termStr, float minSimilarity)
{
- throw new Lucene.Net.Analysis.Standard.ParseException("Fuzzy queries not allowed");
+ throw new ParseException("Fuzzy queries not allowed");
}
- protected /*internal*/ override Query GetWildcardQuery(System.String field, System.String termStr)
+ protected override Query GetWildcardQuery(System.String field, System.String termStr)
{
- throw new Lucene.Net.Analysis.Standard.ParseException("Wildcard queries not allowed");
+ throw new ParseException("Wildcard queries not allowed");
}
}
private int originalMaxClauses;
- [TestFixtureSetUp]
- public virtual void SetUp()
+ [TestFixtureSetUp]
+ public virtual void SetUp()
{
originalMaxClauses = BooleanQuery.GetMaxClauseCount();
}
- public virtual QueryParsers.QueryParser GetParser(Analyzer a)
+ public virtual Lucene.Net.QueryParsers.QueryParser GetParser(Analyzer a)
{
if (a == null)
a = new SimpleAnalyzer();
- QueryParsers.QueryParser qp = new QueryParsers.QueryParser("Field", a);
- qp.SetOperator(QueryParsers.QueryParser.DEFAULT_OPERATOR_OR);
+ Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", a);
+ qp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.OR_OPERATOR);
return qp;
}
@@ -130,7 +134,7 @@
public virtual void AssertQueryEquals(System.String query, Analyzer a, System.String result)
{
Query q = GetQuery(query, a);
- System.String s = q.ToString("Field");
+ System.String s = q.ToString("field");
if (!s.Equals(result))
{
Assert.Fail("Query /" + query + "/ yielded /" + s + "/, expecting /" + result + "/");
@@ -139,10 +143,21 @@
public virtual void AssertWildcardQueryEquals(System.String query, bool lowercase, System.String result)
{
- QueryParsers.QueryParser qp = GetParser(null);
- qp.SetLowercaseWildcardTerms(lowercase);
+ Lucene.Net.QueryParsers.QueryParser qp = GetParser(null);
+ qp.SetLowercaseExpandedTerms(lowercase);
+ Query q = qp.Parse(query);
+ System.String s = q.ToString("field");
+ if (!s.Equals(result))
+ {
+ Assert.Fail("WildcardQuery /" + query + "/ yielded /" + s + "/, expecting /" + result + "/");
+ }
+ }
+
+ public virtual void AssertWildcardQueryEquals(System.String query, System.String result)
+ {
+ Lucene.Net.QueryParsers.QueryParser qp = GetParser(null);
Query q = qp.Parse(query);
- System.String s = q.ToString("Field");
+ System.String s = q.ToString("field");
if (!s.Equals(result))
{
Assert.Fail("WildcardQuery /" + query + "/ yielded /" + s + "/, expecting /" + result + "/");
@@ -153,23 +168,23 @@
{
if (a == null)
a = new SimpleAnalyzer();
- QueryParsers.QueryParser qp = new QueryParsers.QueryParser("Field", a);
- qp.SetOperator(QueryParsers.QueryParser.DEFAULT_OPERATOR_AND);
+ Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", a);
+ qp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.AND_OPERATOR);
return qp.Parse(query);
}
public virtual void AssertQueryEqualsDOA(System.String query, Analyzer a, System.String result)
{
Query q = GetQueryDOA(query, a);
- System.String s = q.ToString("Field");
+ System.String s = q.ToString("field");
if (!s.Equals(result))
{
Assert.Fail("Query /" + query + "/ yielded /" + s + "/, expecting /" + result + "/");
}
}
- [Test]
- public virtual void TestSimple()
+ [Test]
+ public virtual void TestSimple()
{
AssertQueryEquals("term term term", null, "term term term");
AssertQueryEquals("türm term term", null, "türm term term");
@@ -191,7 +206,7 @@
AssertQueryEquals("a OR -b", null, "a -b");
AssertQueryEquals("+term -term term", null, "+term -term term");
- AssertQueryEquals("foo:term AND Field:anotherTerm", null, "+foo:term +anotherterm");
+ AssertQueryEquals("foo:term AND field:anotherTerm", null, "+foo:term +anotherterm");
AssertQueryEquals("term AND \"phrase phrase\"", null, "+term +\"phrase phrase\"");
AssertQueryEquals("\"hello there\"", null, "\"hello there\"");
Assert.IsTrue(GetQuery("a AND b", null) is BooleanQuery);
@@ -210,10 +225,18 @@
AssertQueryEquals("((a OR b) AND NOT c) OR d", null, "(+(a b) -c) d");
AssertQueryEquals("+(apple \"steve jobs\") -(foo bar baz)", null, "+(apple \"steve jobs\") -(foo bar baz)");
AssertQueryEquals("+title:(dog OR cat) -author:\"bob dole\"", null, "+(title:dog title:cat) -author:\"bob dole\"");
+
+ Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new StandardAnalyzer());
+ // make sure OR is the default:
+ Assert.AreEqual(Lucene.Net.QueryParsers.QueryParser.OR_OPERATOR, qp.GetDefaultOperator());
+ qp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.AND_OPERATOR);
+ Assert.AreEqual(Lucene.Net.QueryParsers.QueryParser.AND_OPERATOR, qp.GetDefaultOperator());
+ qp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.OR_OPERATOR);
+ Assert.AreEqual(Lucene.Net.QueryParsers.QueryParser.OR_OPERATOR, qp.GetDefaultOperator());
}
- [Test]
- public virtual void TestPunct()
+ [Test]
+ public virtual void TestPunct()
{
Analyzer a = new WhitespaceAnalyzer();
AssertQueryEquals("a&b", a, "a&b");
@@ -221,8 +244,8 @@
AssertQueryEquals(".NET", a, ".NET");
}
- [Test]
- public virtual void TestSlop()
+ [Test]
+ public virtual void TestSlop()
{
AssertQueryEquals("\"term germ\"~2", null, "\"term germ\"~2");
AssertQueryEquals("\"term germ\"~2 flork", null, "\"term germ\"~2 flork");
@@ -231,8 +254,8 @@
AssertQueryEquals("\"term germ\"~2^2", null, "\"term germ\"~2^2.0");
}
- [Test]
- public virtual void TestNumber()
+ [Test]
+ public virtual void TestNumber()
{
// The numbers go away because SimpleAnalyzer ignores them
AssertQueryEquals("3", null, "");
@@ -245,62 +268,78 @@
AssertQueryEquals("term term1 term2", a, "term term1 term2");
}
- [Test]
- public virtual void TestWildcard()
+ [Test]
+ public virtual void TestWildcard()
{
AssertQueryEquals("term*", null, "term*");
AssertQueryEquals("term*^2", null, "term*^2.0");
AssertQueryEquals("term~", null, "term~0.5");
- AssertQueryEquals("term~0.7", null, "term~0.7");
- AssertQueryEquals("term~^2", null, "term^2.0~0.5");
- AssertQueryEquals("term^2~", null, "term^2.0~0.5");
- AssertQueryEquals("term*germ", null, "term*germ");
- AssertQueryEquals("term*germ^3", null, "term*germ^3.0");
-
- Assert.IsTrue(GetQuery("term*", null) is PrefixQuery);
- Assert.IsTrue(GetQuery("term*^2", null) is PrefixQuery);
- Assert.IsTrue(GetQuery("term~", null) is FuzzyQuery);
- Assert.IsTrue(GetQuery("term~0.7", null) is FuzzyQuery);
- FuzzyQuery fq = (FuzzyQuery) GetQuery("term~0.7", null);
- Assert.AreEqual(0.7f, fq.GetMinSimilarity(), 0.1f);
- Assert.AreEqual(0, fq.GetPrefixLength());
- fq = (FuzzyQuery) GetQuery("term~", null);
- Assert.AreEqual(0.5f, fq.GetMinSimilarity(), 0.1f);
- Assert.AreEqual(0, fq.GetPrefixLength());
- try
- {
- GetQuery("term~1.1", null); // value > 1, throws exception
- Assert.Fail();
- }
- catch (Lucene.Net.QueryParsers.ParseException pe)
- {
- // expected exception
- }
- Assert.IsTrue(GetQuery("term*germ", null) is WildcardQuery);
+ AssertQueryEquals("term~0.7", null, "term~0.7");
+ AssertQueryEquals("term~^2", null, "term~0.5^2.0");
+ AssertQueryEquals("term^2~", null, "term~0.5^2.0");
+ AssertQueryEquals("term*germ", null, "term*germ");
+ AssertQueryEquals("term*germ^3", null, "term*germ^3.0");
+
+ Assert.IsTrue(GetQuery("term*", null) is PrefixQuery);
+ Assert.IsTrue(GetQuery("term*^2", null) is PrefixQuery);
+ Assert.IsTrue(GetQuery("term~", null) is FuzzyQuery);
+ Assert.IsTrue(GetQuery("term~0.7", null) is FuzzyQuery);
+ FuzzyQuery fq = (FuzzyQuery) GetQuery("term~0.7", null);
+ Assert.AreEqual(0.7f, fq.GetMinSimilarity(), 0.1f);
+ Assert.AreEqual(FuzzyQuery.defaultPrefixLength, fq.GetPrefixLength());
+ fq = (FuzzyQuery) GetQuery("term~", null);
+ Assert.AreEqual(0.5f, fq.GetMinSimilarity(), 0.1f);
+ Assert.AreEqual(FuzzyQuery.defaultPrefixLength, fq.GetPrefixLength());
+ try
+ {
+ GetQuery("term~1.1", null); // value > 1, throws exception
+ Assert.Fail();
+ }
+ catch (ParseException pe)
+ {
+ // expected exception
+ }
+ Assert.IsTrue(GetQuery("term*germ", null) is WildcardQuery);
/* Tests to see that wild card terms are (or are not) properly
* lower-cased with proper parser configuration
*/
// First prefix queries:
+ // by default, convert to lowercase:
+ AssertWildcardQueryEquals("Term*", true, "term*");
+ // explicitly set lowercase:
AssertWildcardQueryEquals("term*", true, "term*");
AssertWildcardQueryEquals("Term*", true, "term*");
AssertWildcardQueryEquals("TERM*", true, "term*");
+ // explicitly disable lowercase conversion:
AssertWildcardQueryEquals("term*", false, "term*");
AssertWildcardQueryEquals("Term*", false, "Term*");
AssertWildcardQueryEquals("TERM*", false, "TERM*");
// Then 'full' wildcard queries:
+ // by default, convert to lowercase:
+ AssertWildcardQueryEquals("Te?m", "te?m");
+ // explicitly set lowercase:
AssertWildcardQueryEquals("te?m", true, "te?m");
AssertWildcardQueryEquals("Te?m", true, "te?m");
AssertWildcardQueryEquals("TE?M", true, "te?m");
AssertWildcardQueryEquals("Te?m*gerM", true, "te?m*germ");
+ // explicitly disable lowercase conversion:
AssertWildcardQueryEquals("te?m", false, "te?m");
AssertWildcardQueryEquals("Te?m", false, "Te?m");
AssertWildcardQueryEquals("TE?M", false, "TE?M");
AssertWildcardQueryEquals("Te?m*gerM", false, "Te?m*gerM");
+ // Fuzzy queries:
+ AssertWildcardQueryEquals("Term~", "term~0.5");
+ AssertWildcardQueryEquals("Term~", true, "term~0.5");
+ AssertWildcardQueryEquals("Term~", false, "Term~0.5");
+ // Range queries:
+ AssertWildcardQueryEquals("[A TO C]", "[a TO c]");
+ AssertWildcardQueryEquals("[A TO C]", true, "[a TO c]");
+ AssertWildcardQueryEquals("[A TO C]", false, "[A TO C]");
}
- [Test]
- public virtual void TestQPA()
+ [Test]
+ public virtual void TestQPA()
{
AssertQueryEquals("term term term", qpAnalyzer, "term term term");
AssertQueryEquals("term +stop term", qpAnalyzer, "term term");
@@ -313,8 +352,8 @@
Assert.IsTrue(GetQuery("term +stop", qpAnalyzer) is TermQuery);
}
- [Test]
- public virtual void TestRange()
+ [Test]
+ public virtual void TestRange()
{
AssertQueryEquals("[ a TO z]", null, "[a TO z]");
Assert.IsTrue(GetQuery("[ a TO z]", null) is RangeQuery);
@@ -330,16 +369,16 @@
public virtual System.String GetDate(System.String s)
{
- return DateField.DateToString(DateTime.Parse(s));
+ return DateField.DateToString(System.DateTime.Parse(s)); // {{Aroush-1.9}} We want a format of "MMM d, yyy" how is it done in .NET?
}
public virtual System.String GetLocalizedDate(int year, int month, int day)
{
- return new DateTime(year,month,day).ToShortDateString();
+ return new System.DateTime(year, month, day).ToString("MMM d, yyy"); // {{Aroush-1.9}} We want a format of "MMM d, yyy" will this do?
}
- [Test]
- public virtual void TestDateRange()
+ [Test]
+ public virtual void TestDateRange()
{
System.String startDate = GetLocalizedDate(2002, 1, 1);
System.String endDate = GetLocalizedDate(2002, 1, 4);
@@ -347,34 +386,34 @@
AssertQueryEquals("{ " + startDate + " " + endDate + " }", null, "{" + GetDate(startDate) + " TO " + GetDate(endDate) + "}");
}
- [Test]
- public virtual void TestEscaped()
+ [Test]
+ public virtual void TestEscaped()
{
Analyzer a = new WhitespaceAnalyzer();
-
- /*AssertQueryEquals("\\[brackets", a, "\\[brackets");
- AssertQueryEquals("\\[brackets", null, "brackets");
- AssertQueryEquals("\\\\", a, "\\\\");
- AssertQueryEquals("\\+blah", a, "\\+blah");
- AssertQueryEquals("\\(blah", a, "\\(blah");
-
- AssertQueryEquals("\\-blah", a, "\\-blah");
- AssertQueryEquals("\\!blah", a, "\\!blah");
- AssertQueryEquals("\\{blah", a, "\\{blah");
- AssertQueryEquals("\\}blah", a, "\\}blah");
- AssertQueryEquals("\\:blah", a, "\\:blah");
- AssertQueryEquals("\\^blah", a, "\\^blah");
- AssertQueryEquals("\\[blah", a, "\\[blah");
- AssertQueryEquals("\\]blah", a, "\\]blah");
- AssertQueryEquals("\\\"blah", a, "\\\"blah");
- AssertQueryEquals("\\(blah", a, "\\(blah");
- AssertQueryEquals("\\)blah", a, "\\)blah");
- AssertQueryEquals("\\~blah", a, "\\~blah");
- AssertQueryEquals("\\*blah", a, "\\*blah");
- AssertQueryEquals("\\?blah", a, "\\?blah");
- //AssertQueryEquals("foo \\&\\& bar", a, "foo \\&\\& bar");
- //AssertQueryEquals("foo \\|| bar", a, "foo \\|| bar");
- //AssertQueryEquals("foo \\AND bar", a, "foo \\AND bar");*/
+
+ /*assertQueryEquals("\\[brackets", a, "\\[brackets");
+ assertQueryEquals("\\[brackets", null, "brackets");
+ assertQueryEquals("\\\\", a, "\\\\");
+ assertQueryEquals("\\+blah", a, "\\+blah");
+ assertQueryEquals("\\(blah", a, "\\(blah");
+
+ assertQueryEquals("\\-blah", a, "\\-blah");
+ assertQueryEquals("\\!blah", a, "\\!blah");
+ assertQueryEquals("\\{blah", a, "\\{blah");
+ assertQueryEquals("\\}blah", a, "\\}blah");
+ assertQueryEquals("\\:blah", a, "\\:blah");
+ assertQueryEquals("\\^blah", a, "\\^blah");
+ assertQueryEquals("\\[blah", a, "\\[blah");
+ assertQueryEquals("\\]blah", a, "\\]blah");
+ assertQueryEquals("\\\"blah", a, "\\\"blah");
+ assertQueryEquals("\\(blah", a, "\\(blah");
+ assertQueryEquals("\\)blah", a, "\\)blah");
+ assertQueryEquals("\\~blah", a, "\\~blah");
+ assertQueryEquals("\\*blah", a, "\\*blah");
+ assertQueryEquals("\\?blah", a, "\\?blah");
+ //assertQueryEquals("foo \\&\\& bar", a, "foo \\&\\& bar");
+ //assertQueryEquals("foo \\|| bar", a, "foo \\|| bar");
+ //assertQueryEquals("foo \\AND bar", a, "foo \\AND bar");*/
AssertQueryEquals("a\\-b:c", a, "a-b:c");
AssertQueryEquals("a\\+b:c", a, "a+b:c");
@@ -390,13 +429,13 @@
AssertQueryEquals("a:b\\+c*", a, "a:b+c*");
AssertQueryEquals("a:b\\:c*", a, "a:b:c*");
- AssertQueryEquals("a:b\\\\c*", a, "a:b\\c*");
+ AssertQueryEquals("a:b\\\\c*", a, "a:b\\c*");
AssertQueryEquals("a:b\\-?c", a, "a:b-?c");
AssertQueryEquals("a:b\\+?c", a, "a:b+?c");
AssertQueryEquals("a:b\\:?c", a, "a:b:?c");
- AssertQueryEquals("a:b\\\\?c", a, "a:b\\?c");
+ AssertQueryEquals("a:b\\\\?c", a, "a:b\\?c");
AssertQueryEquals("a:b\\-c~", a, "a:b-c~0.5");
AssertQueryEquals("a:b\\+c~", a, "a:b+c~0.5");
@@ -408,8 +447,8 @@
AssertQueryEquals("[ a\\\\ TO a\\* ]", null, "[a\\ TO a*]");
}
- [Test]
- public virtual void TestTabNewlineCarriageReturn()
+ [Test]
+ public virtual void TestTabNewlineCarriageReturn()
{
AssertQueryEqualsDOA("+weltbank +worlbank", null, "+weltbank +worlbank");
@@ -431,8 +470,8 @@
AssertQueryEqualsDOA("weltbank \t +worlbank", null, "+weltbank +worlbank");
}
- [Test]
- public virtual void TestSimpleDAO()
+ [Test]
+ public virtual void TestSimpleDAO()
{
AssertQueryEqualsDOA("term term term", null, "+term +term +term");
AssertQueryEqualsDOA("term +term term", null, "+term +term +term");
@@ -441,11 +480,11 @@
AssertQueryEqualsDOA("-term term term", null, "-term +term +term");
}
- [Test]
- public virtual void TestBoost()
+ [Test]
+ public virtual void TestBoost()
{
StandardAnalyzer oneStopAnalyzer = new StandardAnalyzer(new System.String[]{"on"});
- QueryParsers.QueryParser qp = new QueryParsers.QueryParser("Field", oneStopAnalyzer);
+ Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", oneStopAnalyzer);
Query q = qp.Parse("on^1.0");
Assert.IsNotNull(q);
q = qp.Parse("\"hello\"^2.0");
@@ -457,69 +496,82 @@
q = qp.Parse("\"on\"^1.0");
Assert.IsNotNull(q);
- q = QueryParsers.QueryParser.Parse("the^3", "Field", new StandardAnalyzer());
+ Lucene.Net.QueryParsers.QueryParser qp2 = new Lucene.Net.QueryParsers.QueryParser("field", new StandardAnalyzer());
+ q = qp2.Parse("the^3");
+ // "the" is a stop word so the result is an empty query:
Assert.IsNotNull(q);
+ Assert.AreEqual("", q.ToString());
+ Assert.AreEqual(1.0f, q.GetBoost(), 0.01f);
}
- [Test]
- public virtual void TestException()
+ [Test]
+ public virtual void TestException()
{
try
{
AssertQueryEquals("\"some phrase", null, "abc");
Assert.Fail("ParseException expected, not thrown");
}
- catch (Lucene.Net.QueryParsers.ParseException expected)
+ catch (ParseException expected)
{
}
}
- [Test]
- public virtual void TestCustomQueryParserWildcard()
+ [Test]
+ public virtual void TestCustomQueryParserWildcard()
{
try
{
new QPTestParser("contents", new WhitespaceAnalyzer()).Parse("a?t");
+ Assert.Fail("Wildcard queries should not be allowed");
}
- catch (Lucene.Net.Analysis.Standard.ParseException expected)
+ catch (ParseException expected)
{
- return ;
+ // expected exception
}
- Assert.Fail("Wildcard queries should not be allowed");
}
- [Test]
- public virtual void TestCustomQueryParserFuzzy()
+ [Test]
+ public virtual void TestCustomQueryParserFuzzy()
{
try
{
new QPTestParser("contents", new WhitespaceAnalyzer()).Parse("xunit~");
+ Assert.Fail("Fuzzy queries should not be allowed");
}
- catch (Lucene.Net.Analysis.Standard.ParseException expected)
+ catch (ParseException expected)
{
- return ;
+ // expected exception
}
- Assert.Fail("Fuzzy queries should not be allowed");
}
- [Test]
- public virtual void TestBooleanQuery()
+ [Test]
+ public virtual void TestBooleanQuery()
{
BooleanQuery.SetMaxClauseCount(2);
try
{
- QueryParsers.QueryParser.Parse("one two three", "Field", new WhitespaceAnalyzer());
+ Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new WhitespaceAnalyzer());
+ qp.Parse("one two three");
Assert.Fail("ParseException expected due to too many boolean clauses");
}
- catch (Lucene.Net.QueryParsers.ParseException expected)
+ catch (ParseException expected)
{
// too many boolean clauses, so ParseException is expected
}
-
- BooleanQuery.SetMaxClauseCount(originalMaxClauses);
}
- [TestFixtureTearDown]
+ /// <summary> This test differs from TestPrecedenceQueryParser</summary>
+ [Test]
+ public virtual void TestPrecedence()
+ {
+ Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new WhitespaceAnalyzer());
+ Query query1 = qp.Parse("A AND B OR C AND D");
+ Query query2 = qp.Parse("+A +B +C +D");
+ Assert.AreEqual(query1, query2);
+ }
+
+ [TestFixtureTearDown]
public virtual void TearDown()
{
BooleanQuery.SetMaxClauseCount(originalMaxClauses);
Added: incubator/lucene.net/trunk/C#/src/Test/Search/BaseTestRangeFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/BaseTestRangeFilter.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/BaseTestRangeFilter.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/BaseTestRangeFilter.cs Sat Jun 3 19:41:13 2006
@@ -0,0 +1,127 @@
+/*
+ * Copyright 2004 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
+namespace Lucene.Net.Search
+{
+ [TestFixture]
+ public class BaseTestRangeFilter
+ {
+
+ public const bool F = false;
+ public const bool T = true;
+
+ internal RAMDirectory index = new RAMDirectory();
+ internal System.Random rand = new System.Random((System.Int32) 101); // use a set seed so the test is deterministic
+
+ internal int maxR = System.Int32.MinValue;
+ internal int minR = System.Int32.MaxValue;
+
+ internal int minId = 0;
+ internal int maxId = 10000;
+
+ internal static readonly int intLength = System.Convert.ToString(System.Int32.MaxValue).Length;
+
+ /// <summary> a simple padding function that should work with any int</summary>
+ public static System.String Pad(int n)
+ {
+ System.Text.StringBuilder b = new System.Text.StringBuilder(40);
+ System.String p = "0";
+ if (n < 0)
+ {
+ p = "-";
+ n = System.Int32.MaxValue + n + 1;
+ }
+ b.Append(p);
+ System.String s = System.Convert.ToString(n);
+ for (int i = s.Length; i <= intLength; i++)
+ {
+ b.Append("0");
+ }
+ b.Append(s);
+
+ return b.ToString();
+ }
+
+ public BaseTestRangeFilter(System.String name)
+ {
+ Build();
+ }
+ public BaseTestRangeFilter()
+ {
+ Build();
+ }
+
+ private void Build()
+ {
+ try
+ {
+
+ /* build an index */
+ IndexWriter writer = new IndexWriter(index, new SimpleAnalyzer(), T);
+
+ for (int d = minId; d <= maxId; d++)
+ {
+ Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+ doc.Add(new Field("id", Pad(d), Field.Store.YES, Field.Index.UN_TOKENIZED));
+ int r = rand.Next();
+ if (maxR < r)
+ {
+ maxR = r;
+ }
+ if (r < minR)
+ {
+ minR = r;
+ }
+ doc.Add(new Field("rand", Pad(r), Field.Store.YES, Field.Index.UN_TOKENIZED));
+ doc.Add(new Field("body", "body", Field.Store.YES, Field.Index.UN_TOKENIZED));
+ writer.AddDocument(doc);
+ }
+
+ writer.Optimize();
+ writer.Close();
+ }
+ catch (System.Exception e)
+ {
+ throw new System.Exception("can't build index", e);
+ }
+ }
+
+ [Test]
+ public virtual void TestPad()
+ {
+
+ int[] tests = new int[]{- 9999999, - 99560, - 100, - 3, - 1, 0, 3, 9, 10, 1000, 999999999};
+ for (int i = 0; i < tests.Length - 1; i++)
+ {
+ int a = tests[i];
+ int b = tests[i + 1];
+ System.String aa = Pad(a);
+ System.String bb = Pad(b);
+ System.String label = a + ":" + aa + " vs " + b + ":" + bb;
+ Assert.AreEqual(aa.Length, bb.Length, "length of " + label);
+ Assert.IsTrue(String.CompareOrdinal(aa, bb) < 0, "compare less than " + label);
+ }
+ }
+ }
+}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/CheckHits.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/CheckHits.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/CheckHits.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/CheckHits.cs Sat Jun 3 19:41:13 2006
@@ -1,5 +1,5 @@
/*
- * Copyright 2004 The Apache Software Foundation
+ * Copyright 2005 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -14,56 +14,144 @@
* limitations under the License.
*/
-/* 20 May 2004: Factored out of spans tests. Please leave this comment
-until this class is evt. also used by tests in search package.
-*/
using System;
using NUnit.Framework;
+
namespace Lucene.Net.Search
{
- public class CheckHits
+
+ public class CheckHits
{
- public static void CheckHits_(Query query, System.String defaultFieldName, Searcher searcher, int[] results, TestCase testCase)
+ /// <summary>Tests that a query has expected document number results.</summary>
+ public static void CheckHits_Renamed_Method(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
{
- Hits hits = searcher.Search(query);
+ Hits hits = searcher.Search(query);
- System.Collections.Hashtable correct = new System.Collections.Hashtable();
- for (int i = 0; i < results.Length; i++)
- {
- correct.Add((System.Int32) results[i], null);
- }
+ System.Collections.Hashtable correct = new System.Collections.Hashtable();
+ for (int i = 0; i < results.Length; i++)
+ {
+ correct.Add((System.Int32) results[i], (System.Int32) results[i]);
+ }
- System.Collections.Hashtable actual = new System.Collections.Hashtable();
- for (int i = 0; i < hits.Length(); i++)
- {
- actual.Add((System.Int32) hits.Id(i), null);
- }
+ System.Collections.Hashtable actual = new System.Collections.Hashtable();
+ for (int i = 0; i < hits.Length(); i++)
+ {
+ actual.Add((System.Int32) hits.Id(i), (System.Int32) hits.Id(i));
+ }
- //Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));
if (correct.Count != 0)
{
System.Collections.IDictionaryEnumerator iter = correct.GetEnumerator();
- bool status = false;
- while (iter.MoveNext())
+ System.Collections.IDictionaryEnumerator iter2 = actual.GetEnumerator();
+ bool status = true;
+ while (iter2.MoveNext() && iter.MoveNext())
{
- status = actual.ContainsKey(iter.Key);
- if (status == false)
+ if (iter2.Key.ToString() != iter.Key.ToString())
+ {
+ status = false;
break;
+ }
}
Assert.IsTrue(status, query.ToString(defaultFieldName));
}
- }
+ }
- public static void PrintDocNrs(Hits hits)
+ /// <summary>Tests that a Hits has an expected order of documents </summary>
+ public static void CheckDocIds(System.String mes, int[] results, Hits hits)
{
- System.Console.Out.Write("new int[] {");
- for (int i = 0; i < hits.Length(); i++)
+ Assert.AreEqual(results.Length, hits.Length(), mes + " nr of hits");
+ for (int i = 0; i < results.Length; i++)
+ {
+ Assert.AreEqual(results[i], hits.Id(i), mes + " doc nrs for hit " + i);
+ }
+ }
+
+ /// <summary>Tests that two queries have an expected order of documents,
+ /// and that the two queries have the same score values.
+ /// </summary>
+ public static void CheckHitsQuery(Query query, Hits hits1, Hits hits2, int[] results)
+ {
+
+ CheckDocIds("hits1", results, hits1);
+ CheckDocIds("hits2", results, hits2);
+ CheckEqual(query, hits1, hits2);
+ }
+
+ public static void CheckEqual(Query query, Hits hits1, Hits hits2)
+ {
+ float scoreTolerance = 1.0e-6f;
+ if (hits1.Length() != hits2.Length())
+ {
+ Assert.Fail("Unequal lengths: hits1=" + hits1.Length() + ",hits2=" + hits2.Length());
+ }
+ for (int i = 0; i < hits1.Length(); i++)
+ {
+ if (hits1.Id(i) != hits2.Id(i))
+ {
+ Assert.Fail("Hit " + i + " docnumbers don't match\n" + Hits2str(hits1, hits2, 0, 0) + "for query:" + query.ToString());
+ }
+
+ if ((hits1.Id(i) != hits2.Id(i)) || System.Math.Abs(hits1.Score(i) - hits2.Score(i)) > scoreTolerance)
+ {
+ Assert.Fail("Hit " + i + ", doc nrs " + hits1.Id(i) + " and " + hits2.Id(i) + "\nunequal : " + hits1.Score(i) + "\n and: " + hits2.Score(i) + "\nfor query:" + query.ToString());
+ }
+ }
+ }
+
+ public static System.String Hits2str(Hits hits1, Hits hits2, int start, int end)
+ {
+ System.Text.StringBuilder sb = new System.Text.StringBuilder();
+ int len1 = hits1 == null?0:hits1.Length();
+ int len2 = hits2 == null?0:hits2.Length();
+ if (end <= 0)
+ {
+ end = System.Math.Max(len1, len2);
+ }
+
+ sb.Append("Hits length1=" + len1 + "\tlength2=" + len2);
+
+ sb.Append("\n");
+ for (int i = start; i < end; i++)
+ {
+ sb.Append("hit=" + i + ":");
+ if (i < len1)
+ {
+ sb.Append(" doc" + hits1.Id(i) + "=" + hits1.Score(i));
+ }
+ else
+ {
+ sb.Append(" ");
+ }
+ sb.Append(",\t");
+ if (i < len2)
+ {
+ sb.Append(" doc" + hits2.Id(i) + "=" + hits2.Score(i));
+ }
+ sb.Append("\n");
+ }
+ return sb.ToString();
+ }
+
+
+ public static System.String TopdocsString(TopDocs docs, int start, int end)
+ {
+ System.Text.StringBuilder sb = new System.Text.StringBuilder();
+ sb.Append("TopDocs totalHits=" + docs.totalHits + " top=" + docs.scoreDocs.Length + "\n");
+ if (end <= 0)
+ end = docs.scoreDocs.Length;
+ else
+ end = System.Math.Min(end, docs.scoreDocs.Length);
+ for (int i = start; i < end; i++)
{
- System.Console.Out.Write(hits.Id(i));
- if (i != hits.Length() - 1)
- System.Console.Out.Write(", ");
+ sb.Append("\t");
+ sb.Append(i);
+ sb.Append(") doc=");
+ sb.Append(docs.scoreDocs[i].doc);
+ sb.Append("\tscore=");
+ sb.Append(docs.scoreDocs[i].score);
+ sb.Append("\n");
}
- System.Console.Out.WriteLine("}");
+ return sb.ToString();
}
}
}
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/MockFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/MockFilter.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/MockFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/MockFilter.cs Sat Jun 3 19:41:13 2006
@@ -13,30 +13,32 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
using System;
using IndexReader = Lucene.Net.Index.IndexReader;
+
namespace Lucene.Net.Search
{
[Serializable]
- public class MockFilter:Filter
+ public class MockFilter : Filter
{
- private bool wasCalled_Renamed_Field;
+ private bool wasCalled;
- public override System.Collections.BitArray Bits(IndexReader reader)
+ public override System.Collections.BitArray Bits(IndexReader reader)
{
- wasCalled_Renamed_Field = true;
+ wasCalled = true;
return new System.Collections.BitArray(64);
}
public virtual void Clear()
{
- wasCalled_Renamed_Field = false;
+ wasCalled = false;
}
public virtual bool WasCalled()
{
- return wasCalled_Renamed_Field;
+ return wasCalled;
}
}
}
Added: incubator/lucene.net/trunk/C#/src/Test/Search/QueryUtils.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/QueryUtils.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/QueryUtils.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/QueryUtils.cs Sat Jun 3 19:41:13 2006
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+namespace Lucene.Net.Search
+{
+
+ /// <author> yonik
+ /// </author>
+ public class QueryUtils
+ {
+ [Serializable]
+ private class AnonymousClassQuery : Query
+ {
+ public override System.String ToString(System.String field)
+ {
+ return "My Whacky Query";
+ }
+ override public System.Object Clone()
+ {
+ return null;
+ }
+ }
+
+ /// <summary>Check the types of things query objects should be able to do. </summary>
+ public static void Check(Query q)
+ {
+ CheckHashEquals(q);
+ }
+
+ /// <summary>check very basic hashCode and equals </summary>
+ public static void CheckHashEquals(Query q)
+ {
+ Query q2 = (Query) q.Clone();
+ CheckEqual(q, q2);
+
+ Query q3 = (Query) q.Clone();
+ q3.SetBoost(7.21792348f);
+ CheckUnequal(q, q3);
+
+ // test that a class check is done so that no exception is thrown
+ // in the implementation of equals()
+ Query whacky = new AnonymousClassQuery();
+ whacky.SetBoost(q.GetBoost());
+ CheckUnequal(q, whacky);
+ }
+
+ public static void CheckEqual(Query q1, Query q2)
+ {
+ Assert.AreEqual(q1, q2);
+ Assert.AreEqual(q1.GetHashCode(), q2.GetHashCode());
+ }
+
+ public static void CheckUnequal(Query q1, Query q2)
+ {
+ Assert.IsTrue(!q1.Equals(q2));
+ Assert.IsTrue(!q2.Equals(q1));
+
+ // possible this test can fail on a hash collision... if that
+ // happens, please change test to use a different example.
+ Assert.IsTrue(q1.GetHashCode() != q2.GetHashCode());
+ }
+ }
+}
\ No newline at end of file
Added: incubator/lucene.net/trunk/C#/src/Test/Search/Regex/TestRegexQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/Regex/TestRegexQuery.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/Regex/TestRegexQuery.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/Regex/TestRegexQuery.cs Sat Jun 3 19:41:13 2006
@@ -0,0 +1,136 @@
+/*
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using Query = Lucene.Net.Search.Query;
+using SpanNearQuery = Lucene.Net.Search.Spans.SpanNearQuery;
+using SpanQuery = Lucene.Net.Search.Spans.SpanQuery;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
+namespace Lucene.Net.Search.Regex
+{
+ [TestFixture]
+ public class TestRegexQuery
+ {
+ private IndexSearcher searcher;
+ private System.String FN = "field";
+
+
+ [STAThread]
+ public static void Main(System.String[] args)
+ {
+ TestRegexQuery t = new TestRegexQuery();
+ t.SetUp();
+ t.TestRegex1();
+ }
+
+ [TestFixtureSetUp]
+ public virtual void SetUp()
+ {
+ RAMDirectory directory = new RAMDirectory();
+ try
+ {
+ IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true);
+ Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+ doc.Add(new Field(FN, "the quick brown fox jumps over the lazy dog", Field.Store.NO, Field.Index.TOKENIZED));
+ writer.AddDocument(doc);
+ writer.Optimize();
+ writer.Close();
+ searcher = new IndexSearcher(directory);
+ }
+ catch (System.Exception e)
+ {
+ Assert.Fail(e.ToString());
+ }
+ }
+
+ [TestFixtureTearDown]
+ public virtual void TearDown()
+ {
+ try
+ {
+ searcher.Close();
+ }
+ catch (System.Exception e)
+ {
+ Assert.Fail(e.ToString());
+ }
+ }
+
+ private Term NewTerm(System.String value_Renamed)
+ {
+ return new Term(FN, value_Renamed);
+ }
+
+ private int RegexQueryNrHits(System.String regex)
+ {
+ Query query = new RegexQuery(NewTerm(regex));
+ return searcher.Search(query).Length();
+ }
+
+ private int SpanRegexQueryNrHits(System.String regex1, System.String regex2, int slop, bool ordered)
+ {
+ SpanRegexQuery srq1 = new SpanRegexQuery(NewTerm(regex1));
+ SpanRegexQuery srq2 = new SpanRegexQuery(NewTerm(regex2));
+ SpanNearQuery query = new SpanNearQuery(new SpanQuery[]{srq1, srq2}, slop, ordered);
+ return searcher.Search(query).Length();
+ }
+
+ [Test]
+ public virtual void TestRegex1()
+ {
+ Assert.AreEqual(1, RegexQueryNrHits("q.[aeiou]c.*"));
+ }
+
+ [Test]
+ public virtual void TestRegex2()
+ {
+ Assert.AreEqual(0, RegexQueryNrHits(".[aeiou]c.*")); // {{Aroush-1.9}} this test is failing
+ }
+
+ [Test]
+ public virtual void TestRegex3()
+ {
+ Assert.AreEqual(0, RegexQueryNrHits("q.[aeiou]c")); // {{Aroush-1.9}} this test is failing
+ }
+
+ [Test]
+ public virtual void TestSpanRegex1()
+ {
+ Assert.AreEqual(1, SpanRegexQueryNrHits("q.[aeiou]c.*", "dog", 6, true));
+ }
+
+ [Test]
+ public virtual void TestSpanRegex2()
+ {
+ Assert.AreEqual(0, SpanRegexQueryNrHits("q.[aeiou]c.*", "dog", 5, true));
+ }
+
+ // public void testPrefix() throws Exception {
+ // This test currently fails because RegexTermEnum picks "r" as the prefix
+ // but the following "?" makes the "r" optional and should be a hit for the
+ // document matching "over".
+ // Assert.AreEqual(1, regexQueryNrHits("r?over"));
+ // }
+ }
+}
\ No newline at end of file
Added: incubator/lucene.net/trunk/C#/src/Test/Search/Regex/TestSpanRegexQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/Regex/TestSpanRegexQuery.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/Regex/TestSpanRegexQuery.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/Regex/TestSpanRegexQuery.cs Sat Jun 3 19:41:13 2006
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2004 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using Hits = Lucene.Net.Search.Hits;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using SpanNearQuery = Lucene.Net.Search.Spans.SpanNearQuery;
+using SpanQuery = Lucene.Net.Search.Spans.SpanQuery;
+using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
+namespace Lucene.Net.Search.Regex
+{
+ [TestFixture]
+ public class TestSpanRegexQuery
+ {
+ [Test]
+ public virtual void TestSpanRegex()
+ {
+ RAMDirectory directory = new RAMDirectory();
+ IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true);
+ Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+ doc.Add(new Field("field", "the quick brown fox jumps over the lazy dog", Field.Store.NO, Field.Index.TOKENIZED));
+ writer.AddDocument(doc);
+ writer.Optimize();
+ writer.Close();
+
+ IndexSearcher searcher = new IndexSearcher(directory);
+ SpanRegexQuery srq = new SpanRegexQuery(new Term("field", "q.[aeiou]c.*"));
+ SpanTermQuery stq = new SpanTermQuery(new Term("field", "dog"));
+ SpanNearQuery query = new SpanNearQuery(new SpanQuery[]{srq, stq}, 6, true);
+ Hits hits = searcher.Search(query);
+ Assert.AreEqual(1, hits.Length());
+ }
+ }
+}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/SampleComparable.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/SampleComparable.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/SampleComparable.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/SampleComparable.cs Sat Jun 3 19:41:13 2006
@@ -13,11 +13,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
using System;
using IndexReader = Lucene.Net.Index.IndexReader;
using Term = Lucene.Net.Index.Term;
using TermDocs = Lucene.Net.Index.TermDocs;
using TermEnum = Lucene.Net.Index.TermEnum;
+
namespace Lucene.Net.Search
{
@@ -40,7 +42,7 @@
/// </summary>
/// <author> Tim Jones
/// </author>
- /// <version> $Id: SampleComparable.java,v 1.3 2004/05/19 23:05:27 tjones Exp $
+ /// <version> $Id: SampleComparable.java 150348 2004-05-19 23:05:27Z tjones $
/// </version>
/// <since> 1.4
/// </since>
@@ -52,20 +54,20 @@
{
private class AnonymousClassScoreDocComparator : ScoreDocComparator
{
- public AnonymousClassScoreDocComparator(Lucene.Net.Index.IndexReader reader, Lucene.Net.Index.TermEnum enumerator, System.String field, AnonymousClassSortComparatorSource enclosingInstance)
+ public AnonymousClassScoreDocComparator(IndexReader reader, TermEnum enumerator, System.String field, AnonymousClassSortComparatorSource enclosingInstance)
{
InitBlock(reader, enumerator, field, enclosingInstance);
}
- private void InitBlock(Lucene.Net.Index.IndexReader reader, Lucene.Net.Index.TermEnum enumerator, System.String field, AnonymousClassSortComparatorSource enclosingInstance)
+ private void InitBlock(IndexReader reader, TermEnum enumerator, System.String field, AnonymousClassSortComparatorSource enclosingInstance)
{
this.reader = reader;
this.enumerator = enumerator;
this.field = field;
this.enclosingInstance = enclosingInstance;
- cachedValues = Enclosing_Instance.FillCache(reader, enumerator, field);
+ cachedValues = FillCache(reader, enumerator, field);
}
- private Lucene.Net.Index.IndexReader reader;
- private Lucene.Net.Index.TermEnum enumerator;
+ private IndexReader reader;
+ private TermEnum enumerator;
private System.String field;
private AnonymousClassSortComparatorSource enclosingInstance;
public AnonymousClassSortComparatorSource Enclosing_Instance
@@ -76,7 +78,8 @@
}
}
- protected internal System.IComparable[] cachedValues;
+
+ protected internal System.IComparable[] cachedValues;
public virtual int Compare(ScoreDoc i, ScoreDoc j)
{
@@ -108,19 +111,19 @@
}
/// <summary> Returns an array of objects which represent that natural order
- /// of the term values in the given Field.
+ /// of the term values in the given field.
///
/// </summary>
/// <param name="reader"> Terms are in this index.
/// </param>
/// <param name="enumerator">Use this to get the term values and TermDocs.
/// </param>
- /// <param name="fieldname"> Comparables should be for this Field.
+ /// <param name="fieldname"> Comparables should be for this field.
/// </param>
- /// <returns> Array of objects representing natural order of terms in Field.
+ /// <returns> Array of objects representing natural order of terms in field.
/// </returns>
/// <throws> IOException If an error occurs reading the index. </throws>
- protected internal virtual System.IComparable[] FillCache(IndexReader reader, TermEnum enumerator, System.String fieldname)
+ public static System.IComparable[] FillCache(IndexReader reader, TermEnum enumerator, System.String fieldname)
{
System.String field = String.Intern(fieldname);
System.IComparable[] retArray = new System.IComparable[reader.MaxDoc()];
@@ -131,12 +134,12 @@
{
if (enumerator.Term() == null)
{
- throw new System.SystemException("no terms in Field " + field);
+ throw new System.SystemException("no terms in field " + field);
}
do
{
Term term = enumerator.Term();
- if ((System.Object) term.Field() != (System.Object) field)
+ if (term.Field() != field)
break;
System.IComparable termval = GetComparable(term.Text());
termDocs.Seek(enumerator);
@@ -155,7 +158,7 @@
return retArray;
}
- internal virtual System.IComparable GetComparable(System.String termtext)
+ internal static System.IComparable GetComparable(System.String termtext)
{
return new SampleComparable(termtext);
}
@@ -163,27 +166,11 @@
[Serializable]
private class AnonymousClassSortComparator : SortComparator
{
- public /*protected internal*/ override System.IComparable GetComparable(System.String termtext)
+ public override System.IComparable GetComparable(System.String termtext)
{
return new SampleComparable(termtext);
}
}
- public static SortComparatorSource ComparatorSource
- {
- get
- {
- return new AnonymousClassSortComparatorSource();
- }
-
- }
- public static SortComparator Comparator
- {
- get
- {
- return new AnonymousClassSortComparator();
- }
-
- }
internal System.String string_part;
internal System.Int32 int_part;
@@ -204,6 +191,16 @@
return int_part.CompareTo(otherid.int_part);
}
return i;
+ }
+
+ public static SortComparatorSource GetComparatorSource()
+ {
+ return new AnonymousClassSortComparatorSource();
+ }
+
+ public static SortComparator GetComparator()
+ {
+ return new AnonymousClassSortComparator();
}
}
}