You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by ar...@apache.org on 2008/07/15 23:44:10 UTC
svn commit: r677059 [17/19] - in /incubator/lucene.net/trunk/C#/src: ./
Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/
Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Index/
Lucene.Net/Search/ Lucene.Net/Search/Function/...
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestSort.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSort.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSort.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSort.cs Tue Jul 15 14:44:04 2008
@@ -19,11 +19,14 @@
using NUnit.Framework;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using Lucene.Net.Index;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+
using Pattern = System.Text.RegularExpressions.Regex;
namespace Lucene.Net.Search
@@ -38,12 +41,12 @@
/// </author>
/// <since> lucene 1.4
/// </since>
- /// <version> $Id: TestSort.java 332651 2005-11-11 21:19:02Z yonik $
+ /// <version> $Id: TestSort.java 590530 2007-10-31 01:28:25Z gsingers $
/// </version>
[Serializable]
[TestFixture]
- public class TestSort
+ public class TestSort
{
[Serializable]
private class AnonymousClassFilter : Filter
@@ -58,10 +61,10 @@
this.enclosingInstance = enclosingInstance;
}
- private Lucene.Net.Search.TopDocs docs1;
+ private Lucene.Net.Search.TopDocs docs1;
private TestSort enclosingInstance;
- public TestSort Enclosing_Instance
+ public TestSort Enclosing_Instance
{
get
{
@@ -70,7 +73,7 @@
}
- public override System.Collections.BitArray Bits(IndexReader reader)
+ public override System.Collections.BitArray Bits(IndexReader reader)
{
System.Collections.BitArray bs = new System.Collections.BitArray((reader.MaxDoc() % 64 == 0?reader.MaxDoc() / 64:reader.MaxDoc() / 64 + 1) * 64);
bs.Set(docs1.scoreDocs[0].doc, true);
@@ -88,66 +91,68 @@
private Query queryF;
private Query queryG;
private Sort sort;
+
+ private bool serverStarted = false;
+ private int port;
+ //public TestSort(System.String name) : base(name)
+ //{
+ //}
[STAThread]
public static void Main(System.String[] argv)
{
- System.Runtime.Remoting.RemotingConfiguration.Configure("Lucene.Net.Search.TestSort.config");
- System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(new System.Runtime.Remoting.Channels.Http.HttpChannel(8080));
- if (argv == null || argv.Length < 1)
- {
- // NUnit.Core.TestRunner.Run(Suite()); // {{Aroush-1.9}} where is "Run" in NUnit?
- }
- else if ("server".Equals(argv[0]))
- {
- TestSort test = new TestSort();
- try
- {
- test.StartServer();
- System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 500000));
- }
- catch (System.Exception e)
- {
- System.Console.Out.WriteLine(e);
- System.Console.Error.WriteLine(e.StackTrace);
- }
- }
+ System.Runtime.Remoting.RemotingConfiguration.Configure("Lucene.Net.Search.TestSort.config", false);
+ System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(new System.Runtime.Remoting.Channels.Http.HttpChannel(8080), false);
+ if (argv == null || argv.Length < 1)
+ {
+ // NUnit.Core.TestRunner.Run(Suite()); // {{Aroush-1.9}} where is "Run" in NUnit?
+ }
+ else if ("server".Equals(argv[0]))
+ {
+ TestSort test = new TestSort();
+ try
+ {
+ test.StartServer();
+ System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 500000));
+ }
+ catch (System.Exception e)
+ {
+ System.Console.Out.WriteLine(e);
+ System.Console.Error.WriteLine(e.StackTrace);
+ }
+ }
- System.Console.ReadLine();
- }
-
- public static NUnit.Framework.TestCase Suite()
- {
- return null; // return new NUnit.Core.TestSuite(typeof(TestSort)); {{Aroush-1.9}} how do you do this in NUnit?
+ System.Console.ReadLine();
}
static string NDS = System.Globalization.CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator;
- // document data:
- // the tracer field is used to determine which document was hit
- // the contents field is used to search and sort by relevance
- // the int field to sort by int
- // the float field to sort by float
- // the string field to sort by string
- // the i18n field includes accented characters for testing locale-specific sorting
- private System.String[][] data = new System.String[][]{
- // tracer contents int float string custom i18n
- new System.String[]{ "A", "x a", "5", "4f", "c", "A-3", "p\u00EAche"},
- new System.String[]{ "B", "y a", "5", "3" + NDS + "4028235E38", "i", "B-10", "HAT"},
- new System.String[]{ "C", "x a b c", "2147483647", "1" + NDS + "0", "j", "A-2", "p\u00E9ch\u00E9"},
- new System.String[]{ "D", "y a b c", "-1", "0" + NDS + "0f", "a", "C-0", "HUT"},
- new System.String[]{ "E", "x a b c d", "5", "2f", "h", "B-8", "peach"},
- new System.String[]{ "F", "y a b c d", "2", "3" + NDS + "14159f", "g", "B-1", "H\u00C5T"},
- new System.String[]{ "G", "x a b c d", "3", "-1" + NDS + "0", "f", "C-100", "sin"},
- new System.String[]{ "H", "y a b c d", "0", "1" + NDS + "4E-45", "e", "C-88", "H\u00D8T"},
- new System.String[]{ "I", "x a b c d e f", "-2147483648", "1" + NDS + "0e+0", "d", "A-10", "s\u00EDn"},
- new System.String[]{ "J", "y a b c d e f", "4", "" + NDS + "5", "b", "C-7", "HOT"},
- new System.String[]{ "W", "g", "1", null, null, null, null},
- new System.String[]{ "X", "g", "1", "0" + NDS + "1", null, null, null},
- new System.String[]{ "Y", "g", "1", "0" + NDS + "2", null, null, null},
- new System.String[]{ "Z", "f g", null, null, null, null, null}};
-
+ // document data:
+ // the tracer field is used to determine which document was hit
+ // the contents field is used to search and sort by relevance
+ // the int field to sort by int
+ // the float field to sort by float
+ // the string field to sort by string
+ // the i18n field includes accented characters for testing locale-specific sorting
+ private System.String[][] data = new System.String[][]{
+ // tracer contents int float string custom i18n long double
+ new System.String[]{ "A", "x a", "5", "4f", "c", "A-3", "p\u00EAche", "10", "-4" + NDS + "0"},
+ new System.String[]{ "B", "y a", "5", "3" + NDS + "4028235E38", "i", "B-10", "HAT", "1000000000", "40" + NDS + "0"},
+ new System.String[]{ "C", "x a b c", "2147483647", "1" + NDS + "0", "j", "A-2", "p\u00E9ch\u00E9", "99999999", "40" + NDS + "00002343"},
+ //new System.String[]{ "D", "y a b c", "-1", "0" + NDS + "0f", "a", "C-0", "HUT", System.Convert.ToString(System.Int64.MaxValue), System.Convert.ToString(System.Double.MinValue)},
+ //new System.String[]{ "E", "x a b c d", "5", "2f", "h", "B-8", "peach", System.Convert.ToString(System.Int64.MinValue), System.Convert.ToString(System.Double.MaxValue)},
+ new System.String[]{ "D", "y a b c", "-1", "0" + NDS + "0f", "a", "C-0", "HUT", System.Convert.ToString(System.Int64.MaxValue), System.Double.MinValue.ToString("R")},
+ new System.String[]{ "E", "x a b c d", "5", "2f", "h", "B-8", "peach", System.Convert.ToString(System.Int64.MinValue), System.Double.MaxValue.ToString("R")},
+ new System.String[]{ "F", "y a b c d", "2", "3" + NDS + "14159f", "g", "B-1", "H\u00C5T", "-44", "343" + NDS + "034435444"},
+ new System.String[]{ "G", "x a b c d", "3", "-1" + NDS + "0", "f", "C-100", "sin", "323254543543", "4" + NDS + "043544"},
+ new System.String[]{ "H", "y a b c d", "0", "1" + NDS + "4E-45", "e", "C-88", "H\u00D8T", "1023423423005", "4" + NDS + "043545"},
+ new System.String[]{ "I", "x a b c d e f", "-2147483648", "1" + NDS + "0e+0", "d", "A-10", "s\u00EDn", "332422459999", "4" + NDS + "043546"},
+ new System.String[]{ "J", "y a b c d e f", "4", "" + NDS + "5", "b", "C-7", "HOT", "34334543543", "4" + NDS + "0000220343"},
+ new System.String[]{ "W", "g", "1", null, null, null, null, null, null},
+ new System.String[]{ "X", "g", "1", "0" + NDS + "1", null, null, null, null, null},
+ new System.String[]{ "Y", "g", "1", "0" + NDS + "2", null, null, null, null, null},
+ new System.String[]{ "Z", "f g", null, null, null, null, null, null, null}};
// create an index of all the documents, or just the x, or just the y documents
private Searcher GetIndex(bool even, bool odd)
@@ -169,9 +174,13 @@
doc.Add(new Field("string", data[i][4], Field.Store.NO, Field.Index.UN_TOKENIZED));
if (data[i][5] != null)
doc.Add(new Field("custom", data[i][5], Field.Store.NO, Field.Index.UN_TOKENIZED));
- if (data[i][6] != null)
- doc.Add(new Field("i18n", data[i][6], Field.Store.NO, Field.Index.UN_TOKENIZED));
- doc.SetBoost(2); // produce some scores above 1.0
+ if (data[i][6] != null)
+ doc.Add(new Field("i18n", data[i][6], Field.Store.NO, Field.Index.UN_TOKENIZED));
+ if (data[i][7] != null)
+ doc.Add(new Field("long", data[i][7], Field.Store.NO, Field.Index.UN_TOKENIZED));
+ if (data[i][8] != null)
+ doc.Add(new Field("double", data[i][8], Field.Store.NO, Field.Index.UN_TOKENIZED));
+ doc.SetBoost(2); // produce some scores above 1.0
writer.AddDocument(doc);
}
}
@@ -201,7 +210,7 @@
}
[SetUp]
- public virtual void SetUp()
+ public virtual void SetUp()
{
full = GetFullIndex();
searchX = GetXIndex();
@@ -214,12 +223,13 @@
queryG = new TermQuery(new Term("contents", "g"));
sort = new Sort();
- StartServer();
+ if (!serverStarted)
+ StartServer();
}
// test the sorts by score and document number
[Test]
- public virtual void TestBuiltInSorts()
+ public virtual void TestBuiltInSorts()
{
sort = new Sort();
AssertMatches(full, queryX, sort, "ACEGI");
@@ -232,7 +242,7 @@
// test sorts where the type of field is specified
[Test]
- public virtual void TestTypedSort()
+ public virtual void TestTypedSort()
{
sort.SetSort(new SortField[]{new SortField("int", SortField.INT), SortField.FIELD_DOC});
AssertMatches(full, queryX, sort, "IGAEC");
@@ -245,11 +255,19 @@
sort.SetSort(new SortField[]{new SortField("string", SortField.STRING), SortField.FIELD_DOC});
AssertMatches(full, queryX, sort, "AIGEC");
AssertMatches(full, queryY, sort, "DJHFB");
- }
+
+ sort.SetSort(new SortField[]{new SortField("double", SortField.DOUBLE), SortField.FIELD_DOC});
+ AssertMatches(full, queryX, sort, "AGICE");
+ AssertMatches(full, queryY, sort, "DJHBF");
+
+ sort.SetSort(new SortField[]{new SortField("string", SortField.STRING), SortField.FIELD_DOC});
+ AssertMatches(full, queryX, sort, "AIGEC");
+ AssertMatches(full, queryY, sort, "DJHFB");
+ }
// test sorts when there's nothing in the index
[Test]
- public virtual void TestEmptyIndex()
+ public virtual void TestEmptyIndex()
{
Searcher empty = GetEmptyIndex();
@@ -271,7 +289,7 @@
// test sorts where the type of field is determined dynamically
[Test]
- public virtual void TestAutoSort()
+ public virtual void TestAutoSort()
{
sort.SetSort("int");
AssertMatches(full, queryX, sort, "IGAEC");
@@ -288,7 +306,7 @@
// test sorts in reverse
[Test]
- public virtual void TestReverseSort()
+ public virtual void TestReverseSort()
{
sort.SetSort(new SortField[]{new SortField(null, SortField.SCORE, true), SortField.FIELD_DOC});
AssertMatches(full, queryX, sort, "IEGCA");
@@ -313,7 +331,7 @@
// test sorting when the sort field is empty (undefined) for some of the documents
[Test]
- public virtual void TestEmptyFieldSort()
+ public virtual void TestEmptyFieldSort()
{
sort.SetSort("string");
AssertMatches(full, queryF, sort, "ZJI");
@@ -321,13 +339,13 @@
sort.SetSort("string", true);
AssertMatches(full, queryF, sort, "IJZ");
- sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("en")));
- AssertMatches(full, queryF, sort, "ZJI");
+ sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("en")));
+ AssertMatches(full, queryF, sort, "ZJI");
- sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("en"), true));
- AssertMatches(full, queryF, sort, "IJZ");
+ sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("en"), true));
+ AssertMatches(full, queryF, sort, "IJZ");
- sort.SetSort("int");
+ sort.SetSort("int");
AssertMatches(full, queryF, sort, "IZJ");
sort.SetSort("int", true);
@@ -375,7 +393,7 @@
// test sorts using a series of fields
[Test]
- public virtual void TestSortCombos()
+ public virtual void TestSortCombos()
{
sort.SetSort(new System.String[]{"int", "float"});
AssertMatches(full, queryX, sort, "IGEAC");
@@ -389,7 +407,7 @@
// test using a Locale for sorting strings
[Test]
- public virtual void TestLocaleSort()
+ public virtual void TestLocaleSort()
{
sort.SetSort(new SortField[]{new SortField("string", new System.Globalization.CultureInfo("en-US"))});
AssertMatches(full, queryX, sort, "AIGEC");
@@ -400,47 +418,47 @@
AssertMatches(full, queryY, sort, "BFHJD");
}
- // test using various international locales with accented characters
- // (which sort differently depending on locale)
- [Test]
- public virtual void TestInternationalSort()
- {
- sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("en-US")));
- AssertMatches(full, queryY, sort, "BFJHD"); // NOTE: this is "BFJDH" in Java's version
-
- sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("sv" + "-" + "se")));
- AssertMatches(full, queryY, sort, "BJDFH");
-
- sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("da" + "-" + "dk")));
- AssertMatches(full, queryY, sort, "BJDHF");
-
- sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("en-US")));
- AssertMatches(full, queryX, sort, "ECAGI");
-
- sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("fr-FR")));
- AssertMatches(full, queryX, sort, "EACGI");
- }
-
- // Test the MultiSearcher's ability to preserve locale-sensitive ordering
- // by wrapping it around a single searcher
- [Test]
- public virtual void TestInternationalMultiSearcherSort()
- {
- Searcher multiSearcher = new MultiSearcher(new Lucene.Net.Search.Searchable[]{full});
-
- sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("sv" + "-" + "se")));
- AssertMatches(multiSearcher, queryY, sort, "BJDFH");
-
- sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("en-US")));
- AssertMatches(multiSearcher, queryY, sort, "BFJHD"); // NOTE: this is "BFJDH" in Java's version
-
- sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("da" + "-" + "dk")));
- AssertMatches(multiSearcher, queryY, sort, "BJDHF");
- }
+ // test using various international locales with accented characters
+ // (which sort differently depending on locale)
+ [Test]
+ public virtual void TestInternationalSort()
+ {
+ sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("en-US")));
+ AssertMatches(full, queryY, sort, "BFJHD"); // NOTE: this is "BFJDH" in Java's version
+
+ sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("sv" + "-" + "se")));
+ AssertMatches(full, queryY, sort, "BJDFH");
+
+ sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("da" + "-" + "dk")));
+ AssertMatches(full, queryY, sort, "BJDHF");
+
+ sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("en-US")));
+ AssertMatches(full, queryX, sort, "ECAGI");
+
+ sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("fr-FR")));
+ AssertMatches(full, queryX, sort, "EACGI");
+ }
- // test a custom sort function
+ // Test the MultiSearcher's ability to preserve locale-sensitive ordering
+ // by wrapping it around a single searcher
[Test]
- public virtual void TestCustomSorts()
+ public virtual void TestInternationalMultiSearcherSort()
+ {
+ Searcher multiSearcher = new MultiSearcher(new Lucene.Net.Search.Searchable[]{full});
+
+ sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("sv" + "-" + "se")));
+ AssertMatches(multiSearcher, queryY, sort, "BJDFH");
+
+ sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("en-US")));
+ AssertMatches(multiSearcher, queryY, sort, "BFJHD"); // NOTE: this is "BFJDH" in Java's version
+
+ sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("da" + "-" + "dk")));
+ AssertMatches(multiSearcher, queryY, sort, "BJDHF");
+ }
+
+ // test a custom sort function
+ [Test]
+ public virtual void TestCustomSorts()
{
sort.SetSort(new SortField("custom", SampleComparable.GetComparatorSource()));
AssertMatches(full, queryX, sort, "CAIEG");
@@ -455,7 +473,7 @@
// test a variety of sorts using more than one searcher
[Test]
- public virtual void TestMultiSort()
+ public virtual void TestMultiSort()
{
MultiSearcher searcher = new MultiSearcher(new Lucene.Net.Search.Searchable[]{searchX, searchY});
RunMultiSorts(searcher);
@@ -463,7 +481,7 @@
// test a variety of sorts using a parallel multisearcher
[Test]
- public virtual void TestParallelMultiSort()
+ public virtual void TestParallelMultiSort()
{
Searcher searcher = new ParallelMultiSearcher(new Lucene.Net.Search.Searchable[]{searchX, searchY});
RunMultiSorts(searcher);
@@ -471,7 +489,7 @@
// test a variety of sorts using a remote searcher
[Test]
- public virtual void TestRemoteSort()
+ public virtual void TestRemoteSort()
{
Lucene.Net.Search.Searchable searcher = GetRemote();
MultiSearcher multi = new MultiSearcher(new Lucene.Net.Search.Searchable[]{searcher});
@@ -480,7 +498,7 @@
// test custom search when remote
[Test]
- public virtual void TestRemoteCustomSort()
+ public virtual void TestRemoteCustomSort()
{
Lucene.Net.Search.Searchable searcher = GetRemote();
MultiSearcher multi = new MultiSearcher(new Lucene.Net.Search.Searchable[]{searcher});
@@ -498,7 +516,7 @@
// test that the relevancy scores are the same even if
// hits are sorted
[Test]
- public virtual void TestNormalizedScores()
+ public virtual void TestNormalizedScores()
{
// capture relevancy scores
@@ -601,7 +619,7 @@
AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));
}
- [Test]
+ [Test]
public virtual void TestTopDocsScores()
{
@@ -718,7 +736,7 @@
}
}
// System.out.println ("matching \""+buff+"\" against pattern \""+pattern+"\"");
- Pattern p = new System.Text.RegularExpressions.Regex(pattern);
+ Pattern p = new System.Text.RegularExpressions.Regex(pattern);
Assert.IsTrue(p.Match(buff.ToString()).Success);
}
@@ -761,33 +779,84 @@
private Lucene.Net.Search.Searchable GetRemote()
{
- return LookupRemote();
+ return LookupRemote();
}
-
+
private Lucene.Net.Search.Searchable LookupRemote()
{
- return (Lucene.Net.Search.Searchable) Activator.GetObject(typeof(Lucene.Net.Search.Searchable), @"http://localhost:1099/SortedSearchable");
+ return (Lucene.Net.Search.Searchable)Activator.GetObject(typeof(Lucene.Net.Search.Searchable), string.Format("http://localhost:{0}/SortedSearchable", port));
}
-
- public void StartServer()
+
+ private System.Runtime.Remoting.Channels.Http.HttpChannel GetHttpChannel()
{
- try
- {
- System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(new System.Runtime.Remoting.Channels.Http.HttpChannel(1099));
- }
- catch (System.Net.Sockets.SocketException ex)
- {
- if (ex.ErrorCode == 10048) return; // EADDRINUSE?
- throw ex;
- }
+ Random rnd = new Random();
+ port = rnd.Next(1099, 9999);
+ System.Runtime.Remoting.Channels.Http.HttpChannel ch = new System.Runtime.Remoting.Channels.Http.HttpChannel(port);
+ return ch;
+ }
+
+ public void StartServer()
+ {
+ System.Runtime.Remoting.Channels.Http.HttpChannel httpChannel = null;
+
+ while (httpChannel == null)
+ {
+ try
+ {
+ httpChannel = GetHttpChannel();
+ }
+ catch (System.Net.Sockets.SocketException ex)
+ {
+ if (ex.ErrorCode != 10048)
+ throw ex;
+ }
+ }
+
+ try
+ {
+ System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(httpChannel, false);
+ }
+ catch (System.Net.Sockets.SocketException ex)
+ {
+ if (ex.ErrorCode == 10048) return; // EADDRINUSE?
+ throw ex;
+ }
// construct an index
Searcher local = GetFullIndex();
// local.search (queryA, new Sort());
-
+
// publish it
- RemoteSearchable impl = new RemoteSearchable(local);
- System.Runtime.Remoting.RemotingServices.Marshal(impl, "SortedSearchable");
- }
+ RemoteSearchable impl = new RemoteSearchable(local);
+ System.Runtime.Remoting.RemotingServices.Marshal(impl, "SortedSearchable");
+ serverStarted = true;
+ }
+
+
+ //private Lucene.Net.Search.Searchable LookupRemote()
+ //{
+ // return (Lucene.Net.Search.Searchable) Activator.GetObject(typeof(Lucene.Net.Search.Searchable), @"http://localhost:1099/SortedSearchable");
+ //}
+
+ //public void StartServer()
+ //{
+ // try
+ // {
+ // System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(new System.Runtime.Remoting.Channels.Http.HttpChannel(1099), false);
+ // }
+ // catch (System.Net.Sockets.SocketException ex)
+ // {
+ // if (ex.ErrorCode == 10048) return; // EADDRINUSE?
+ // throw ex;
+ // }
+
+ // // construct an index
+ // Searcher local = GetFullIndex();
+ // // local.search (queryA, new Sort());
+
+ // // publish it
+ // RemoteSearchable impl = new RemoteSearchable(local);
+ // System.Runtime.Remoting.RemotingServices.Marshal(impl, "SortedSearchable");
+ //}
}
}
\ No newline at end of file
Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestSpanQueryFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSpanQueryFilter.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSpanQueryFilter.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSpanQueryFilter.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
+using English = Lucene.Net.Util.English;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search
+{
+
+ [TestFixture]
+ public class TestSpanQueryFilter : LuceneTestCase
+ {
+
+ [Test]
+ public virtual void TestFilterWorks()
+ {
+ Directory dir = new RAMDirectory();
+ IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true);
+ for (int i = 0; i < 500; i++)
+ {
+ Document document = new Document();
+ document.Add(new Field("field", English.IntToEnglish(i) + " equals " + English.IntToEnglish(i), Field.Store.NO, Field.Index.TOKENIZED));
+ writer.AddDocument(document);
+ }
+ writer.Close();
+
+ IndexReader reader = IndexReader.Open(dir);
+
+ SpanTermQuery query = new SpanTermQuery(new Term("field", English.IntToEnglish(10).Trim()));
+ SpanQueryFilter filter = new SpanQueryFilter(query);
+ SpanFilterResult result = filter.BitSpans(reader);
+ System.Collections.BitArray bits = result.GetBits();
+ Assert.IsTrue(bits != null, "bits is null and it shouldn't be");
+ Assert.IsTrue(bits.Get(10), "tenth bit is not on");
+ System.Collections.IList spans = result.GetPositions();
+ Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
+ int cardinality = 0;
+ for (int i = 0; i < bits.Count; i++)
+ {
+ if (bits.Get(i)) cardinality++;
+ }
+ Assert.IsTrue(spans.Count == cardinality, "spans Size: " + spans.Count + " is not: " + cardinality);
+ for (System.Collections.IEnumerator iterator = spans.GetEnumerator(); iterator.MoveNext(); )
+ {
+ SpanFilterResult.PositionInfo info = (SpanFilterResult.PositionInfo) iterator.Current;
+ Assert.IsTrue(info != null, "info is null and it shouldn't be");
+ //The doc should indicate the bit is on
+ Assert.IsTrue(bits.Get(info.GetDoc()), "Bit is not on and it should be");
+ //There should be two positions in each
+ Assert.IsTrue(info.GetPositions().Count == 2, "info.getPositions() Size: " + info.GetPositions().Count + " is not: " + 2);
+ }
+ reader.Close();
+ }
+ }
+}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestTermScorer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestTermScorer.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestTermScorer.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestTermScorer.cs Tue Jul 15 14:44:04 2008
@@ -19,19 +19,20 @@
using NUnit.Framework;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
using IndexReader = Lucene.Net.Index.IndexReader;
using IndexWriter = Lucene.Net.Index.IndexWriter;
using Term = Lucene.Net.Index.Term;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
namespace Lucene.Net.Search
{
- [TestFixture]
- public class TestTermScorer
+ [TestFixture]
+ public class TestTermScorer : LuceneTestCase
{
private class AnonymousClassHitCollector : HitCollector
{
@@ -68,10 +69,15 @@
protected internal IndexSearcher indexSearcher;
protected internal IndexReader indexReader;
-
+
+ //public TestTermScorer(System.String s) : base(s)
+ //{
+ //}
+
[SetUp]
- public virtual void SetUp()
+ public override void SetUp()
{
+ base.SetUp();
directory = new RAMDirectory();
@@ -87,14 +93,8 @@
indexReader = indexSearcher.GetIndexReader();
}
- [TearDown]
- public virtual void TearDown()
- {
-
- }
-
[Test]
- public virtual void Test()
+ public virtual void Test()
{
Term allTerm = new Term(FIELD, "all");
@@ -135,7 +135,7 @@
Assert.IsTrue(doc0.score == 1.6931472f, doc0.score + " does not equal: " + 1.6931472f);
}
- [Test]
+ [Test]
public virtual void TestNext()
{
@@ -153,7 +153,7 @@
Assert.IsTrue(ts.Next() == false, "next returned a doc and it should not have");
}
- [Test]
+ [Test]
public virtual void TestSkipTo()
{
@@ -169,7 +169,7 @@
Assert.IsTrue(ts.Doc() == 5, "doc should be number 5");
}
- [Test]
+ [Test]
public virtual void TestExplain()
{
Term allTerm = new Term(FIELD, "all");
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestTermVectors.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestTermVectors.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestTermVectors.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestTermVectors.cs Tue Jul 15 14:44:04 2008
@@ -19,26 +19,30 @@
using NUnit.Framework;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
using Lucene.Net.Index;
using Directory = Lucene.Net.Store.Directory;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
using English = Lucene.Net.Util.English;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
namespace Lucene.Net.Search
{
[TestFixture]
- public class TestTermVectors
+ public class TestTermVectors : LuceneTestCase
{
private IndexSearcher searcher;
private RAMDirectory directory = new RAMDirectory();
-
+ //public TestTermVectors(System.String s) : base(s)
+ //{
+ //}
[SetUp]
- public virtual void SetUp()
+ public override void SetUp()
{
+ base.SetUp();
IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true);
//writer.setUseCompoundFile(true);
//writer.infoStream = System.out;
@@ -71,20 +75,14 @@
searcher = new IndexSearcher(directory);
}
- [TearDown]
- public virtual void TearDown()
- {
-
- }
-
[Test]
- public virtual void Test()
+ public virtual void Test()
{
Assert.IsTrue(searcher != null);
}
[Test]
- public virtual void TestTermVectors_Renamed_Method()
+ public virtual void TestTermVectors_Renamed_Method()
{
Query query = new TermQuery(new Term("field", "seventy"));
try
@@ -99,14 +97,14 @@
Assert.IsTrue(vector.Length == 1);
}
}
- catch (System.IO.IOException e)
+ catch (System.IO.IOException)
{
Assert.IsTrue(false);
}
}
[Test]
- public virtual void TestTermPositionVectors()
+ public virtual void TestTermPositionVectors()
{
Query query = new TermQuery(new Term("field", "zero"));
try
@@ -161,7 +159,7 @@
TermPositionVector posVec = (TermPositionVector) vector[0];
Assert.IsTrue(false);
}
- catch (System.InvalidCastException ignore)
+ catch (System.InvalidCastException)
{
TermFreqVector freqVec = vector[0];
System.String[] terms = freqVec.GetTerms();
@@ -170,14 +168,14 @@
}
}
}
- catch (System.IO.IOException e)
+ catch (System.IO.IOException)
{
Assert.IsTrue(false);
}
}
[Test]
- public virtual void TestTermOffsetVectors()
+ public virtual void TestTermOffsetVectors()
{
Query query = new TermQuery(new Term("field", "fifty"));
try
@@ -194,14 +192,14 @@
//Assert.IsTrue();
}
}
- catch (System.IO.IOException e)
+ catch (System.IO.IOException)
{
Assert.IsTrue(false);
}
}
[Test]
- public virtual void TestKnownSetOfDocuments()
+ public virtual void TestKnownSetOfDocuments()
{
System.String test1 = "eating chocolate in a computer lab"; //6 terms
System.String test2 = "computer in a computer lab"; //5 terms
@@ -303,22 +301,47 @@
//System.out.println("Term: " + term);
int freq = freqs2[i];
Assert.IsTrue(test4.IndexOf(term) != - 1);
- System.Int32 freqInt = -1;
- try
- {
- freqInt = (System.Int32) test4Map[term];
- }
- catch (Exception)
- {
- Assert.IsTrue(false);
- }
- Assert.IsTrue(freqInt == freq);
+ System.Int32 freqInt = -1;
+ try
+ {
+ freqInt = (System.Int32) test4Map[term];
+ }
+ catch (Exception)
+ {
+ Assert.IsTrue(false);
+ }
+ Assert.IsTrue(freqInt == freq);
+ }
+ SortedTermVectorMapper mapper = new SortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
+ knownSearcher.Reader.GetTermFreqVector(hits.Id(1), mapper);
+ System.Collections.Generic.SortedDictionary<Object,Object> vectorEntrySet = mapper.GetTermVectorEntrySet();
+ Assert.IsTrue(vectorEntrySet.Count == 10, "mapper.getTermVectorEntrySet() Size: " + vectorEntrySet.Count + " is not: " + 10);
+ TermVectorEntry last = null;
+ for (System.Collections.IEnumerator iterator = vectorEntrySet.Keys.GetEnumerator(); iterator.MoveNext(); )
+ {
+ TermVectorEntry tve = (TermVectorEntry)iterator.Current;
+ if (tve != null && last != null)
+ {
+ Assert.IsTrue(last.GetFrequency() >= tve.GetFrequency(), "terms are not properly sorted");
+ System.Int32 expectedFreq = (System.Int32)test4Map[tve.GetTerm()];
+ //we expect double the expectedFreq, since there are two fields with the exact same text and we are collapsing all fields
+ Assert.IsTrue(tve.GetFrequency() == 2 * expectedFreq, "Frequency is not correct:");
+ }
+ last = tve;
}
+
+ FieldSortedTermVectorMapper fieldMapper = new FieldSortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
+ knownSearcher.Reader.GetTermFreqVector(hits.Id(1), fieldMapper);
+ System.Collections.IDictionary map = fieldMapper.GetFieldToTerms();
+ Assert.IsTrue(map.Count == 2, "map Size: " + map.Count + " is not: " + 2);
+ vectorEntrySet = (System.Collections.Generic.SortedDictionary<Object,Object>) map["field"];
+ Assert.IsTrue(vectorEntrySet != null, "vectorEntrySet is null and it shouldn't be");
+ Assert.IsTrue(vectorEntrySet.Count == 10, "vectorEntrySet Size: " + vectorEntrySet.Count + " is not: " + 10);
knownSearcher.Close();
}
catch (System.IO.IOException e)
{
- System.Console.Error.WriteLine(e.StackTrace);
+ System.Console.Error.WriteLine(e.StackTrace);
Assert.IsTrue(false);
}
}
@@ -326,7 +349,85 @@
private void SetupDoc(Lucene.Net.Documents.Document doc, System.String text)
{
doc.Add(new Field("field", text, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.YES));
+ doc.Add(new Field("field2", text, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
//System.out.println("Document: " + doc);
}
+
+ // Test only a few docs having vectors
+ [Test]
+ public virtual void TestRareVectors()
+ {
+ IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true);
+ for (int i = 0; i < 100; i++)
+ {
+ Document doc = new Document();
+ doc.Add(new Field("field", English.IntToEnglish(i), Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
+ writer.AddDocument(doc);
+ }
+ for (int i = 0; i < 10; i++)
+ {
+ Document doc = new Document();
+ doc.Add(new Field("field", English.IntToEnglish(100 + i), Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+ writer.AddDocument(doc);
+ }
+
+ writer.Close();
+ searcher = new IndexSearcher(directory);
+
+ Query query = new TermQuery(new Term("field", "hundred"));
+ Hits hits = searcher.Search(query);
+ Assert.AreEqual(10, hits.Length());
+ for (int i = 0; i < hits.Length(); i++)
+ {
+ TermFreqVector[] vector = searcher.Reader.GetTermFreqVectors(hits.Id(i));
+ Assert.IsTrue(vector != null);
+ Assert.IsTrue(vector.Length == 1);
+ }
+ }
+
+
+ // In a single doc, for the same field, mix the term
+ // vectors up
+ [Test]
+ public virtual void TestMixedVectrosVectors()
+ {
+ IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true);
+ Document doc = new Document();
+ doc.Add(new Field("field", "one", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
+ doc.Add(new Field("field", "one", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.YES));
+ doc.Add(new Field("field", "one", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS));
+ doc.Add(new Field("field", "one", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_OFFSETS));
+ doc.Add(new Field("field", "one", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+ writer.AddDocument(doc);
+ writer.Close();
+
+ searcher = new IndexSearcher(directory);
+
+ Query query = new TermQuery(new Term("field", "one"));
+ Hits hits = searcher.Search(query);
+ Assert.AreEqual(1, hits.Length());
+
+ TermFreqVector[] vector = searcher.Reader.GetTermFreqVectors(hits.Id(0));
+ Assert.IsTrue(vector != null);
+ Assert.IsTrue(vector.Length == 1);
+ TermPositionVector tfv = (TermPositionVector) vector[0];
+ Assert.IsTrue(tfv.GetField().Equals("field"));
+ System.String[] terms = tfv.GetTerms();
+ Assert.AreEqual(1, terms.Length);
+ Assert.AreEqual(terms[0], "one");
+ Assert.AreEqual(5, tfv.GetTermFrequencies()[0]);
+
+ int[] positions = tfv.GetTermPositions(0);
+ Assert.AreEqual(5, positions.Length);
+ for (int i = 0; i < 5; i++)
+ Assert.AreEqual(i, positions[i]);
+ TermVectorOffsetInfo[] offsets = tfv.GetOffsets(0);
+ Assert.AreEqual(5, offsets.Length);
+ for (int i = 0; i < 5; i++)
+ {
+ Assert.AreEqual(4 * i, offsets[i].GetStartOffset());
+ Assert.AreEqual(4 * i + 3, offsets[i].GetEndOffset());
+ }
+ }
}
}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestThreadSafe.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestThreadSafe.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestThreadSafe.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestThreadSafe.cs Tue Jul 15 14:44:04 2008
@@ -19,29 +19,29 @@
using NUnit.Framework;
-using Directory = Lucene.Net.Store.Directory;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using Lucene.Net.Documents;
using IndexReader = Lucene.Net.Index.IndexReader;
using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using Lucene.Net.Documents;
namespace Lucene.Net.Search
{
- /// <author> yonik
- /// </author>
- /// <version> $Id: TestThreadSafe.java 472959 2006-11-09 16:21:50Z yonik $
+ /// <summary> </summary>
+ /// <version> $Id: TestThreadSafe.java 598296 2007-11-26 14:52:01Z mikemccand $
/// </version>
- [TestFixture]
- public class TestThreadSafe
+ [TestFixture]
+ public class TestThreadSafe : LuceneTestCase
{
internal System.Random r = new System.Random();
internal Directory dir1;
- internal Directory dir2;
+ //internal Directory dir2;
internal IndexReader ir1;
- internal IndexReader ir2;
+ //internal IndexReader ir2;
internal System.String failure = null;
@@ -126,7 +126,7 @@
catch (System.Exception th)
{
Enclosing_Instance.failure = th.ToString();
- TestCase.Fail(Enclosing_Instance.failure);
+ Assert.Fail(Enclosing_Instance.failure);
}
}
@@ -170,7 +170,7 @@
int flen = r.Next(maxFieldLen);
System.Text.StringBuilder sb = new System.Text.StringBuilder("^ ");
while (sb.Length < flen)
- sb.Append(" " + words[r.Next(words.Length)]);
+ sb.Append(' ').Append(words[r.Next(words.Length)]);
sb.Append(" $");
Field.Store store = Field.Store.YES; // make random later
Field.Index index = Field.Index.TOKENIZED; // make random later
@@ -196,11 +196,11 @@
}
if (failure != null)
{
- TestCase.Fail(failure);
+ Assert.Fail(failure);
}
}
- [Test]
+ [Test]
public virtual void TestLazyLoadThreadSafety()
{
dir1 = new RAMDirectory();
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestWildcard.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestWildcard.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestWildcard.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestWildcard.cs Tue Jul 15 14:44:04 2008
@@ -19,12 +19,15 @@
using NUnit.Framework;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
using IndexWriter = Lucene.Net.Index.IndexWriter;
using Term = Lucene.Net.Index.Term;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
namespace Lucene.Net.Search
{
@@ -32,15 +35,14 @@
/// <summary> TestWildcard tests the '*' and '?' wildcard characters.
///
/// </summary>
- /// <version> $Id: TestWildcard.java 329860 2005-10-31 17:06:29Z bmesser $
+ /// <version> $Id: TestWildcard.java 583534 2007-10-10 16:46:35Z mikemccand $
+ ///
/// </version>
- /// <author> Otis Gospodnetic
- /// </author>
[TestFixture]
- public class TestWildcard
+ public class TestWildcard : LuceneTestCase
{
[Test]
- public virtual void TestEquals()
+ public virtual void TestEquals()
{
WildcardQuery wq1 = new WildcardQuery(new Term("field", "b*a"));
WildcardQuery wq2 = new WildcardQuery(new Term("field", "b*a"));
@@ -61,25 +63,25 @@
Assert.IsFalse(fq.Equals(wq1));
}
- /// <summary> Tests if a WildcardQuery that has no wildcard in the term is rewritten to a single
- /// TermQuery.
- /// </summary>
- [Test]
- public virtual void TestTermWithoutWildcard()
- {
- RAMDirectory indexStore = GetIndexStore("field", new System.String[]{"nowildcard", "nowildcardx"});
- IndexSearcher searcher = new IndexSearcher(indexStore);
-
- Query wq = new WildcardQuery(new Term("field", "nowildcard"));
- AssertMatches(searcher, wq, 1);
-
- wq = searcher.Rewrite(wq);
- Assert.IsTrue(wq is TermQuery);
- }
+ /// <summary> Tests if a WildcardQuery that has no wildcard in the term is rewritten to a single
+ /// TermQuery.
+ /// </summary>
+ [Test]
+ public virtual void TestTermWithoutWildcard()
+ {
+ RAMDirectory indexStore = GetIndexStore("field", new System.String[]{"nowildcard", "nowildcardx"});
+ IndexSearcher searcher = new IndexSearcher(indexStore);
+
+ Query wq = new WildcardQuery(new Term("field", "nowildcard"));
+ AssertMatches(searcher, wq, 1);
+
+ wq = searcher.Rewrite(wq);
+ Assert.IsTrue(wq is TermQuery);
+ }
- /// <summary> Tests Wildcard queries with an asterisk.</summary>
+ /// <summary> Tests Wildcard queries with an asterisk.</summary>
[Test]
- public virtual void TestAsterisk()
+ public virtual void TestAsterisk()
{
RAMDirectory indexStore = GetIndexStore("body", new System.String[]{"metal", "metals"});
IndexSearcher searcher = new IndexSearcher(indexStore);
@@ -117,7 +119,7 @@
/// </summary>
/// <throws> IOException if an error occurs </throws>
[Test]
- public virtual void TestQuestionmark()
+ public virtual void TestQuestionmark()
{
RAMDirectory indexStore = GetIndexStore("body", new System.String[]{"metal", "metals", "mXtals", "mXtXls"});
IndexSearcher searcher = new IndexSearcher(indexStore);
@@ -157,5 +159,106 @@
Hits result = searcher.Search(q);
Assert.AreEqual(expectedMatches, result.Length());
}
+
+ /// <summary> Test that wild card queries are parsed to the correct type and are searched correctly.
+ /// This test looks at both parsing and execution of wildcard queries.
+ /// Although placed here, it also tests prefix queries, verifying that
+	/// prefix queries are not parsed into wild card queries, and vice versa.
+ /// </summary>
+ /// <throws> Exception </throws>
+ [Test]
+ public virtual void TestParsingAndSearching()
+ {
+ System.String field = "content";
+ bool dbg = false;
+ Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser(field, new WhitespaceAnalyzer());
+ qp.SetAllowLeadingWildcard(true);
+ System.String[] docs = new System.String[]{"\\ abcdefg1", "\\79 hijklmn1", "\\\\ opqrstu1"};
+ // queries that should find all docs
+ System.String[] matchAll = new System.String[]{"*", "*1", "**1", "*?", "*?1", "?*1", "**", "***", "\\\\*"};
+ // queries that should find no docs
+ System.String[] matchNone = new System.String[]{"a*h", "a?h", "*a*h", "?a", "a?"};
+ // queries that should be parsed to prefix queries
+ System.String[][] matchOneDocPrefix = new System.String[][]{new System.String[]{"a*", "ab*", "abc*"}, new System.String[]{"h*", "hi*", "hij*", "\\\\7*"}, new System.String[]{"o*", "op*", "opq*", "\\\\\\\\*"}};
+ // queries that should be parsed to wildcard queries
+ System.String[][] matchOneDocWild = new System.String[][]{new System.String[]{"*a*", "*ab*", "*abc**", "ab*e*", "*g?", "*f?1", "abc**"}, new System.String[]{"*h*", "*hi*", "*hij**", "hi*k*", "*n?", "*m?1", "hij**"}, new System.String[]{"*o*", "*op*", "*opq**", "op*q*", "*u?", "*t?1", "opq**"}};
+
+ // prepare the index
+ RAMDirectory dir = new RAMDirectory();
+ IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer());
+ for (int i = 0; i < docs.Length; i++)
+ {
+ Document doc = new Document();
+ doc.Add(new Field(field, docs[i], Field.Store.NO, Field.Index.TOKENIZED));
+ iw.AddDocument(doc);
+ }
+ iw.Close();
+
+ IndexSearcher searcher = new IndexSearcher(dir);
+
+ // test queries that must find all
+ for (int i = 0; i < matchAll.Length; i++)
+ {
+ System.String qtxt = matchAll[i];
+ Query q = qp.Parse(qtxt);
+ if (dbg)
+ {
+ System.Console.Out.WriteLine("matchAll: qtxt=" + qtxt + " q=" + q + " " + q.GetType().FullName);
+ }
+ Hits hits = searcher.Search(q);
+ Assert.AreEqual(docs.Length, hits.Length());
+ }
+
+ // test queries that must find none
+ for (int i = 0; i < matchNone.Length; i++)
+ {
+ System.String qtxt = matchNone[i];
+ Query q = qp.Parse(qtxt);
+ if (dbg)
+ {
+ System.Console.Out.WriteLine("matchNone: qtxt=" + qtxt + " q=" + q + " " + q.GetType().FullName);
+ }
+ Hits hits = searcher.Search(q);
+ Assert.AreEqual(0, hits.Length());
+ }
+
+ // test queries that must be prefix queries and must find only one doc
+ for (int i = 0; i < matchOneDocPrefix.Length; i++)
+ {
+ for (int j = 0; j < matchOneDocPrefix[i].Length; j++)
+ {
+ System.String qtxt = matchOneDocPrefix[i][j];
+ Query q = qp.Parse(qtxt);
+ if (dbg)
+ {
+ System.Console.Out.WriteLine("match 1 prefix: doc=" + docs[i] + " qtxt=" + qtxt + " q=" + q + " " + q.GetType().FullName);
+ }
+ Assert.AreEqual(typeof(PrefixQuery), q.GetType());
+ Hits hits = searcher.Search(q);
+ Assert.AreEqual(1, hits.Length());
+ Assert.AreEqual(i, hits.Id(0));
+ }
+ }
+
+ // test queries that must be wildcard queries and must find only one doc
+ for (int i = 0; i < matchOneDocPrefix.Length; i++)
+ {
+ for (int j = 0; j < matchOneDocWild[i].Length; j++)
+ {
+ System.String qtxt = matchOneDocWild[i][j];
+ Query q = qp.Parse(qtxt);
+ if (dbg)
+ {
+ System.Console.Out.WriteLine("match 1 wild: doc=" + docs[i] + " qtxt=" + qtxt + " q=" + q + " " + q.GetType().FullName);
+ }
+ Assert.AreEqual(typeof(WildcardQuery), q.GetType());
+ Hits hits = searcher.Search(q);
+ Assert.AreEqual(1, hits.Length());
+ Assert.AreEqual(i, hits.Id(0));
+ }
+ }
+
+ searcher.Close();
+ }
}
}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/SearchTest.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/SearchTest.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/SearchTest.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/SearchTest.cs Tue Jul 15 14:44:04 2008
@@ -84,8 +84,8 @@
internal static long Time(int year, int month, int day)
{
- System.DateTime calendar = new System.DateTime(year, month, day, 0, 0, 0, 0, new System.Globalization.GregorianCalendar());
- return calendar.Ticks;
+ System.DateTime calendar = new System.DateTime(year, month, day, 0, 0, 0, 0, new System.Globalization.GregorianCalendar());
+ return calendar.Ticks;
}
}
}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMDirectory.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Store/MockRAMDirectory.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMDirectory.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMDirectory.cs Tue Jul 15 14:44:04 2008
@@ -29,49 +29,82 @@
[Serializable]
public class MockRAMDirectory : RAMDirectory
{
- internal long maxSize;
+ internal long maxSize;
// Max actual bytes used. This is set by MockRAMOutputStream:
internal long maxUsedSize;
internal double randomIOExceptionRate;
internal System.Random randomState;
+ internal bool noDeleteOpenFile = true;
+
+ // NOTE: we cannot initialize the Map here due to the
+ // order in which our constructor actually does this
+ // member initialization vs when it calls super. It seems
+ // like super is called, then our members are initialized:
+ internal System.Collections.IDictionary openFiles;
public MockRAMDirectory() : base()
{
+ if (openFiles == null)
+ {
+ openFiles = new System.Collections.Hashtable();
+ }
}
public MockRAMDirectory(System.String dir) : base(dir)
{
+ if (openFiles == null)
+ {
+ openFiles = new System.Collections.Hashtable();
+ }
}
public MockRAMDirectory(Directory dir) : base(dir)
{
+ if (openFiles == null)
+ {
+ openFiles = new System.Collections.Hashtable();
+ }
}
public MockRAMDirectory(System.IO.FileInfo dir) : base(dir)
{
+ if (openFiles == null)
+ {
+ openFiles = new System.Collections.Hashtable();
+ }
+ }
+
+ public virtual void SetMaxSizeInBytes(long maxSize)
+ {
+ this.maxSize = maxSize;
}
-
- virtual public long GetMaxSizeInBytes()
- {
- return this.maxSize;
- }
-
- virtual public void SetMaxSizeInBytes(long maxSize)
- {
- this.maxSize = maxSize;
- }
-
- /// <summary> Returns the peek actual storage used (bytes) in this
- /// directory.
- /// </summary>
- virtual public long GetMaxUsedSizeInBytes()
- {
- return this.maxUsedSize;
- }
-
- public virtual void ResetMaxUsedSizeInBytes()
+ public virtual long GetMaxSizeInBytes()
+ {
+ return this.maxSize;
+ }
+
+        /// <summary> Returns the peak actual storage used (bytes) in this
+ /// directory.
+ /// </summary>
+ public virtual long GetMaxUsedSizeInBytes()
+ {
+ return this.maxUsedSize;
+ }
+ public virtual void ResetMaxUsedSizeInBytes()
{
this.maxUsedSize = GetRecomputedActualSizeInBytes();
}
+        /// <summary> Emulate Windows, where deleting an open file is not
+ /// allowed (raise IOException).
+ /// </summary>
+ public virtual void SetNoDeleteOpenFile(bool value_Renamed)
+ {
+ this.noDeleteOpenFile = value_Renamed;
+ }
+ public virtual bool GetNoDeleteOpenFile()
+ {
+ return noDeleteOpenFile;
+ }
+
/// <summary> If 0.0, no exceptions will be thrown. Else this should
/// be a double 0.0 - 1.0. We will randomly throw an
/// IOException on the first write to an OutputStream based
@@ -83,7 +116,6 @@
// seed so we have deterministic behaviour:
randomState = new System.Random((System.Int32) seed);
}
-
public virtual double GetRandomIOExceptionRate()
{
return randomIOExceptionRate;
@@ -91,62 +123,218 @@
internal virtual void MaybeThrowIOException()
{
- if (randomIOExceptionRate > 0.0)
- {
- int number = System.Math.Abs(randomState.Next() % 1000);
- if (number < randomIOExceptionRate * 1000)
- {
- throw new System.IO.IOException("a random IOException");
- }
- }
- }
+ if (randomIOExceptionRate > 0.0)
+ {
+ int number = System.Math.Abs(randomState.Next() % 1000);
+ if (number < randomIOExceptionRate * 1000)
+ {
+ throw new System.IO.IOException("a random IOException");
+ }
+ }
+ }
+
+ public override void DeleteFile(System.String name)
+ {
+ lock (this)
+ {
+ lock (openFiles.SyncRoot)
+ {
+ if (noDeleteOpenFile && openFiles.Contains(name))
+ {
+ throw new System.IO.IOException("MockRAMDirectory: file \"" + name + "\" is still open: cannot delete");
+ }
+ }
+ base.DeleteFile(name);
+ }
+ }
public override IndexOutput CreateOutput(System.String name)
{
+ if (openFiles == null)
+ {
+ openFiles = new System.Collections.Hashtable();
+ }
+ lock (openFiles.SyncRoot)
+ {
+ if (noDeleteOpenFile && openFiles.Contains(name))
+ throw new System.IO.IOException("MockRAMDirectory: file \"" + name + "\" is still open: cannot overwrite");
+ }
RAMFile file = new RAMFile(this);
lock (this)
{
RAMFile existing = (RAMFile) fileMap_ForNUnitTest[name];
- if (existing != null)
+ // Enforce write once:
+ if (existing != null && !name.Equals("segments.gen"))
+ throw new System.IO.IOException("file " + name + " already exists");
+ else
{
- sizeInBytes_ForNUnitTest -= existing.sizeInBytes_ForNUnitTest;
- existing.directory_ForNUnitTest = null;
+ if (existing != null)
+ {
+ sizeInBytes_ForNUnitTest -= existing.sizeInBytes_ForNUnitTest;
+ existing.directory_ForNUnitTest = null;
+ }
+
+ fileMap_ForNUnitTest[name] = file;
}
- fileMap_ForNUnitTest[name] = file;
}
return new MockRAMOutputStream(this, file);
}
-
- /// <summary>Provided for testing purposes. Use sizeInBytes() instead. </summary>
- virtual internal long GetRecomputedSizeInBytes()
- {
- lock (this)
- {
- long size = 0;
- System.Collections.IEnumerator it = fileMap_ForNUnitTest.Values.GetEnumerator();
- while (it.MoveNext())
- {
- size += ((RAMFile) it.Current).GetSizeInBytes_ForNUnitTest();
- }
- return size;
- }
- }
-
- /// <summary>Like getRecomputedSizeInBytes(), but, uses actual file
- /// lengths rather than buffer allocations (which are
- /// quantized up to nearest
- /// BufferedIndexOutput.BUFFER_SIZE (now 1024) bytes.
- /// </summary>
- virtual internal long GetRecomputedActualSizeInBytes()
- {
- long size = 0;
- System.Collections.IEnumerator it = fileMap_ForNUnitTest.Values.GetEnumerator();
- while (it.MoveNext())
- {
- size += ((RAMFile) it.Current).length_ForNUnitTest;
- }
- return size;
- }
- }
+
+ public override IndexInput OpenInput(System.String name)
+ {
+ RAMFile file;
+ lock (this)
+ {
+ file = (RAMFile)fileMap_ForNUnitTest[name];
+ }
+ if (file == null)
+ throw new System.IO.FileNotFoundException(name);
+ else
+ {
+ lock (openFiles.SyncRoot)
+ {
+ if (openFiles.Contains(name))
+ {
+ System.Int32 v = (System.Int32) openFiles[name];
+ v = (System.Int32) (v + 1);
+ openFiles[name] = v;
+ }
+ else
+ {
+ openFiles[name] = 1;
+ }
+ }
+ }
+ return new MockRAMInputStream(this, name, file);
+ }
+
+ /// <summary>Provided for testing purposes. Use sizeInBytes() instead. </summary>
+ public long GetRecomputedSizeInBytes()
+ {
+ lock (this)
+ {
+ long size = 0;
+ System.Collections.IEnumerator it = fileMap_ForNUnitTest.Values.GetEnumerator();
+ while (it.MoveNext())
+ {
+ size += ((RAMFile)it.Current).GetSizeInBytes_ForNUnitTest();
+ }
+ return size;
+ }
+ }
+
+        /// <summary>Like getRecomputedSizeInBytes(), but uses actual file
+ /// lengths rather than buffer allocations (which are
+ /// quantized up to nearest
+ /// RAMOutputStream.BUFFER_SIZE (now 1024) bytes.
+ /// </summary>
+
+ public long GetRecomputedActualSizeInBytes()
+ {
+ lock (this)
+ {
+ long size = 0;
+ System.Collections.IEnumerator it = fileMap_ForNUnitTest.Values.GetEnumerator();
+ while (it.MoveNext())
+ {
+ size += ((RAMFile)it.Current).length_ForNUnitTest;
+ }
+ return size;
+ }
+ }
+
+ public override void Close()
+ {
+ if (openFiles == null)
+ {
+ openFiles = new System.Collections.Hashtable();
+ }
+ lock (openFiles.SyncRoot)
+ {
+ if (noDeleteOpenFile && openFiles.Count > 0)
+ {
+ // RuntimeException instead of IOException because
+ // super() does not throw IOException currently:
+ throw new System.SystemException("MockRAMDirectory: cannot close: there are still open files: " + openFiles.ToString());
+ }
+ }
+ }
+
+ /// <summary> Objects that represent fail-able conditions. Objects of a derived
+ /// class are created and registered with the mock directory. After
+ /// register, each object will be invoked once for each first write
+ /// of a file, giving the object a chance to throw an IOException.
+ /// </summary>
+ public class Failure
+ {
+ /// <summary> eval is called on the first write of every new file.</summary>
+ public virtual void Eval(MockRAMDirectory dir)
+ {
+ }
+
+ /// <summary> reset should set the state of the failure to its default
+ /// (freshly constructed) state. Reset is convenient for tests
+ /// that want to create one failure object and then reuse it in
+ /// multiple cases. This, combined with the fact that Failure
+ /// subclasses are often anonymous classes makes reset difficult to
+ /// do otherwise.
+ ///
+ /// A typical example of use is
+ /// Failure failure = new Failure() { ... };
+ /// ...
+ /// mock.failOn(failure.reset())
+ /// </summary>
+ public virtual Failure Reset()
+ {
+ return this;
+ }
+
+ protected internal bool doFail;
+
+ public virtual void SetDoFail()
+ {
+ doFail = true;
+ }
+
+ public virtual void ClearDoFail()
+ {
+ doFail = false;
+ }
+ }
+
+ internal System.Collections.ArrayList failures;
+
+ /// <summary> add a Failure object to the list of objects to be evaluated
+ /// at every potential failure point
+ /// </summary>
+ public virtual void FailOn(Failure fail)
+ {
+ lock (this)
+ {
+ if (failures == null)
+ {
+ failures = new System.Collections.ArrayList();
+ }
+ failures.Add(fail);
+ }
+ }
+
+ /// <summary> Iterate through the failures list, giving each object a
+ /// chance to throw an IOE
+ /// </summary>
+ internal virtual void MaybeThrowDeterministicException()
+ {
+ lock (this)
+ {
+ if (failures != null)
+ {
+ for (int i = 0; i < failures.Count; i++)
+ {
+ ((Failure) failures[i]).Eval(this);
+ }
+ }
+ }
+ }
+ }
}
\ No newline at end of file
Added: incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMInputStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Store/MockRAMInputStream.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMInputStream.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMInputStream.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+namespace Lucene.Net.Store
+{
+
+ /// <summary> Used by MockRAMDirectory to create an input stream that
+ /// keeps track of when it's been closed.
+ /// </summary>
+
+ public class MockRAMInputStream : RAMInputStream, System.ICloneable
+ {
+ private MockRAMDirectory dir;
+ private System.String name;
+ private bool isClone;
+
+ /// <summary>Construct an empty output buffer. </summary>
+ /// <throws> IOException </throws>
+ public MockRAMInputStream(MockRAMDirectory dir, System.String name, RAMFile f):base(f)
+ {
+ this.name = name;
+ this.dir = dir;
+ }
+
+ public override void Close()
+ {
+ base.Close();
+ // Pending resolution on LUCENE-686 we may want to
+ // remove the conditional check so we also track that
+ // all clones get closed:
+ if (!isClone)
+ {
+ lock (dir.openFiles.SyncRoot)
+ {
+ System.Int32 v = (System.Int32) dir.openFiles[name];
+ if (v == 1)
+ {
+ dir.openFiles.Remove(name);
+ }
+ else
+ {
+ v = (System.Int32) (v - 1);
+ dir.openFiles[name] = v;
+ }
+ }
+ }
+ }
+
+ public override System.Object Clone()
+ {
+ MockRAMInputStream clone = (MockRAMInputStream) base.Clone();
+ clone.isClone = true;
+ // Pending resolution on LUCENE-686 we may want to
+ // uncomment this code so that we also track that all
+ // clones get closed:
+ /*
+ synchronized(dir.openFiles) {
+ if (dir.openFiles.containsKey(name)) {
+ Integer v = (Integer) dir.openFiles.get(name);
+ v = new Integer(v.intValue()+1);
+ dir.openFiles.put(name, v);
+ } else {
+ throw new RuntimeException("BUG: cloned file was not open?");
+ }
+ }
+ */
+ return clone;
+ }
+ }
+}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMOutputStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Store/MockRAMOutputStream.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMOutputStream.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMOutputStream.cs Tue Jul 15 14:44:04 2008
@@ -31,13 +31,15 @@
private MockRAMDirectory dir;
private bool first = true;
+ internal byte[] singleByte = new byte[1];
+
/// <summary>Construct an empty output buffer. </summary>
public MockRAMOutputStream(MockRAMDirectory dir, RAMFile f) : base(f)
{
this.dir = dir;
}
- public virtual void Close()
+ public override void Close()
{
base.Close();
@@ -50,7 +52,19 @@
}
}
- public override void FlushBuffer(byte[] src, int len)
+ public override void Flush()
+ {
+ dir.MaybeThrowDeterministicException();
+ base.Flush();
+ }
+
+ public override void WriteByte(byte b)
+ {
+ singleByte[0] = b;
+ WriteBytes(singleByte, 0, 1);
+ }
+
+ public override void WriteBytes(byte[] b, int offset, int len)
{
long freeSpace = dir.maxSize - dir.SizeInBytes();
long realUsage = 0;
@@ -69,19 +83,21 @@
if (freeSpace > 0 && freeSpace < len)
{
realUsage += freeSpace;
- base.FlushBuffer(src, (int) freeSpace);
+ base.WriteBytes(b, offset, (int) freeSpace);
}
if (realUsage > dir.maxUsedSize)
{
dir.maxUsedSize = realUsage;
}
- throw new System.IO.IOException("fake disk full at " + dir.SizeInBytes() + " bytes");
+ throw new System.IO.IOException("fake disk full at " + dir.GetRecomputedActualSizeInBytes() + " bytes");
}
else
{
- base.FlushBuffer(src, len);
+ base.WriteBytes(b, offset, len);
}
+ dir.MaybeThrowDeterministicException();
+
if (first)
{
// Maybe throw random exception; only do this on first
Modified: incubator/lucene.net/trunk/C#/src/Test/Store/TestBufferedIndexInput.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Store/TestBufferedIndexInput.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Store/TestBufferedIndexInput.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Store/TestBufferedIndexInput.cs Tue Jul 15 14:44:04 2008
@@ -19,20 +19,32 @@
using NUnit.Framework;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Hits = Lucene.Net.Search.Hits;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using TermQuery = Lucene.Net.Search.TermQuery;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using _TestUtil = Lucene.Net.Util._TestUtil;
+
namespace Lucene.Net.Store
{
[TestFixture]
- public class TestBufferedIndexInput
+ public class TestBufferedIndexInput : LuceneTestCase
{
// Call readByte() repeatedly, past the buffer boundary, and see that it
// is working as expected.
// Our input comes from a dynamically generated/ "file" - see
// MyBufferedIndexInput below.
- [Test]
+ [Test]
public virtual void TestReadByte()
{
MyBufferedIndexInput input = new MyBufferedIndexInput();
- for (int i = 0; i < BufferedIndexInput.BUFFER_SIZE_ForNUnitTest * 10; i++)
+ for (int i = 0; i < BufferedIndexInput.BUFFER_SIZE * 10; i++)
{
Assert.AreEqual(input.ReadByte(), Byten(i));
}
@@ -42,13 +54,13 @@
// larger than the buffer size), and see that it returns the bytes we expect.
// Our input comes from a dynamically generated "file" -
// see MyBufferedIndexInput below.
- [Test]
+ [Test]
public virtual void TestReadBytes()
{
MyBufferedIndexInput input = new MyBufferedIndexInput();
int pos = 0;
// gradually increasing size:
- for (int size = 1; size < BufferedIndexInput.BUFFER_SIZE_ForNUnitTest * 10; size = size + size / 200 + 1)
+ for (int size = 1; size < BufferedIndexInput.BUFFER_SIZE * 10; size = size + size / 200 + 1)
{
CheckReadBytes(input, size, pos);
pos += size;
@@ -65,7 +77,7 @@
pos += size;
}
// constant small size (7 bytes):
- for (int i = 0; i < BufferedIndexInput.BUFFER_SIZE_ForNUnitTest; i++)
+ for (int i = 0; i < BufferedIndexInput.BUFFER_SIZE; i++)
{
CheckReadBytes(input, 7, pos);
pos += 7;
@@ -87,7 +99,7 @@
// This tests that attempts to readBytes() past an EOF will fail, while
// reads up to the EOF will succeed. The EOF is determined by the
// BufferedIndexInput's arbitrary length() value.
- [Test]
+ [Test]
public virtual void TestEOF()
{
MyBufferedIndexInput input = new MyBufferedIndexInput(1024);
@@ -104,7 +116,7 @@
CheckReadBytes(input, 11, pos);
Assert.Fail("Block read past end of file");
}
- catch (System.IO.IOException e)
+ catch (System.IO.IOException)
{
/* success */
}
@@ -114,7 +126,7 @@
CheckReadBytes(input, 50, pos);
Assert.Fail("Block read past end of file");
}
- catch (System.IO.IOException e)
+ catch (System.IO.IOException)
{
/* success */
}
@@ -124,7 +136,7 @@
CheckReadBytes(input, 100000, pos);
Assert.Fail("Block read past end of file");
}
- catch (System.IO.IOException e)
+ catch (System.IO.IOException)
{
/* success */
}
@@ -150,13 +162,13 @@
{
}
- public override void ReadInternal(byte[] b, int offset, int length)
+ protected override void ReadInternal(byte[] b, int offset, int length)
{
for (int i = offset; i < offset + length; i++)
b[i] = Lucene.Net.Store.TestBufferedIndexInput.Byten(pos++);
}
- public override void SeekInternal(long pos)
+ protected override void SeekInternal(long pos)
{
this.pos = pos;
}
@@ -170,5 +182,141 @@
return len;
}
}
+
+ [Test]
+ public virtual void TestSetBufferSize()
+ {
+ System.IO.FileInfo indexDir = new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", ""), "testSetBufferSize"));
+ MockFSDirectory dir = new MockFSDirectory(indexDir);
+ try
+ {
+ IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+ writer.SetUseCompoundFile(false);
+ for (int i = 0; i < 37; i++)
+ {
+ Document doc = new Document();
+ doc.Add(new Field("content", "aaa bbb ccc ddd" + i, Field.Store.YES, Field.Index.TOKENIZED));
+ doc.Add(new Field("id", "" + i, Field.Store.YES, Field.Index.TOKENIZED));
+ writer.AddDocument(doc);
+ }
+ writer.Close();
+
+ dir.allIndexInputs.Clear();
+
+ IndexReader reader = IndexReader.Open(dir);
+ Term aaa = new Term("content", "aaa");
+ Term bbb = new Term("content", "bbb");
+ Term ccc = new Term("content", "ccc");
+ Assert.AreEqual(reader.DocFreq(ccc), 37);
+ reader.DeleteDocument(0);
+ Assert.AreEqual(reader.DocFreq(aaa), 37);
+ dir.TweakBufferSizes();
+ reader.DeleteDocument(4);
+ Assert.AreEqual(reader.DocFreq(bbb), 37);
+ dir.TweakBufferSizes();
+
+ IndexSearcher searcher = new IndexSearcher(reader);
+ Hits hits = searcher.Search(new TermQuery(bbb));
+ dir.TweakBufferSizes();
+ Assert.AreEqual(35, hits.Length());
+ dir.TweakBufferSizes();
+ hits = searcher.Search(new TermQuery(new Term("id", "33")));
+ dir.TweakBufferSizes();
+ Assert.AreEqual(1, hits.Length());
+ hits = searcher.Search(new TermQuery(aaa));
+ dir.TweakBufferSizes();
+ Assert.AreEqual(35, hits.Length());
+ searcher.Close();
+ reader.Close();
+ }
+ finally
+ {
+ _TestUtil.RmDir(indexDir);
+ }
+ }
+
+ private class MockFSDirectory : Directory
+ {
+
+ internal System.Collections.IList allIndexInputs = new System.Collections.ArrayList();
+
+ internal System.Random rand = new System.Random();
+
+ private Directory dir;
+
+ public MockFSDirectory(System.IO.FileInfo path)
+ {
+ lockFactory = new NoLockFactory();
+ dir = FSDirectory.GetDirectory(path);
+ }
+
+ public override IndexInput OpenInput(System.String name)
+ {
+ return OpenInput(name, BufferedIndexInput.BUFFER_SIZE);
+ }
+
+ public virtual void TweakBufferSizes()
+ {
+ System.Collections.IEnumerator it = allIndexInputs.GetEnumerator();
+ int count = 0;
+ while (it.MoveNext())
+ {
+ BufferedIndexInput bii = (BufferedIndexInput) it.Current;
+ int bufferSize = 1024 + (int) System.Math.Abs(rand.Next() % 32768);
+ bii.SetBufferSize(bufferSize);
+ count++;
+ }
+ //System.out.println("tweak'd " + count + " buffer sizes");
+ }
+
+ public override IndexInput OpenInput(System.String name, int bufferSize)
+ {
+ // Make random changes to buffer size
+ bufferSize = 1 + (int) System.Math.Abs(rand.Next() % 10);
+ IndexInput f = dir.OpenInput(name, bufferSize);
+ allIndexInputs.Add(f);
+ return f;
+ }
+
+ public override IndexOutput CreateOutput(System.String name)
+ {
+ return dir.CreateOutput(name);
+ }
+
+ public override void Close()
+ {
+ dir.Close();
+ }
+
+ public override void DeleteFile(System.String name)
+ {
+ dir.DeleteFile(name);
+ }
+ public override void TouchFile(System.String name)
+ {
+ dir.TouchFile(name);
+ }
+ public override long FileModified(System.String name)
+ {
+ return dir.FileModified(name);
+ }
+ public override bool FileExists(System.String name)
+ {
+ return dir.FileExists(name);
+ }
+ public override System.String[] List()
+ {
+ return dir.List();
+ }
+
+ public override long FileLength(System.String name)
+ {
+ return dir.FileLength(name);
+ }
+ public override void RenameFile(System.String from, System.String to)
+ {
+ dir.RenameFile(from, to);
+ }
+ }
}
}
\ No newline at end of file
Added: incubator/lucene.net/trunk/C#/src/Test/Store/TestHugeRamFile.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Store/TestHugeRamFile.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Store/TestHugeRamFile.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Store/TestHugeRamFile.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Store
+{
+
+ /// <summary>Test huge RAMFile with more than Integer.MAX_VALUE bytes. </summary>
+ [TestFixture]
+ public class TestHugeRamFile : LuceneTestCase
+ {
+
+ private static readonly long MAX_VALUE = (long) 2 * (long) System.Int32.MaxValue;
+
+ /// <summary>Fake a huge ram file by using the same byte buffer for all
+ /// buffers under maxint.
+ /// </summary>
+ [Serializable]
+ private class DenseRAMFile : RAMFile
+ {
+ private long capacity = 0;
+ private System.Collections.Hashtable singleBuffers = new System.Collections.Hashtable();
+ protected override byte[] NewBuffer(int size)
+ {
+ capacity += size;
+ if (capacity <= Lucene.Net.Store.TestHugeRamFile.MAX_VALUE)
+ {
+ // below maxint we reuse buffers
+ byte[] buf = (byte[]) singleBuffers[(System.Int32) size];
+ if (buf == null)
+ {
+ buf = new byte[size];
+ //System.out.println("allocate: "+size);
+ singleBuffers[(System.Int32) size] = buf;
+ }
+ return buf;
+ }
+ //System.out.println("allocate: "+size); System.out.flush();
+ return new byte[size];
+ }
+ }
+
+ /// <summary>Test huge RAMFile with more than Integer.MAX_VALUE bytes. (LUCENE-957) </summary>
+ [Test]
+ public virtual void TestHugeFile()
+ {
+ DenseRAMFile f = new DenseRAMFile();
+ // output part
+ RAMOutputStream out_Renamed = new RAMOutputStream(f);
+ byte[] b1 = new byte[RAMOutputStream.BUFFER_SIZE_ForNUnitTest];
+ byte[] b2 = new byte[RAMOutputStream.BUFFER_SIZE_ForNUnitTest / 3];
+ for (int i = 0; i < b1.Length; i++)
+ {
+ b1[i] = (byte) (i & 0x0007F);
+ }
+ for (int i = 0; i < b2.Length; i++)
+ {
+ b2[i] = (byte) (i & 0x0003F);
+ }
+ long n = 0;
+ Assert.AreEqual(n, out_Renamed.Length(), "output length must match");
+ while (n <= MAX_VALUE - b1.Length)
+ {
+ out_Renamed.WriteBytes(b1, 0, b1.Length);
+ out_Renamed.Flush();
+ n += b1.Length;
+ Assert.AreEqual(n, out_Renamed.Length(), "output length must match");
+ }
+ //System.out.println("after writing b1's, length = "+out.length()+" (MAX_VALUE="+MAX_VALUE+")");
+ int m = b2.Length;
+ long L = 12;
+ for (int j = 0; j < L; j++)
+ {
+ for (int i = 0; i < b2.Length; i++)
+ {
+ b2[i]++;
+ }
+ out_Renamed.WriteBytes(b2, 0, m);
+ out_Renamed.Flush();
+ n += m;
+ Assert.AreEqual(n, out_Renamed.Length(), "output length must match");
+ }
+ out_Renamed.Close();
+ // input part
+ RAMInputStream in_Renamed = RAMInputStream.RAMInputStream_ForNUnitTest(f);
+ Assert.AreEqual(n, in_Renamed.Length(), "input length must match");
+ //System.out.println("input length = "+in.length()+" % 1024 = "+in.length()%1024);
+ for (int j = 0; j < L; j++)
+ {
+ long loc = n - (L - j) * m;
+ in_Renamed.Seek(loc / 3);
+ in_Renamed.Seek(loc);
+ for (int i = 0; i < m; i++)
+ {
+ byte bt = in_Renamed.ReadByte();
+ byte expected = (byte) (1 + j + (i & 0x0003F));
+ Assert.AreEqual(expected, bt, "must read same value that was written! j=" + j + " i=" + i);
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/Store/TestLock.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Store/TestLock.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Store/TestLock.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Store/TestLock.cs Tue Jul 15 14:44:04 2008
@@ -73,64 +73,66 @@
using NUnit.Framework;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
namespace Lucene.Net.Store
{
- [TestFixture]
- public class TestLock
- {
+ [TestFixture]
+ public class TestLock : LuceneTestCase
+ {
- [Test]
- public virtual void TestObtain()
- {
- LockMock lock_Renamed = new LockMock(this);
- Lucene.Net.Store.Lock.LOCK_POLL_INTERVAL = 10;
+ [Test]
+ public virtual void TestObtain()
+ {
+ LockMock lock_Renamed = new LockMock(this);
+ Lucene.Net.Store.Lock.LOCK_POLL_INTERVAL = 10;
- try
- {
- lock_Renamed.Obtain(Lucene.Net.Store.Lock.LOCK_POLL_INTERVAL);
- Assert.Fail("Should have failed to obtain lock");
- }
- catch (System.IO.IOException e)
- {
- Assert.AreEqual(lock_Renamed.lockAttempts, 2, "should attempt to lock more than once");
- }
- }
+ try
+ {
+ lock_Renamed.Obtain(Lucene.Net.Store.Lock.LOCK_POLL_INTERVAL);
+ Assert.Fail("Should have failed to obtain lock");
+ }
+ catch (System.IO.IOException)
+ {
+ Assert.AreEqual(lock_Renamed.lockAttempts, 2, "should attempt to lock more than once");
+ }
+ }
- private class LockMock : Lucene.Net.Store.Lock
- {
- public LockMock(TestLock enclosingInstance)
- {
- InitBlock(enclosingInstance);
- }
- private void InitBlock(TestLock enclosingInstance)
- {
- this.enclosingInstance = enclosingInstance;
- }
- private TestLock enclosingInstance;
- override public bool IsLocked()
- {
- return false;
- }
- public TestLock Enclosing_Instance
- {
- get
- {
- return enclosingInstance;
- }
+ private class LockMock : Lucene.Net.Store.Lock
+ {
+ public LockMock(TestLock enclosingInstance)
+ {
+ InitBlock(enclosingInstance);
+ }
+ private void InitBlock(TestLock enclosingInstance)
+ {
+ this.enclosingInstance = enclosingInstance;
+ }
+ private TestLock enclosingInstance;
+ override public bool IsLocked()
+ {
+ return false;
+ }
+ public TestLock Enclosing_Instance
+ {
+ get
+ {
+ return enclosingInstance;
+ }
- }
- public int lockAttempts;
+ }
+ public int lockAttempts;
- public override bool Obtain()
- {
- lockAttempts++;
- return false;
- }
- public override void Release()
- {
- // do nothing
- }
- }
- }
+ public override bool Obtain()
+ {
+ lockAttempts++;
+ return false;
+ }
+ public override void Release()
+ {
+ // do nothing
+ }
+ }
+ }
}
\ No newline at end of file