You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by ar...@apache.org on 2009/11/03 19:06:38 UTC
svn commit: r832486 [24/29] - in /incubator/lucene.net/trunk/C#/src: ./
Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/
Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Document/
Lucene.Net/Index/ Lucene.Net/Search/ Lucene...
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestSort.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSort.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSort.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSort.cs Tue Nov 3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
@@ -16,18 +16,22 @@
*/
using System;
-using Pattern = System.Text.RegularExpressions.Regex;
using NUnit.Framework;
using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
+using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
using IndexReader = Lucene.Net.Index.IndexReader;
using IndexWriter = Lucene.Net.Index.IndexWriter;
using Term = Lucene.Net.Index.Term;
+using ParseException = Lucene.Net.QueryParsers.ParseException;
+using LockObtainFailedException = Lucene.Net.Store.LockObtainFailedException;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
using DocIdBitSet = Lucene.Net.Util.DocIdBitSet;
+using Occur = Lucene.Net.Search.BooleanClause.Occur;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
namespace Lucene.Net.Search
{
@@ -39,15 +43,165 @@
/// </summary>
/// <since> lucene 1.4
/// </since>
- /// <version> $Id: TestSort.java 590530 2007-10-31 01:28:25Z gsingers $
+ /// <version> $Id: TestSort.java 803676 2009-08-12 19:31:38Z hossman $
/// </version>
[Serializable]
- [TestFixture]
- public class TestSort
+ [TestFixture]
+ public class TestSort:LuceneTestCase
{
[Serializable]
- private class AnonymousClassFilter : Filter
+ private class AnonymousClassIntParser : Lucene.Net.Search.IntParser
+ {
+ public AnonymousClassIntParser(TestSort enclosingInstance)
+ {
+ InitBlock(enclosingInstance);
+ }
+ private void InitBlock(TestSort enclosingInstance)
+ {
+ this.enclosingInstance = enclosingInstance;
+ }
+ private TestSort enclosingInstance;
+ public TestSort Enclosing_Instance
+ {
+ get
+ {
+ return enclosingInstance;
+ }
+
+ }
+ public int ParseInt(System.String val)
+ {
+ return (val[0] - 'A') * 123456;
+ }
+ }
+ [Serializable]
+ private class AnonymousClassFloatParser : Lucene.Net.Search.FloatParser
+ {
+ public AnonymousClassFloatParser(TestSort enclosingInstance)
+ {
+ InitBlock(enclosingInstance);
+ }
+ private void InitBlock(TestSort enclosingInstance)
+ {
+ this.enclosingInstance = enclosingInstance;
+ }
+ private TestSort enclosingInstance;
+ public TestSort Enclosing_Instance
+ {
+ get
+ {
+ return enclosingInstance;
+ }
+
+ }
+ public float ParseFloat(System.String val)
+ {
+ return (float) System.Math.Sqrt(val[0]);
+ }
+ }
+ [Serializable]
+ private class AnonymousClassLongParser : Lucene.Net.Search.LongParser
+ {
+ public AnonymousClassLongParser(TestSort enclosingInstance)
+ {
+ InitBlock(enclosingInstance);
+ }
+ private void InitBlock(TestSort enclosingInstance)
+ {
+ this.enclosingInstance = enclosingInstance;
+ }
+ private TestSort enclosingInstance;
+ public TestSort Enclosing_Instance
+ {
+ get
+ {
+ return enclosingInstance;
+ }
+
+ }
+ public long ParseLong(System.String val)
+ {
+ return (val[0] - 'A') * 1234567890L;
+ }
+ }
+ [Serializable]
+ private class AnonymousClassDoubleParser : Lucene.Net.Search.DoubleParser
+ {
+ public AnonymousClassDoubleParser(TestSort enclosingInstance)
+ {
+ InitBlock(enclosingInstance);
+ }
+ private void InitBlock(TestSort enclosingInstance)
+ {
+ this.enclosingInstance = enclosingInstance;
+ }
+ private TestSort enclosingInstance;
+ public TestSort Enclosing_Instance
+ {
+ get
+ {
+ return enclosingInstance;
+ }
+
+ }
+ public double ParseDouble(System.String val)
+ {
+ return System.Math.Pow(val[0], (val[0] - 'A'));
+ }
+ }
+ [Serializable]
+ private class AnonymousClassByteParser : Lucene.Net.Search.ByteParser
+ {
+ public AnonymousClassByteParser(TestSort enclosingInstance)
+ {
+ InitBlock(enclosingInstance);
+ }
+ private void InitBlock(TestSort enclosingInstance)
+ {
+ this.enclosingInstance = enclosingInstance;
+ }
+ private TestSort enclosingInstance;
+ public TestSort Enclosing_Instance
+ {
+ get
+ {
+ return enclosingInstance;
+ }
+
+ }
+ public byte ParseByte(System.String val)
+ {
+ return (byte) (val[0] - 'A');
+ }
+ }
+ [Serializable]
+ private class AnonymousClassShortParser : Lucene.Net.Search.ShortParser
+ {
+ public AnonymousClassShortParser(TestSort enclosingInstance)
+ {
+ InitBlock(enclosingInstance);
+ }
+ private void InitBlock(TestSort enclosingInstance)
+ {
+ this.enclosingInstance = enclosingInstance;
+ }
+ private TestSort enclosingInstance;
+ public TestSort Enclosing_Instance
+ {
+ get
+ {
+ return enclosingInstance;
+ }
+
+ }
+ public short ParseShort(System.String val)
+ {
+ return (short) (val[0] - 'A');
+ }
+ }
+ [Serializable]
+ private class AnonymousClassFilter:Filter
{
public AnonymousClassFilter(Lucene.Net.Search.TopDocs docs1, TestSort enclosingInstance)
{
@@ -58,10 +212,8 @@
this.docs1 = docs1;
this.enclosingInstance = enclosingInstance;
}
-
private Lucene.Net.Search.TopDocs docs1;
private TestSort enclosingInstance;
-
public TestSort Enclosing_Instance
{
get
@@ -70,22 +222,16 @@
}
}
-
- public override DocIdSet GetDocIdSet(IndexReader reader)
- {
- System.Collections.BitArray bs = new System.Collections.BitArray((reader.MaxDoc() % 64 == 0 ? reader.MaxDoc() / 64 : reader.MaxDoc() / 64 + 1) * 64);
- bs.Set(docs1.scoreDocs[0].doc, true);
- return new DocIdBitSet(bs);
- }
- [System.Obsolete()]
- public override System.Collections.BitArray Bits(IndexReader reader)
+ public override DocIdSet GetDocIdSet(IndexReader reader)
{
System.Collections.BitArray bs = new System.Collections.BitArray((reader.MaxDoc() % 64 == 0?reader.MaxDoc() / 64:reader.MaxDoc() / 64 + 1) * 64);
+ for (int i = 0; i < reader.MaxDoc(); i++) bs.Set(i, true);
bs.Set(docs1.scoreDocs[0].doc, true);
- return bs;
+ return new DocIdBitSet(bs);
}
}
+ private const int NUM_STRINGS = 6000;
private Searcher full;
private Searcher searchX;
private Searcher searchY;
@@ -96,42 +242,17 @@
private Query queryF;
private Query queryG;
private Sort sort;
-
- private bool serverStarted = false;
- private int port;
- //public TestSort(System.String name) : base(name)
- //{
- //}
- [STAThread]
- public static void Main(System.String[] argv)
+ public TestSort(System.String name):base(name)
{
- System.Runtime.Remoting.RemotingConfiguration.Configure("Lucene.Net.Search.TestSort.config", false);
- System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(new System.Runtime.Remoting.Channels.Http.HttpChannel(8080), false);
- if (argv == null || argv.Length < 1)
- {
- // NUnit.Core.TestRunner.Run(Suite()); // {{Aroush-1.9}} where is "Run" in NUnit?
- }
- else if ("server".Equals(argv[0]))
- {
- TestSort test = new TestSort();
- try
- {
- test.StartServer();
- System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 500000));
- }
- catch (System.Exception e)
- {
- System.Console.Out.WriteLine(e);
- System.Console.Error.WriteLine(e.StackTrace);
- }
- }
-
- System.Console.ReadLine();
}
- static string NDS = System.Globalization.CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator;
+ /*public static Test Suite()
+ {
+ return new TestSuite(typeof(TestSort));
+ }*/
+
// document data:
// the tracer field is used to determine which document was hit
@@ -140,49 +261,20 @@
// the float field to sort by float
// the string field to sort by string
// the i18n field includes accented characters for testing locale-specific sorting
- private string[][] data = new string[][] {
-
- // {dougsale-2.4.0}
- // As C# doesn't support a signed byte datatype, the byte values of the data array have been adjusted
- // to work as expected (maintained relative sort-ordering of documents).
- // All the byte values had 51 added to them except MinValue and MaxValue, as -50 was the lowest value inside of MinValue).
- // Note: The inconsistent behavior of fields interpreted as byte for sorting purposes could pose a problem for someone
- // using an index across Lucene and Lucene.NET.
-
- // tracer contents int float string custom i18n long double, 'short', byte
- new string[] { "A", "x a", "5", "4f", "c", "A-3", "p\u00EAche", "10", "-4" + NDS + "0", "3", "177"/*"126"*/},//A,x
- new string[] { "B", "y a", "5", "3" + NDS + "4028235E38", "i", "B-10", "HAT", "1000000000", "40" + NDS + "0", "24", "52"/*"1"*/},//B,y
- new string[] { "C", "x a b c", "2147483647", "1" + NDS + "0", "j", "A-2", "p\u00E9ch\u00E9", "99999999", "40" + NDS + "00002343", "125", "66"/*"15"*/},//C,x
-
- // {dougsale-2.4.0}
- // Can't convert from System.Double.MinValue or System.Double.MaxValue to a string and then back to a double
- // without a System.OverflowException occurring on the way back from string to double - so reduced the
- // magnitude of the values while maintaining the relative sort orders. Perhaps we should be using decimal instead
- // of double...
-
- new string[] { "D", "y a b c", "-1", "0" + NDS + "0f", "a", "C-0", "HUT", System.Int64.MaxValue.ToString(), "-1.77971620351369E+308" /*System.Double.MinValue.ToString()*/, System.Int16.MinValue.ToString(), System.Byte.MinValue.ToString() },//D,y
- new string[] { "E", "x a b c d", "5", "2f", "h", "B-8", "peach", System.Int64.MinValue.ToString(), "1.77971620351369E+308" /*System.Double.MaxValue.ToString()*/, System.Int16.MaxValue.ToString(), System.Byte.MaxValue.ToString() },//E,x
- new string[] { "F", "y a b c d", "2", "3" + NDS + "14159f", "g", "B-1", "H\u00C5T", "-44", "343" + NDS + "034435444", "-3", "51"/*"0"*/},//F,y
- new string[] { "G", "x a b c d", "3", "-1" + NDS + "0", "f", "C-100", "sin", "323254543543", "4" + NDS + "043544", "5", "151"/*"100"*/},//G,x
- new string[] { "H", "y a b c d", "0", "1" + NDS + "4E-45", "e", "C-88", "H\u00D8T", "1023423423005","4" + NDS + "043545", "10", "1"/*"-50"*/},//H,y
- new string[] { "I", "x a b c d e f", "-2147483648", "1" + NDS + "0e+0", "d", "A-10", "s\u00EDn", "332422459999", "4" + NDS + "043546", "-340", "102"/*"51"*/},//I,x
- new string[] { "J", "y a b c d e f", "4", "" + NDS + "5", "b", "C-7", "HOT", "34334543543", "4" + NDS + "0000220343", "300", "53"/*"2"*/},//J,y
- new string[] { "W", "g", "1", null, null, null, null, null, null, null, null},
- new string[] { "X", "g", "1", "0" + NDS + "1", null, null, null, null, null, null, null},
- new string[] { "Y", "g", "1", "0" + NDS + "2", null, null, null, null, null, null, null},
- new string[] { "Z", "f g", null, null, null, null, null, null, null, null, null}
- };
+ private System.String[][] data = new System.String[][]{new System.String[]{"A", "x a", "5", "4f", "c", "A-3", "p\u00EAche", "10", "-4.0", "3", "126", "J"}, new System.String[]{"B", "y a", "5", "3.4028235E38", "i", "B-10", "HAT", "1000000000", "40.0", "24", "1", "I"}, new System.String[]{"C", "x a b c", "2147483647", "1.0", "j", "A-2", "p\u00E9ch\u00E9", "99999999", "40.00002343", "125", "15", "H"}, new System.String[]{"D", "y a b c", "-1", "0.0f", "a", "C-0", "HUT", System.Convert.ToString(System.Int64.MaxValue), System.Convert.ToString(System.Double.MinValue), System.Convert.ToString(System.Int16.MinValue), System.Convert.ToString((byte) System.Byte.MinValue), "G"}, new System.String[]{"E", "x a b c d", "5", "2f", "h", "B-8", "peach", System.Convert.ToString(System.Int64.MinValue), System.Convert.ToString(System.Double.MaxValue), System.Convert.ToString(System.Int16.MaxValue), System.Convert.ToString((byte) System.SByte.MaxValue), "F"}, new System.String[]{"F", "y a b c d
", "2", "3.14159f", "g", "B-1", "H\u00C5T", "-44", "343.034435444", "-3", "0", "E"}, new System.String[]{"G", "x a b c d", "3", "-1.0", "f", "C-100", "sin", "323254543543", "4.043544", "5", "100", "D"}, new System.String[]{"H", "y a b c d", "0", "1.4E-45", "e", "C-88", "H\u00D8T", "1023423423005", "4.043545", "10", "-50", "C"}, new System.String[]{"I", "x a b c d e f", "-2147483648", "1.0e+0", "d", "A-10", "s\u00EDn", "332422459999", "4.043546", "-340", "51", "B"}, new System.String[]{"J", "y a b c d e f", "4", ".5", "b", "C-7", "HOT", "34334543543", "4.0000220343", "300", "2", "A"}, new System.String[]{"W", "g", "1", null, null, null, null, null, null, null, null, null}, new System.String[]{"X", "g", "1", "0.1", null, null, null, null, null, null, null, null}, new System.String[]{"Y", "g", "1", "0.2", null, null, null, null, null, null, null, null}, new System.String[]{"Z", "f g", null, null, null, null, null, null, null, null, null, null}};
// create an index of all the documents, or just the x, or just the y documents
private Searcher GetIndex(bool even, bool odd)
{
RAMDirectory indexStore = new RAMDirectory();
IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+ writer.SetMaxBufferedDocs(2);
+ writer.SetMergeFactor(1000);
for (int i = 0; i < data.Length; ++i)
{
if (((i % 2) == 0 && even) || ((i % 2) == 1 && odd))
{
- Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+ Document doc = new Document();
doc.Add(new Field("tracer", data[i][0], Field.Store.YES, Field.Index.NO));
doc.Add(new Field("contents", data[i][1], Field.Store.NO, Field.Index.ANALYZED));
if (data[i][2] != null)
@@ -199,17 +291,21 @@
doc.Add(new Field("long", data[i][7], Field.Store.NO, Field.Index.NOT_ANALYZED));
if (data[i][8] != null)
doc.Add(new Field("double", data[i][8], Field.Store.NO, Field.Index.NOT_ANALYZED));
- if (data[i][8] != null)
- doc.Add(new Field("short", data[i][9], Field.Store.NO, Field.Index.NOT_ANALYZED));
- if (data[i][8] != null)
- doc.Add(new Field("byte", data[i][10], Field.Store.NO, Field.Index.NOT_ANALYZED));
- doc.SetBoost(2); // produce some scores above 1.0
+ if (data[i][9] != null)
+ doc.Add(new Field("short", data[i][9], Field.Store.NO, Field.Index.NOT_ANALYZED));
+ if (data[i][10] != null)
+ doc.Add(new Field("byte", data[i][10], Field.Store.NO, Field.Index.NOT_ANALYZED));
+ if (data[i][11] != null)
+ doc.Add(new Field("parser", data[i][11], Field.Store.NO, Field.Index.NOT_ANALYZED));
+ doc.SetBoost(2); // produce some scores above 1.0
writer.AddDocument(doc);
}
}
- writer.Optimize();
+ //writer.optimize ();
writer.Close();
- return new IndexSearcher(indexStore);
+ IndexSearcher s = new IndexSearcher(indexStore);
+ s.SetDefaultFieldSortScoring(true, true);
+ return s;
}
private Searcher GetFullIndex()
@@ -217,6 +313,67 @@
return GetIndex(true, true);
}
+ private IndexSearcher GetFullStrings()
+ {
+ RAMDirectory indexStore = new RAMDirectory();
+ IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+ writer.SetMaxBufferedDocs(4);
+ writer.SetMergeFactor(97);
+ for (int i = 0; i < NUM_STRINGS; i++)
+ {
+ Document doc = new Document();
+ System.String num = GetRandomCharString(GetRandomNumber(2, 8), 48, 52);
+ doc.Add(new Field("tracer", num, Field.Store.YES, Field.Index.NO));
+ //doc.add (new Field ("contents", Integer.toString(i), Field.Store.NO, Field.Index.ANALYZED));
+ doc.Add(new Field("string", num, Field.Store.NO, Field.Index.NOT_ANALYZED));
+ System.String num2 = GetRandomCharString(GetRandomNumber(1, 4), 48, 50);
+ doc.Add(new Field("string2", num2, Field.Store.NO, Field.Index.NOT_ANALYZED));
+ doc.Add(new Field("tracer2", num2, Field.Store.YES, Field.Index.NO));
+ doc.SetBoost(2); // produce some scores above 1.0
+ writer.SetMaxBufferedDocs(GetRandomNumber(2, 12));
+ writer.AddDocument(doc);
+ }
+ //writer.optimize ();
+ //System.out.println(writer.getSegmentCount());
+ writer.Close();
+ return new IndexSearcher(indexStore);
+ }
+
+ public virtual System.String GetRandomNumberString(int num, int low, int high)
+ {
+ System.Text.StringBuilder sb = new System.Text.StringBuilder();
+ for (int i = 0; i < num; i++)
+ {
+ sb.Append(GetRandomNumber(low, high));
+ }
+ return sb.ToString();
+ }
+
+ public virtual System.String GetRandomCharString(int num)
+ {
+ return GetRandomCharString(num, 48, 122);
+ }
+
+ public virtual System.String GetRandomCharString(int num, int start, int end)
+ {
+ System.Text.StringBuilder sb = new System.Text.StringBuilder();
+ for (int i = 0; i < num; i++)
+ {
+ sb.Append((char) GetRandomNumber(start, end));
+ }
+ return sb.ToString();
+ }
+
+ internal System.Random r;
+
+ public virtual int GetRandomNumber(int low, int high)
+ {
+
+ int randInt = (System.Math.Abs(r.Next()) % (high - low)) + low;
+
+ return randInt;
+ }
+
private Searcher GetXIndex()
{
return GetIndex(true, false);
@@ -232,9 +389,10 @@
return GetIndex(false, false);
}
- [SetUp]
- public virtual void SetUp()
+ [Test]
+ public override void SetUp()
{
+ base.SetUp();
full = GetFullIndex();
searchX = GetXIndex();
searchY = GetYIndex();
@@ -245,9 +403,6 @@
queryF = new TermQuery(new Term("contents", "f"));
queryG = new TermQuery(new Term("contents", "g"));
sort = new Sort();
-
- if (!serverStarted)
- StartServer();
}
// test the sorts by score and document number
@@ -267,34 +422,144 @@
[Test]
public virtual void TestTypedSort()
{
- sort.SetSort(new SortField[] { new SortField("int", SortField.INT), SortField.FIELD_DOC });
- AssertMatches(full, queryX, sort, "IGAEC");
- AssertMatches(full, queryY, sort, "DHFJB");
-
- sort.SetSort(new SortField[] { new SortField("float", SortField.FLOAT), SortField.FIELD_DOC });
- AssertMatches(full, queryX, sort, "GCIEA");
- AssertMatches(full, queryY, sort, "DHJFB");
-
- sort.SetSort(new SortField[] { new SortField("string", SortField.STRING), SortField.FIELD_DOC });
- AssertMatches(full, queryX, sort, "AIGEC");
- AssertMatches(full, queryY, sort, "DJHFB");
-
- sort.SetSort(new SortField[] { new SortField("double", SortField.DOUBLE), SortField.FIELD_DOC });
- AssertMatches(full, queryX, sort, "AGICE");
- AssertMatches(full, queryY, sort, "DJHBF");
-
- sort.SetSort(new SortField[] { new SortField("byte", SortField.BYTE), SortField.FIELD_DOC });
- AssertMatches(full, queryX, sort, "CIGAE");
- AssertMatches(full, queryY, sort, "DHFBJ");
-
- sort.SetSort(new SortField[] { new SortField("short", SortField.SHORT), SortField.FIELD_DOC });
- AssertMatches(full, queryX, sort, "IAGCE");
- AssertMatches(full, queryY, sort, "DFHBJ");
-
- sort.SetSort(new SortField[] { new SortField("string", SortField.STRING), SortField.FIELD_DOC });
- AssertMatches(full, queryX, sort, "AIGEC");
- AssertMatches(full, queryY, sort, "DJHFB");
- }
+ sort.SetSort(new SortField[]{new SortField("int", SortField.INT), SortField.FIELD_DOC});
+ AssertMatches(full, queryX, sort, "IGAEC");
+ AssertMatches(full, queryY, sort, "DHFJB");
+
+ sort.SetSort(new SortField[]{new SortField("float", SortField.FLOAT), SortField.FIELD_DOC});
+ AssertMatches(full, queryX, sort, "GCIEA");
+ AssertMatches(full, queryY, sort, "DHJFB");
+
+ sort.SetSort(new SortField[]{new SortField("long", SortField.LONG), SortField.FIELD_DOC});
+ AssertMatches(full, queryX, sort, "EACGI");
+ AssertMatches(full, queryY, sort, "FBJHD");
+
+ sort.SetSort(new SortField[]{new SortField("double", SortField.DOUBLE), SortField.FIELD_DOC});
+ AssertMatches(full, queryX, sort, "AGICE");
+ AssertMatches(full, queryY, sort, "DJHBF");
+
+ sort.SetSort(new SortField[]{new SortField("byte", SortField.BYTE), SortField.FIELD_DOC});
+ AssertMatches(full, queryX, sort, "CIGAE");
+ AssertMatches(full, queryY, sort, "DHFBJ");
+
+ sort.SetSort(new SortField[]{new SortField("short", SortField.SHORT), SortField.FIELD_DOC});
+ AssertMatches(full, queryX, sort, "IAGCE");
+ AssertMatches(full, queryY, sort, "DFHBJ");
+
+ sort.SetSort(new SortField[]{new SortField("string", SortField.STRING), SortField.FIELD_DOC});
+ AssertMatches(full, queryX, sort, "AIGEC");
+ AssertMatches(full, queryY, sort, "DJHFB");
+ }
+
+ /// <summary> Test String sorting: small queue to many matches, multi field sort, reverse sort</summary>
+ [Test]
+ public virtual void TestStringSort()
+ {
+ r = NewRandom();
+ ScoreDoc[] result = null;
+ IndexSearcher searcher = GetFullStrings();
+ sort.SetSort(new SortField[]{new SortField("string", SortField.STRING), new SortField("string2", SortField.STRING, true), SortField.FIELD_DOC});
+
+ result = searcher.Search(new MatchAllDocsQuery(), null, 500, sort).scoreDocs;
+
+ System.Text.StringBuilder buff = new System.Text.StringBuilder();
+ int n = result.Length;
+ System.String last = null;
+ System.String lastSub = null;
+ int lastDocId = 0;
+ bool fail = false;
+ for (int x = 0; x < n; ++x)
+ {
+ Document doc2 = searcher.Doc(result[x].doc);
+ System.String[] v = doc2.GetValues("tracer");
+ System.String[] v2 = doc2.GetValues("tracer2");
+ for (int j = 0; j < v.Length; ++j)
+ {
+ if (last != null)
+ {
+ int cmp = String.CompareOrdinal(v[j], last);
+ if (!(cmp >= 0))
+ {
+ // ensure first field is in order
+ fail = true;
+ System.Console.Out.WriteLine("fail:" + v[j] + " < " + last);
+ }
+ if (cmp == 0)
+ {
+ // ensure second field is in reverse order
+ cmp = String.CompareOrdinal(v2[j], lastSub);
+ if (cmp > 0)
+ {
+ fail = true;
+ System.Console.Out.WriteLine("rev field fail:" + v2[j] + " > " + lastSub);
+ }
+ else if (cmp == 0)
+ {
+ // ensure docid is in order
+ if (result[x].doc < lastDocId)
+ {
+ fail = true;
+ System.Console.Out.WriteLine("doc fail:" + result[x].doc + " > " + lastDocId);
+ }
+ }
+ }
+ }
+ last = v[j];
+ lastSub = v2[j];
+ lastDocId = result[x].doc;
+ buff.Append(v[j] + "(" + v2[j] + ")(" + result[x].doc + ") ");
+ }
+ }
+ if (fail)
+ {
+ System.Console.Out.WriteLine("topn field1(field2)(docID):" + buff);
+ }
+ Assert.IsFalse(fail, "Found sort results out of order");
+ }
+
+ /// <summary> test sorts where the type of field is specified and a custom field parser
+ /// is used, that uses a simple char encoding. The sorted string contains a
+ /// character beginning from 'A' that is mapped to a numeric value using some
+ /// "funny" algorithm to be different for each data type.
+ /// </summary>
+ [Test]
+ public virtual void TestCustomFieldParserSort()
+ {
+ // since tests explicilty uses different parsers on the same fieldname
+ // we explicitly check/purge the FieldCache between each assertMatch
+ FieldCache fc = Lucene.Net.Search.FieldCache_Fields.DEFAULT;
+
+
+ sort.SetSort(new SortField[]{new SortField("parser", new AnonymousClassIntParser(this)), SortField.FIELD_DOC});
+ AssertMatches(full, queryA, sort, "JIHGFEDCBA");
+ AssertSaneFieldCaches("getName()" + " IntParser"); // {{Aroush-2.9}} String junit.framework.TestCase.getName()
+ fc.PurgeAllCaches();
+
+ sort.SetSort(new SortField[]{new SortField("parser", new AnonymousClassFloatParser(this)), SortField.FIELD_DOC});
+ AssertMatches(full, queryA, sort, "JIHGFEDCBA");
+ AssertSaneFieldCaches("getName()" + " FloatParser"); // {{Aroush-2.9}} String junit.framework.TestCase.getName()
+ fc.PurgeAllCaches();
+
+ sort.SetSort(new SortField[]{new SortField("parser", new AnonymousClassLongParser(this)), SortField.FIELD_DOC});
+ AssertMatches(full, queryA, sort, "JIHGFEDCBA");
+ AssertSaneFieldCaches("getName()" + " LongParser"); // {{Aroush-2.9}} String junit.framework.TestCase.getName()
+ fc.PurgeAllCaches();
+
+ sort.SetSort(new SortField[]{new SortField("parser", new AnonymousClassDoubleParser(this)), SortField.FIELD_DOC});
+ AssertMatches(full, queryA, sort, "JIHGFEDCBA");
+ AssertSaneFieldCaches("getName()" + " DoubleParser"); // {{Aroush-2.9}} String junit.framework.TestCase.getName()
+ fc.PurgeAllCaches();
+
+ sort.SetSort(new SortField[]{new SortField("parser", new AnonymousClassByteParser(this)), SortField.FIELD_DOC});
+ AssertMatches(full, queryA, sort, "JIHGFEDCBA");
+ AssertSaneFieldCaches("getName()" + " ByteParser"); // {{Aroush-2.9}} String junit.framework.TestCase.getName()
+ fc.PurgeAllCaches();
+
+ sort.SetSort(new SortField[]{new SortField("parser", new AnonymousClassShortParser(this)), SortField.FIELD_DOC});
+ AssertMatches(full, queryA, sort, "JIHGFEDCBA");
+ AssertSaneFieldCaches("getName()" + " ShortParser"); // {{Aroush-2.9}} String junit.framework.TestCase.getName()
+ fc.PurgeAllCaches();
+ }
// test sorts when there's nothing in the index
[Test]
@@ -318,6 +583,90 @@
AssertMatches(empty, queryX, sort, "");
}
+ internal class MyFieldComparator:FieldComparator
+ {
+ [Serializable]
+ private class AnonymousClassIntParser1 : Lucene.Net.Search.IntParser
+ {
+ public AnonymousClassIntParser1(MyFieldComparator enclosingInstance)
+ {
+ InitBlock(enclosingInstance);
+ }
+ private void InitBlock(MyFieldComparator enclosingInstance)
+ {
+ this.enclosingInstance = enclosingInstance;
+ }
+ private MyFieldComparator enclosingInstance;
+ public MyFieldComparator Enclosing_Instance
+ {
+ get
+ {
+ return enclosingInstance;
+ }
+
+ }
+ public int ParseInt(System.String val)
+ {
+ return (val[0] - 'A') * 123456;
+ }
+ }
+ internal int[] docValues;
+ internal int[] slotValues;
+ internal int bottomValue;
+
+ internal MyFieldComparator(int numHits)
+ {
+ slotValues = new int[numHits];
+ }
+
+ public override void Copy(int slot, int doc)
+ {
+ slotValues[slot] = docValues[doc];
+ }
+
+ public override int Compare(int slot1, int slot2)
+ {
+ return slotValues[slot1] - slotValues[slot2];
+ }
+
+ public override int CompareBottom(int doc)
+ {
+ return bottomValue - docValues[doc];
+ }
+
+ public override void SetBottom(int bottom)
+ {
+ bottomValue = slotValues[bottom];
+ }
+
+ public override void SetNextReader(IndexReader reader, int docBase)
+ {
+ docValues = Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetInts(reader, "parser", new AnonymousClassIntParser1(this));
+ }
+
+ public override System.IComparable Value(int slot)
+ {
+ return (System.Int32) slotValues[slot];
+ }
+ }
+
+ [Serializable]
+ internal class MyFieldComparatorSource:FieldComparatorSource
+ {
+ public override FieldComparator NewComparator(System.String fieldname, int numHits, int sortPos, bool reversed)
+ {
+ return new MyFieldComparator(numHits);
+ }
+ }
+
+ // Test sorting w/ custom FieldComparator
+ [Test]
+ public virtual void TestNewCustomFieldParserSort()
+ {
+ sort.SetSort(new SortField[]{new SortField("parser", new MyFieldComparatorSource())});
+ AssertMatches(full, queryA, sort, "JIHGFEDCBA");
+ }
+
// test sorts where the type of field is determined dynamically
[Test]
public virtual void TestAutoSort()
@@ -402,7 +751,7 @@
AssertMatches(full, queryG, sort, "ZYXW");
// Do the same for a MultiSearcher
- Searcher multiSearcher = new MultiSearcher(new Lucene.Net.Search.Searchable[]{full});
+ Searcher multiSearcher = new MultiSearcher(new Searchable[]{full});
sort.SetSort(new SortField[]{new SortField("int"), new SortField("string", SortField.STRING), new SortField("float")});
AssertMatches(multiSearcher, queryG, sort, "ZWXY");
@@ -412,7 +761,7 @@
// Don't close the multiSearcher. it would close the full searcher too!
// Do the same for a ParallelMultiSearcher
- Searcher parallelSearcher = new ParallelMultiSearcher(new Lucene.Net.Search.Searchable[]{full});
+ Searcher parallelSearcher = new ParallelMultiSearcher(new Searchable[]{full});
sort.SetSort(new SortField[]{new SortField("int"), new SortField("string", SortField.STRING), new SortField("float")});
AssertMatches(parallelSearcher, queryG, sort, "ZWXY");
@@ -455,7 +804,7 @@
public virtual void TestInternationalSort()
{
sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("en-US")));
- AssertMatches(full, queryY, sort, "BFJHD"); // NOTE: this is "BFJDH" in Java's version
+ AssertMatches(full, queryY, sort, "BFJDH");
sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("sv" + "-" + "se")));
AssertMatches(full, queryY, sort, "BJDFH");
@@ -475,13 +824,13 @@
[Test]
public virtual void TestInternationalMultiSearcherSort()
{
- Searcher multiSearcher = new MultiSearcher(new Lucene.Net.Search.Searchable[]{full});
+ Searcher multiSearcher = new MultiSearcher(new Searchable[]{full});
sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("sv" + "-" + "se")));
AssertMatches(multiSearcher, queryY, sort, "BJDFH");
sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("en-US")));
- AssertMatches(multiSearcher, queryY, sort, "BFJHD"); // NOTE: this is "BFJDH" in Java's version
+ AssertMatches(multiSearcher, queryY, sort, "BFJDH");
sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("da" + "-" + "dk")));
AssertMatches(multiSearcher, queryY, sort, "BJDHF");
@@ -506,42 +855,16 @@
[Test]
public virtual void TestMultiSort()
{
- MultiSearcher searcher = new MultiSearcher(new Lucene.Net.Search.Searchable[]{searchX, searchY});
- RunMultiSorts(searcher);
+ MultiSearcher searcher = new MultiSearcher(new Searchable[]{searchX, searchY});
+ RunMultiSorts(searcher, false);
}
// test a variety of sorts using a parallel multisearcher
[Test]
public virtual void TestParallelMultiSort()
{
- Searcher searcher = new ParallelMultiSearcher(new Lucene.Net.Search.Searchable[]{searchX, searchY});
- RunMultiSorts(searcher);
- }
-
- // test a variety of sorts using a remote searcher
- [Test]
- public virtual void TestRemoteSort()
- {
- Lucene.Net.Search.Searchable searcher = GetRemote();
- MultiSearcher multi = new MultiSearcher(new Lucene.Net.Search.Searchable[]{searcher});
- RunMultiSorts(multi);
- }
-
- // test custom search when remote
- [Test]
- public virtual void TestRemoteCustomSort()
- {
- Lucene.Net.Search.Searchable searcher = GetRemote();
- MultiSearcher multi = new MultiSearcher(new Lucene.Net.Search.Searchable[]{searcher});
- sort.SetSort(new SortField("custom", SampleComparable.GetComparatorSource()));
- AssertMatches(multi, queryX, sort, "CAIEG");
- sort.SetSort(new SortField("custom", SampleComparable.GetComparatorSource(), true));
- AssertMatches(multi, queryY, sort, "HJDBF");
- SortComparator custom = SampleComparable.GetComparator();
- sort.SetSort(new SortField("custom", custom));
- AssertMatches(multi, queryX, sort, "CAIEG");
- sort.SetSort(new SortField("custom", custom, true));
- AssertMatches(multi, queryY, sort, "HJDBF");
+ Searcher searcher = new ParallelMultiSearcher(new Searchable[]{searchX, searchY});
+ RunMultiSorts(searcher, false);
}
// test that the relevancy scores are the same even if
@@ -551,102 +874,78 @@
{
// capture relevancy scores
- System.Collections.Hashtable scoresX = GetScores(full.Search(queryX, null, 1000).scoreDocs, full);
- System.Collections.Hashtable scoresY = GetScores(full.Search(queryY, null, 1000).scoreDocs, full);
- System.Collections.Hashtable scoresA = GetScores(full.Search(queryA, null, 1000).scoreDocs, full);
+ System.Collections.Hashtable scoresX = GetScores(full.Search(queryX, null, 1000).scoreDocs, full);
+ System.Collections.Hashtable scoresY = GetScores(full.Search(queryY, null, 1000).scoreDocs, full);
+ System.Collections.Hashtable scoresA = GetScores(full.Search(queryA, null, 1000).scoreDocs, full);
// we'll test searching locally, remote and multi
- MultiSearcher remote = new MultiSearcher(new Lucene.Net.Search.Searchable[]{GetRemote()});
- MultiSearcher multi = new MultiSearcher(new Lucene.Net.Search.Searchable[]{searchX, searchY});
+
+ MultiSearcher multi = new MultiSearcher(new Searchable[]{searchX, searchY});
// change sorting and make sure relevancy stays the same
sort = new Sort();
AssertSameValues(scoresX, GetScores(full.Search(queryX, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresX, GetScores(remote.Search(queryX, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresX, GetScores(multi.Search(queryX, null, 1000, sort).scoreDocs, multi));
AssertSameValues(scoresY, GetScores(full.Search(queryY, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresY, GetScores(remote.Search(queryY, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresY, GetScores(multi.Search(queryY, null, 1000, sort).scoreDocs, multi));
AssertSameValues(scoresA, GetScores(full.Search(queryA, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresA, GetScores(remote.Search(queryA, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresA, GetScores(multi.Search(queryA, null, 1000, sort).scoreDocs, multi));
sort.SetSort(SortField.FIELD_DOC);
AssertSameValues(scoresX, GetScores(full.Search(queryX, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresX, GetScores(remote.Search(queryX, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresX, GetScores(multi.Search(queryX, null, 1000, sort).scoreDocs, multi));
AssertSameValues(scoresY, GetScores(full.Search(queryY, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresY, GetScores(remote.Search(queryY, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresY, GetScores(multi.Search(queryY, null, 1000, sort).scoreDocs, multi));
AssertSameValues(scoresA, GetScores(full.Search(queryA, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresA, GetScores(remote.Search(queryA, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresA, GetScores(multi.Search(queryA, null, 1000, sort).scoreDocs, multi));
sort.SetSort("int");
AssertSameValues(scoresX, GetScores(full.Search(queryX, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresX, GetScores(remote.Search(queryX, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresX, GetScores(multi.Search(queryX, null, 1000, sort).scoreDocs, multi));
AssertSameValues(scoresY, GetScores(full.Search(queryY, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresY, GetScores(remote.Search(queryY, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresY, GetScores(multi.Search(queryY, null, 1000, sort).scoreDocs, multi));
AssertSameValues(scoresA, GetScores(full.Search(queryA, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresA, GetScores(remote.Search(queryA, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresA, GetScores(multi.Search(queryA, null, 1000, sort).scoreDocs, multi));
sort.SetSort("float");
AssertSameValues(scoresX, GetScores(full.Search(queryX, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresX, GetScores(remote.Search(queryX, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresX, GetScores(multi.Search(queryX, null, 1000, sort).scoreDocs, multi));
AssertSameValues(scoresY, GetScores(full.Search(queryY, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresY, GetScores(remote.Search(queryY, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresY, GetScores(multi.Search(queryY, null, 1000, sort).scoreDocs, multi));
AssertSameValues(scoresA, GetScores(full.Search(queryA, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresA, GetScores(remote.Search(queryA, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresA, GetScores(multi.Search(queryA, null, 1000, sort).scoreDocs, multi));
sort.SetSort("string");
AssertSameValues(scoresX, GetScores(full.Search(queryX, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresX, GetScores(remote.Search(queryX, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresX, GetScores(multi.Search(queryX, null, 1000, sort).scoreDocs, multi));
AssertSameValues(scoresY, GetScores(full.Search(queryY, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresY, GetScores(remote.Search(queryY, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresY, GetScores(multi.Search(queryY, null, 1000, sort).scoreDocs, multi));
AssertSameValues(scoresA, GetScores(full.Search(queryA, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresA, GetScores(remote.Search(queryA, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresA, GetScores(multi.Search(queryA, null, 1000, sort).scoreDocs, multi));
sort.SetSort(new System.String[]{"int", "float"});
AssertSameValues(scoresX, GetScores(full.Search(queryX, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresX, GetScores(remote.Search(queryX, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresX, GetScores(multi.Search(queryX, null, 1000, sort).scoreDocs, multi));
AssertSameValues(scoresY, GetScores(full.Search(queryY, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresY, GetScores(remote.Search(queryY, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresY, GetScores(multi.Search(queryY, null, 1000, sort).scoreDocs, multi));
AssertSameValues(scoresA, GetScores(full.Search(queryA, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresA, GetScores(remote.Search(queryA, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresA, GetScores(multi.Search(queryA, null, 1000, sort).scoreDocs, multi));
sort.SetSort(new SortField[]{new SortField("int", true), new SortField(null, SortField.DOC, true)});
AssertSameValues(scoresX, GetScores(full.Search(queryX, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresX, GetScores(remote.Search(queryX, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresX, GetScores(multi.Search(queryX, null, 1000, sort).scoreDocs, multi));
AssertSameValues(scoresY, GetScores(full.Search(queryY, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresY, GetScores(remote.Search(queryY, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresY, GetScores(multi.Search(queryY, null, 1000, sort).scoreDocs, multi));
AssertSameValues(scoresA, GetScores(full.Search(queryA, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresA, GetScores(remote.Search(queryA, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresA, GetScores(multi.Search(queryA, null, 1000, sort).scoreDocs, multi));
sort.SetSort(new System.String[]{"float", "string"});
AssertSameValues(scoresX, GetScores(full.Search(queryX, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresX, GetScores(remote.Search(queryX, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresX, GetScores(multi.Search(queryX, null, 1000, sort).scoreDocs, multi));
AssertSameValues(scoresY, GetScores(full.Search(queryY, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresY, GetScores(remote.Search(queryY, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresY, GetScores(multi.Search(queryY, null, 1000, sort).scoreDocs, multi));
AssertSameValues(scoresA, GetScores(full.Search(queryA, null, 1000, sort).scoreDocs, full));
- AssertSameValues(scoresA, GetScores(remote.Search(queryA, null, 1000, sort).scoreDocs, remote));
AssertSameValues(scoresA, GetScores(multi.Search(queryA, null, 1000, sort).scoreDocs, multi));
}
@@ -671,46 +970,201 @@
Assert.AreEqual(docs1.scoreDocs[0].score, docs2.scoreDocs[0].score, 1e-6);
}
+ [Test]
+ public virtual void TestSortWithoutFillFields()
+ {
+
+ // There was previously a bug in TopFieldCollector when fillFields was set
+ // to false - the same doc and score was set in ScoreDoc[] array. This test
+ // asserts that if fillFields is false, the documents are set properly. It
+ // does not use Searcher's default search methods (with Sort) since all set
+ // fillFields to true.
+ Sort[] sort = new Sort[]{new Sort(SortField.FIELD_DOC), new Sort()};
+ for (int i = 0; i < sort.Length; i++)
+ {
+ Query q = new MatchAllDocsQuery();
+ TopDocsCollector tdc = TopFieldCollector.create(sort[i], 10, false, false, false, true);
+
+ full.Search(q, tdc);
+
+ ScoreDoc[] sd = tdc.TopDocs().scoreDocs;
+ for (int j = 1; j < sd.Length; j++)
+ {
+ Assert.IsTrue(sd[j].doc != sd[j - 1].doc);
+ }
+ }
+ }
+
+ [Test]
+ public virtual void TestSortWithoutScoreTracking()
+ {
+
+ // Two Sort criteria to instantiate the multi/single comparators.
+ Sort[] sort = new Sort[]{new Sort(SortField.FIELD_DOC), new Sort()};
+ for (int i = 0; i < sort.Length; i++)
+ {
+ Query q = new MatchAllDocsQuery();
+ TopDocsCollector tdc = TopFieldCollector.create(sort[i], 10, true, false, false, true);
+
+ full.Search(q, tdc);
+
+ TopDocs td = tdc.TopDocs();
+ ScoreDoc[] sd = td.scoreDocs;
+ for (int j = 0; j < sd.Length; j++)
+ {
+ Assert.IsTrue(System.Single.IsNaN(sd[j].score));
+ }
+ Assert.IsTrue(System.Single.IsNaN(td.GetMaxScore()));
+ }
+ }
+
+ [Test]
+ public virtual void TestSortWithScoreNoMaxScoreTracking()
+ {
+
+ // Two Sort criteria to instantiate the multi/single comparators.
+ Sort[] sort = new Sort[]{new Sort(SortField.FIELD_DOC), new Sort()};
+ for (int i = 0; i < sort.Length; i++)
+ {
+ Query q = new MatchAllDocsQuery();
+ TopDocsCollector tdc = TopFieldCollector.create(sort[i], 10, true, true, false, true);
+
+ full.Search(q, tdc);
+
+ TopDocs td = tdc.TopDocs();
+ ScoreDoc[] sd = td.scoreDocs;
+ for (int j = 0; j < sd.Length; j++)
+ {
+ Assert.IsTrue(!System.Single.IsNaN(sd[j].score));
+ }
+ Assert.IsTrue(System.Single.IsNaN(td.GetMaxScore()));
+ }
+ }
+
+ [Test]
+ public virtual void TestSortWithScoreAndMaxScoreTracking()
+ {
+
+ // Two Sort criteria to instantiate the multi/single comparators.
+ Sort[] sort = new Sort[]{new Sort(SortField.FIELD_DOC), new Sort()};
+ for (int i = 0; i < sort.Length; i++)
+ {
+ Query q = new MatchAllDocsQuery();
+ TopDocsCollector tdc = TopFieldCollector.create(sort[i], 10, true, true, true, true);
+
+ full.Search(q, tdc);
+
+ TopDocs td = tdc.TopDocs();
+ ScoreDoc[] sd = td.scoreDocs;
+ for (int j = 0; j < sd.Length; j++)
+ {
+ Assert.IsTrue(!System.Single.IsNaN(sd[j].score));
+ }
+ Assert.IsTrue(!System.Single.IsNaN(td.GetMaxScore()));
+ }
+ }
+
+ [Test]
+ public virtual void TestOutOfOrderDocsScoringSort()
+ {
+
+ // Two Sort criteria to instantiate the multi/single comparators.
+ Sort[] sort = new Sort[]{new Sort(SortField.FIELD_DOC), new Sort()};
+ bool[][] tfcOptions = new bool[][]{new bool[]{false, false, false}, new bool[]{false, false, true}, new bool[]{false, true, false}, new bool[]{false, true, true}, new bool[]{true, false, false}, new bool[]{true, false, true}, new bool[]{true, true, false}, new bool[]{true, true, true}};
+ System.String[] actualTFCClasses = new System.String[]{"OutOfOrderOneComparatorNonScoringCollector", "OutOfOrderOneComparatorScoringMaxScoreCollector", "OutOfOrderOneComparatorScoringNoMaxScoreCollector", "OutOfOrderOneComparatorScoringMaxScoreCollector", "OutOfOrderOneComparatorNonScoringCollector", "OutOfOrderOneComparatorScoringMaxScoreCollector", "OutOfOrderOneComparatorScoringNoMaxScoreCollector", "OutOfOrderOneComparatorScoringMaxScoreCollector"};
+
+ // Save the original value to set later.
+ bool origVal = BooleanQuery.GetAllowDocsOutOfOrder();
+
+ BooleanQuery.SetAllowDocsOutOfOrder(true);
+
+ BooleanQuery bq = new BooleanQuery();
+ // Add a Query with SHOULD, since bw.scorer() returns BooleanScorer2
+ // which delegates to BS if there are no mandatory clauses.
+ bq.Add(new MatchAllDocsQuery(), Occur.SHOULD);
+ // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
+ // the clause instead of BQ.
+ bq.SetMinimumNumberShouldMatch(1);
+ try
+ {
+ for (int i = 0; i < sort.Length; i++)
+ {
+ for (int j = 0; j < tfcOptions.Length; j++)
+ {
+ TopDocsCollector tdc = TopFieldCollector.create(sort[i], 10, tfcOptions[j][0], tfcOptions[j][1], tfcOptions[j][2], false);
+
+ Assert.IsTrue(tdc.GetType().FullName.EndsWith("$" + actualTFCClasses[j]));
+
+ full.Search(bq, tdc);
+
+ TopDocs td = tdc.TopDocs();
+ ScoreDoc[] sd = td.scoreDocs;
+ Assert.AreEqual(10, sd.Length);
+ }
+ }
+ }
+ finally
+ {
+ // Whatever happens, reset BooleanQuery.allowDocsOutOfOrder to the
+ // original value. Don't set it to false in case the implementation in BQ
+ // will change some day.
+ BooleanQuery.SetAllowDocsOutOfOrder(origVal);
+ }
+ }
+
+ [Test]
+ public virtual void TestSortWithScoreAndMaxScoreTrackingNoResults()
+ {
+
+ // Two Sort criteria to instantiate the multi/single comparators.
+ Sort[] sort = new Sort[]{new Sort(SortField.FIELD_DOC), new Sort()};
+ for (int i = 0; i < sort.Length; i++)
+ {
+ TopDocsCollector tdc = TopFieldCollector.create(sort[i], 10, true, true, true, true);
+ TopDocs td = tdc.TopDocs();
+ Assert.AreEqual(0, td.totalHits);
+ Assert.IsTrue(System.Single.IsNaN(td.GetMaxScore()));
+ }
+ }
// runs a variety of sorts useful for multisearchers
- private void RunMultiSorts(Searcher multi)
+ private void RunMultiSorts(Searcher multi, bool isFull)
{
sort.SetSort(SortField.FIELD_DOC);
- AssertMatchesPattern(multi, queryA, sort, "[AB]{2}[CD]{2}[EF]{2}[GH]{2}[IJ]{2}");
+ System.String expected = isFull?"ABCDEFGHIJ":"ACEGIBDFHJ";
+ AssertMatches(multi, queryA, sort, expected);
sort.SetSort(new SortField("int", SortField.INT));
- AssertMatchesPattern(multi, queryA, sort, "IDHFGJ[ABE]{3}C");
+ expected = isFull?"IDHFGJABEC":"IDHFGJAEBC";
+ AssertMatches(multi, queryA, sort, expected);
sort.SetSort(new SortField[]{new SortField("int", SortField.INT), SortField.FIELD_DOC});
- AssertMatchesPattern(multi, queryA, sort, "IDHFGJ[AB]{2}EC");
+ expected = isFull?"IDHFGJABEC":"IDHFGJAEBC";
+ AssertMatches(multi, queryA, sort, expected);
sort.SetSort("int");
- AssertMatchesPattern(multi, queryA, sort, "IDHFGJ[AB]{2}EC");
+ expected = isFull?"IDHFGJABEC":"IDHFGJAEBC";
+ AssertMatches(multi, queryA, sort, expected);
sort.SetSort(new SortField[]{new SortField("float", SortField.FLOAT), SortField.FIELD_DOC});
- AssertMatchesPattern(multi, queryA, sort, "GDHJ[CI]{2}EFAB");
+ AssertMatches(multi, queryA, sort, "GDHJCIEFAB");
sort.SetSort("float");
- AssertMatchesPattern(multi, queryA, sort, "GDHJ[CI]{2}EFAB");
+ AssertMatches(multi, queryA, sort, "GDHJCIEFAB");
sort.SetSort("string");
AssertMatches(multi, queryA, sort, "DJAIHGFEBC");
sort.SetSort("int", true);
- AssertMatchesPattern(multi, queryA, sort, "C[AB]{2}EJGFHDI");
+ expected = isFull?"CABEJGFHDI":"CAEBJGFHDI";
+ AssertMatches(multi, queryA, sort, expected);
sort.SetSort("float", true);
- AssertMatchesPattern(multi, queryA, sort, "BAFE[IC]{2}JHDG");
+ AssertMatches(multi, queryA, sort, "BAFECIJHDG");
sort.SetSort("string", true);
AssertMatches(multi, queryA, sort, "CBEFGHIAJD");
- sort.SetSort(new SortField[]{new SortField("string", new System.Globalization.CultureInfo("en-US"))});
- AssertMatches(multi, queryA, sort, "DJAIHGFEBC");
-
- sort.SetSort(new SortField[]{new SortField("string", new System.Globalization.CultureInfo("en-US"), true)});
- AssertMatches(multi, queryA, sort, "CBEFGHIAJD");
-
sort.SetSort(new System.String[]{"int", "float"});
AssertMatches(multi, queryA, sort, "IDHFGJEABC");
@@ -731,17 +1185,38 @@
sort.SetSort("string", true);
AssertMatches(multi, queryF, sort, "IJZ");
+
+ // up to this point, all of the searches should have "sane"
+ // FieldCache behavior, and should have reused hte cache in several cases
+ AssertSaneFieldCaches("getName()" + " various"); // {{Aroush-2.9}} String junit.framework.TestCase.getName()
+ // next we'll check Locale based (String[]) for 'string', so purge first
+ Lucene.Net.Search.FieldCache_Fields.DEFAULT.PurgeAllCaches();
+
+ sort.SetSort(new SortField[]{new SortField("string", new System.Globalization.CultureInfo("en-US"))});
+ AssertMatches(multi, queryA, sort, "DJAIHGFEBC");
+
+ sort.SetSort(new SortField[]{new SortField("string", new System.Globalization.CultureInfo("en-US"), true)});
+ AssertMatches(multi, queryA, sort, "CBEFGHIAJD");
+
+ sort.SetSort(new SortField[]{new SortField("string", new System.Globalization.CultureInfo("en-GB"))});
+ AssertMatches(multi, queryA, sort, "DJAIHGFEBC");
+
+ AssertSaneFieldCaches("getName()" + " Locale.US + Locale.UK"); // {{Aroush-2.9}} String junit.framework.TestCase.getName()
+ Lucene.Net.Search.FieldCache_Fields.DEFAULT.PurgeAllCaches();
}
// make sure the documents returned by the search match the expected list
private void AssertMatches(Searcher searcher, Query query, Sort sort, System.String expectedResult)
{
- ScoreDoc[] result = searcher.Search(query, null, 1000, sort).scoreDocs;
+ //ScoreDoc[] result = searcher.search (query, null, 1000, sort).scoreDocs;
+ TopDocs hits = searcher.Search(query, null, expectedResult.Length, sort);
+ ScoreDoc[] result = hits.scoreDocs;
+ Assert.AreEqual(hits.totalHits, expectedResult.Length);
System.Text.StringBuilder buff = new System.Text.StringBuilder(10);
int n = result.Length;
for (int i = 0; i < n; ++i)
{
- Lucene.Net.Documents.Document doc = searcher.Doc(result[i].doc);
+ Document doc = searcher.Doc(result[i].doc);
System.String[] v = doc.GetValues("tracer");
for (int j = 0; j < v.Length; ++j)
{
@@ -751,36 +1226,16 @@
Assert.AreEqual(expectedResult, buff.ToString());
}
- // make sure the documents returned by the search match the expected list pattern
- private void AssertMatchesPattern(Searcher searcher, Query query, Sort sort, System.String pattern)
- {
- ScoreDoc[] result = searcher.Search(query, null, 1000, sort).scoreDocs;
- System.Text.StringBuilder buff = new System.Text.StringBuilder(10);
- int n = result.Length;
- for (int i = 0; i < n; ++i)
- {
- Lucene.Net.Documents.Document doc = searcher.Doc(result[i].doc);
- System.String[] v = doc.GetValues("tracer");
- for (int j = 0; j < v.Length; ++j)
- {
- buff.Append(v[j]);
- }
- }
- // System.out.println ("matching \""+buff+"\" against pattern \""+pattern+"\"");
- Pattern p = new System.Text.RegularExpressions.Regex(pattern);
- Assert.IsTrue(p.Match(buff.ToString()).Success);
- }
-
private System.Collections.Hashtable GetScores(ScoreDoc[] hits, Searcher searcher)
{
System.Collections.Hashtable scoreMap = new System.Collections.Hashtable();
int n = hits.Length;
for (int i = 0; i < n; ++i)
{
- Lucene.Net.Documents.Document doc = searcher.Doc(hits[i].doc);
+ Document doc = searcher.Doc(hits[i].doc);
System.String[] v = doc.GetValues("tracer");
Assert.AreEqual(v.Length, 1);
- scoreMap[v[0]] = (float)hits[i].score;
+ scoreMap[v[0]] = (float) hits[i].score;
}
return scoreMap;
}
@@ -791,7 +1246,7 @@
int n = m1.Count;
int m = m2.Count;
Assert.AreEqual(n, m);
- System.Collections.IEnumerator iter = new System.Collections.Hashtable().GetEnumerator();
+ System.Collections.IEnumerator iter = m1.Keys.GetEnumerator();
while (iter.MoveNext())
{
System.Object key = iter.Current;
@@ -807,87 +1262,5 @@
}
}
}
-
- private Lucene.Net.Search.Searchable GetRemote()
- {
- return LookupRemote();
- }
-
- private Lucene.Net.Search.Searchable LookupRemote()
- {
- return (Lucene.Net.Search.Searchable)Activator.GetObject(typeof(Lucene.Net.Search.Searchable), string.Format("http://localhost:{0}/SortedSearchable", port));
- }
-
- private System.Runtime.Remoting.Channels.Http.HttpChannel GetHttpChannel()
- {
- Random rnd = new Random((int) (DateTime.Now.Ticks & 0x7fffffff));
- port = rnd.Next(System.Net.IPEndPoint.MinPort, System.Net.IPEndPoint.MaxPort);
- System.Runtime.Remoting.Channels.Http.HttpChannel ch = new System.Runtime.Remoting.Channels.Http.HttpChannel(port);
- return ch;
- }
-
- public void StartServer()
- {
- System.Runtime.Remoting.Channels.Http.HttpChannel httpChannel = null;
-
- while (httpChannel == null)
- {
- try
- {
- httpChannel = GetHttpChannel();
- }
- catch (System.Net.Sockets.SocketException ex)
- {
- if (ex.ErrorCode != 10048)
- throw ex;
- }
- }
-
- try
- {
- System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(httpChannel, false);
- }
- catch (System.Net.Sockets.SocketException ex)
- {
- if (ex.ErrorCode == 10048) return; // EADDRINUSE?
- throw ex;
- }
-
- // construct an index
- Searcher local = GetFullIndex();
- // local.search (queryA, new Sort());
-
- // publish it
- RemoteSearchable impl = new RemoteSearchable(local);
- System.Runtime.Remoting.RemotingServices.Marshal(impl, "SortedSearchable");
- serverStarted = true;
- }
-
-
- //private Lucene.Net.Search.Searchable LookupRemote()
- //{
- // return (Lucene.Net.Search.Searchable) Activator.GetObject(typeof(Lucene.Net.Search.Searchable), @"http://localhost:1099/SortedSearchable");
- //}
-
- //public void StartServer()
- //{
- // try
- // {
- // System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(new System.Runtime.Remoting.Channels.Http.HttpChannel(1099), false);
- // }
- // catch (System.Net.Sockets.SocketException ex)
- // {
- // if (ex.ErrorCode == 10048) return; // EADDRINUSE?
- // throw ex;
- // }
-
- // // construct an index
- // Searcher local = GetFullIndex();
- // // local.search (queryA, new Sort());
-
- // // publish it
- // RemoteSearchable impl = new RemoteSearchable(local);
- // System.Runtime.Remoting.RemotingServices.Marshal(impl, "SortedSearchable");
- //}
}
}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestSpanQueryFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSpanQueryFilter.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSpanQueryFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSpanQueryFilter.cs Tue Nov 3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
@@ -19,6 +19,7 @@
using NUnit.Framework;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
using IndexReader = Lucene.Net.Index.IndexReader;
@@ -26,18 +27,21 @@
using Term = Lucene.Net.Index.Term;
using Directory = Lucene.Net.Store.Directory;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
using English = Lucene.Net.Util.English;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-
namespace Lucene.Net.Search
{
- [TestFixture]
- public class TestSpanQueryFilter : LuceneTestCase
+ [TestFixture]
+ public class TestSpanQueryFilter:LuceneTestCase
{
+
+ public TestSpanQueryFilter(System.String s):base(s)
+ {
+ }
+
[Test]
public virtual void TestFilterWorks()
{
@@ -56,39 +60,41 @@
SpanTermQuery query = new SpanTermQuery(new Term("field", English.IntToEnglish(10).Trim()));
SpanQueryFilter filter = new SpanQueryFilter(query);
SpanFilterResult result = filter.BitSpans(reader);
- DocIdSet docIdSet = result.GetDocIdSet();
+ DocIdSet docIdSet = result.GetDocIdSet();
Assert.IsTrue(docIdSet != null, "docIdSet is null and it shouldn't be");
AssertContainsDocId("docIdSet doesn't contain docId 10", docIdSet, 10);
System.Collections.IList spans = result.GetPositions();
Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
- int size = GetDocIdSetSize(docIdSet);
- Assert.IsTrue(spans.Count == size, "spans Size: " + spans.Count + " is not: " + size);
+ int size = GetDocIdSetSize(docIdSet);
+ Assert.IsTrue(spans.Count == size, "spans Size: " + spans.Count + " is not: " + size);
for (System.Collections.IEnumerator iterator = spans.GetEnumerator(); iterator.MoveNext(); )
{
SpanFilterResult.PositionInfo info = (SpanFilterResult.PositionInfo) iterator.Current;
Assert.IsTrue(info != null, "info is null and it shouldn't be");
//The doc should indicate the bit is on
- AssertContainsDocId("docIdSet doesn't contain docId " + info.GetDoc(), docIdSet, info.GetDoc());
+ AssertContainsDocId("docIdSet doesn't contain docId " + info.GetDoc(), docIdSet, info.GetDoc());
//There should be two positions in each
Assert.IsTrue(info.GetPositions().Count == 2, "info.getPositions() Size: " + info.GetPositions().Count + " is not: " + 2);
}
reader.Close();
}
-
- internal int GetDocIdSetSize(DocIdSet docIdSet)
- {
- int size = 0;
- DocIdSetIterator it = docIdSet.Iterator();
- while (it.Next())
- size++;
- return size;
- }
-
- public void AssertContainsDocId(string msg, DocIdSet docIdSet, int docId)
- {
- DocIdSetIterator it = docIdSet.Iterator();
- Assert.IsTrue(it.SkipTo(docId), msg);
- Assert.IsTrue(it.Doc() == docId, msg);
- }
+
+ internal virtual int GetDocIdSetSize(DocIdSet docIdSet)
+ {
+ int size = 0;
+ DocIdSetIterator it = docIdSet.Iterator();
+ while (it.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+ {
+ size++;
+ }
+ return size;
+ }
+
+ public virtual void AssertContainsDocId(System.String msg, DocIdSet docIdSet, int docId)
+ {
+ DocIdSetIterator it = docIdSet.Iterator();
+ Assert.IsTrue(it.Advance(docId) != DocIdSetIterator.NO_MORE_DOCS, msg);
+ Assert.IsTrue(it.DocID() == docId, msg);
+ }
}
}
\ No newline at end of file
Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestStressSort.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestStressSort.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestStressSort.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestStressSort.cs Tue Nov 3 18:06:27 2009
@@ -0,0 +1,471 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using Directory = Lucene.Net.Store.Directory;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using FieldCacheSanityChecker = Lucene.Net.Util.FieldCacheSanityChecker;
+using Insanity = Lucene.Net.Util.FieldCacheSanityChecker.Insanity;
+using InsanityType = Lucene.Net.Util.FieldCacheSanityChecker.InsanityType;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using _TestUtil = Lucene.Net.Util._TestUtil;
+
+namespace Lucene.Net.Search
+{
+
+ [TestFixture]
+ public class TestStressSort:LuceneTestCase
+ {
+
+ private const int NUM_DOCS = 5000;
+ // NOTE: put seed in here to make failures
+ // deterministic, but do not commit with a seed (to
+ // better test):
+ private System.Random r;
+ private Directory dir, dir2, dir3;
+ private IndexSearcher searcherMultiSegment;
+ private IndexSearcher searcherFewSegment;
+ private IndexSearcher searcherSingleSegment;
+
+ private const bool VERBOSE = false;
+
+ // min..max
+ private int NextInt(int min, int max)
+ {
+ return min + r.Next(max - min + 1);
+ }
+
+ // 0..(lim-1)
+ private int NextInt(int lim)
+ {
+ return r.Next(lim);
+ }
+
+ internal char[] buffer = new char[20];
+ private System.String RandomString(int size)
+ {
+ System.Diagnostics.Debug.Assert(size < 20);
+ for (int i = 0; i < size; i++)
+ {
+ buffer[i] = (char) NextInt(48, 122);
+ }
+ return new System.String(buffer, 0, size);
+ }
+
+ private void Create()
+ {
+
+ // NOTE: put seed in here to make failures
+ // deterministic, but do not commit with a seed (to
+ // better test):
+ dir = new MockRAMDirectory();
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+ writer.SetMaxBufferedDocs(17);
+
+ Document doc = new Document();
+ Document doc2 = new Document();
+
+ Field id = new Field("id", "", Field.Store.YES, Field.Index.NO);
+ doc.Add(id);
+ doc2.Add(id);
+
+ Field contents = new Field("contents", "", Field.Store.NO, Field.Index.ANALYZED);
+ doc.Add(contents);
+ doc2.Add(contents);
+
+ Field byteField = new Field("byte", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
+ doc.Add(byteField);
+ doc2.Add(byteField);
+
+ Field shortField = new Field("short", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
+ doc.Add(shortField);
+ doc2.Add(shortField);
+
+ Field intField = new Field("int", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
+ doc.Add(intField);
+ doc2.Add(intField);
+
+ Field longField = new Field("long", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
+ doc.Add(longField);
+ doc2.Add(longField);
+
+ Field floatField = new Field("float", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
+ doc.Add(floatField);
+ doc2.Add(floatField);
+
+ Field doubleField = new Field("double", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
+ doc.Add(doubleField);
+ doc2.Add(doubleField);
+
+ // we use two diff string fields so our FieldCache usage
+ // is less suspicious to cache inspection
+ Field stringField = new Field("string", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
+ doc.Add(stringField);
+ Field stringFieldIdx = new Field("stringIdx", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
+ doc.Add(stringFieldIdx);
+ // doc2 doesn't have stringField or stringFieldIdx, so we get nulls
+
+ for (int i = 0; i < NUM_DOCS; i++)
+ {
+ id.SetValue("" + i);
+ if (i % 1000 == 0)
+ {
+ contents.SetValue("a b c z");
+ }
+ else if (i % 100 == 0)
+ {
+ contents.SetValue("a b c y");
+ }
+ else if (i % 10 == 0)
+ {
+ contents.SetValue("a b c x");
+ }
+ else
+ {
+ contents.SetValue("a b c");
+ }
+ byteField.SetValue("" + NextInt((sbyte) System.SByte.MinValue, (sbyte) System.SByte.MaxValue));
+ if (NextInt(10) == 3)
+ {
+ shortField.SetValue("" + System.Int16.MinValue);
+ }
+ else if (NextInt(10) == 7)
+ {
+ shortField.SetValue("" + System.Int16.MaxValue);
+ }
+ else
+ {
+ shortField.SetValue("" + NextInt(System.Int16.MinValue, System.Int16.MaxValue));
+ }
+
+ if (NextInt(10) == 3)
+ {
+ intField.SetValue("" + System.Int32.MinValue);
+ }
+ else if (NextInt(10) == 7)
+ {
+ intField.SetValue("" + System.Int32.MaxValue);
+ }
+ else
+ {
+ intField.SetValue("" + this.r.Next());
+ }
+
+ if (NextInt(10) == 3)
+ {
+ longField.SetValue("" + System.Int64.MinValue);
+ }
+ else if (NextInt(10) == 7)
+ {
+ longField.SetValue("" + System.Int64.MaxValue);
+ }
+ else
+ {
+ longField.SetValue("" + this.r.Next(System.Int32.MaxValue));
+ }
+ floatField.SetValue("" + (float) this.r.NextDouble());
+ doubleField.SetValue("" + this.r.NextDouble());
+ if (i % 197 == 0)
+ {
+ writer.AddDocument(doc2);
+ }
+ else
+ {
+ System.String r = RandomString(NextInt(20));
+ stringField.SetValue(r);
+ stringFieldIdx.SetValue(r);
+ writer.AddDocument(doc);
+ }
+ }
+ writer.Close();
+ searcherMultiSegment = new IndexSearcher(dir);
+ searcherMultiSegment.SetDefaultFieldSortScoring(true, true);
+
+ dir2 = new MockRAMDirectory(dir);
+ writer = new IndexWriter(dir2, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+ writer.Optimize();
+ writer.Close();
+ searcherSingleSegment = new IndexSearcher(dir2);
+ searcherSingleSegment.SetDefaultFieldSortScoring(true, true);
+ dir3 = new MockRAMDirectory(dir);
+ writer = new IndexWriter(dir3, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+ writer.Optimize(3);
+ writer.Close();
+ searcherFewSegment = new IndexSearcher(dir3);
+ searcherFewSegment.SetDefaultFieldSortScoring(true, true);
+ }
+
+		/// <summary>
+		/// Tears down the searchers and directories built by Create().
+		/// Closes searchers before their backing directories.
+		/// </summary>
+		private void Close()
+		{
+			searcherMultiSegment.Close();
+			searcherFewSegment.Close();
+			searcherSingleSegment.Close();
+			dir.Close();
+			dir2.Close();
+			// dir3 is opened in Create() (backing searcherFewSegment) but was
+			// previously never closed here -- a resource leak in teardown.
+			dir3.Close();
+		}
+
+		/// <summary>
+		/// Randomized cross-check of the sorting implementations: for every
+		/// combination of index shape (single-, few-, many-segment), query,
+		/// queue size and one- or two-field sort, runs the same search through
+		/// the legacy sort path (via GetOldSort) and the current path, and
+		/// asserts via Compare that the hits agree.
+		/// </summary>
+		[Test]
+		public virtual void TestSort()
+		{
+			this.r = NewRandom();
+			
+			// reverse & not
+			// all types
+			// restrictive & non restrictive searches (on contents)
+			
+			Create();
+			
+			// Build every supported single-field sort, once forward (r == 0)
+			// and once reversed (r == 1).
+			Sort[] sorts = new Sort[50];
+			int sortCount = 0;
+			
+			for (int r = 0; r < 2; r++)
+			{
+				Sort sort;
+				bool reverse = 1 == r;
+				
+				sorts[sortCount++] = sort = new Sort();
+				sort.SetSort(new SortField[]{new SortField("byte", SortField.BYTE, reverse)});
+				
+				sorts[sortCount++] = sort = new Sort();
+				sort.SetSort(new SortField[]{new SortField("short", SortField.SHORT, reverse)});
+				
+				sorts[sortCount++] = sort = new Sort();
+				sort.SetSort(new SortField[]{new SortField("int", SortField.INT, reverse)});
+				
+				sorts[sortCount++] = sort = new Sort();
+				sort.SetSort(new SortField[]{new SortField("long", SortField.LONG, reverse)});
+				
+				sorts[sortCount++] = sort = new Sort();
+				sort.SetSort(new SortField[]{new SortField("float", SortField.FLOAT, reverse)});
+				
+				sorts[sortCount++] = sort = new Sort();
+				sort.SetSort(new SortField[]{new SortField("double", SortField.DOUBLE, reverse)});
+				
+				sorts[sortCount++] = sort = new Sort();
+				sort.SetSort(new SortField[]{new SortField("string", SortField.STRING_VAL, reverse)});
+				
+				sorts[sortCount++] = sort = new Sort();
+				sort.SetSort(new SortField[]{new SortField("stringIdx", SortField.STRING, reverse)});
+				
+				// Ordinal-based string comparators are not exercised here
+				// (kept from the upstream Java test, disabled there as well).
+				//sorts[sortCount++] = sort = new Sort();
+				//sort.setSort(new SortField[] {new SortField("string", SortField.STRING_ORD, reverse)});
+				
+				//sorts[sortCount++] = sort = new Sort();
+				//sort.setSort(new SortField[] {new SortField("string", SortField.STRING_ORD_VAL, reverse)});
+				
+				//sorts[sortCount++] = sort = new Sort();
+				//sort.setSort(new SortField[] {new SortField("string", SortField.STRING_ORD_VAL_DEM, reverse)});
+				
+				//sorts[sortCount++] = sort = new Sort();
+				//sort.setSort(new SortField[] {new SortField("string", SortField.STRING_ORD_VAL_DEM2, reverse)});
+				
+				sorts[sortCount++] = sort = new Sort();
+				sort.SetSort(new SortField[]{new SortField(null, SortField.SCORE, reverse)});
+				
+				sorts[sortCount++] = sort = new Sort();
+				sort.SetSort(new SortField[]{new SortField(null, SortField.DOC, reverse)});
+			}
+			
+			// Queries of decreasing selectivity (see the contents field values
+			// assigned in Create()).
+			Query[] queries = new Query[4];
+			queries[0] = new MatchAllDocsQuery();
+			queries[1] = new TermQuery(new Term("contents", "x")); // matches every 10th doc
+			queries[2] = new TermQuery(new Term("contents", "y")); // matches every 100th doc
+			queries[3] = new TermQuery(new Term("contents", "z")); // matches every 1000th doc
+			
+			for (int sx = 0; sx < 3; sx++)
+			{
+				IndexSearcher searcher;
+				if (sx == 0)
+				{
+					searcher = searcherSingleSegment;
+				}
+				else if (sx == 1)
+				{
+					searcher = searcherFewSegment;
+				}
+				else
+				{
+					searcher = searcherMultiSegment;
+				}
+				
+				for (int qx = 0; qx < queries.Length; qx++)
+				{
+					Query query = queries[qx];
+					
+					for (int q = 0; q < 3; q++)
+					{
+						
+						// Exercise small, medium and large priority queues.
+						int queueSize;
+						if (q == 0)
+						{
+							queueSize = 10;
+						}
+						else if (q == 1)
+						{
+							queueSize = 100;
+						}
+						else
+						{
+							queueSize = 1000;
+						}
+						
+						for (int s = 0; s < sortCount; s++)
+						{
+							Sort sort1 = sorts[s];
+							
+							// s2 == -1 tests sort1 alone; otherwise sort1's
+							// field is paired with sorts[s2]'s as a tiebreak.
+							for (int s2 = - 1; s2 < sortCount; s2++)
+							{
+								Sort sort;
+								if (s2 == - 1)
+								{
+									// Single field sort
+									sort = sort1;
+								}
+								else
+								{
+									sort = new Sort(new SortField[]{sort1.GetSort()[0], sorts[s2].GetSort()[0]});
+								}
+								
+								// Old
+								Sort oldSort = GetOldSort(sort);
+								
+								if (VERBOSE)
+								{
+									System.Console.Out.WriteLine("query=" + query);
+									if (sx == 0)
+									{
+										System.Console.Out.WriteLine("  single-segment index");
+									}
+									else if (sx == 1)
+									{
+										System.Console.Out.WriteLine("  few-segment index");
+									}
+									else
+									{
+										System.Console.Out.WriteLine("  many-segment index");
+									}
+									System.Console.Out.WriteLine("  numHit=" + queueSize);
+									System.Console.Out.WriteLine("  old=" + oldSort);
+									System.Console.Out.WriteLine("  new=" + sort);
+								}
+								
+								TopDocs newHits = searcher.Search(query, null, queueSize, sort);
+								TopDocs oldHits = searcher.Search(query, null, queueSize, oldSort);
+								
+								Compare(oldHits, newHits);
+							}
+						}
+					}
+				}
+			}
+			
+			// we explicitly test the old sort method and
+			// compare with the new, so we expect to see SUBREADER
+			// sanity checks fail.
+			Insanity[] insanity = FieldCacheSanityChecker.CheckSanity(Lucene.Net.Search.FieldCache_Fields.DEFAULT);
+			try
+			{
+				// Re-label expected SUBREADER insanities; any entry left
+				// unconverted means genuinely unexpected cache usage.
+				int ignored = 0;
+				for (int i = 0; i < insanity.Length; i++)
+				{
+					if (insanity[i].GetType() == InsanityType.SUBREADER)
+					{
+						insanity[i] = new Insanity(InsanityType.EXPECTED, insanity[i].GetMsg(), insanity[i].GetCacheEntries());
+						ignored++;
+					}
+				}
+				Assert.AreEqual(ignored, insanity.Length, "Not all insane field cache usage was expected");
+				
+				insanity = null;
+			}
+			finally
+			{
+				// report this in the event of any exception/failure
+				// if no failure, then insanity will be null
+				if (null != insanity)
+				{
+					System.IO.StreamWriter temp_writer;
+					temp_writer = new System.IO.StreamWriter(System.Console.OpenStandardError(), System.Console.Error.Encoding);
+					temp_writer.AutoFlush = true;
+					DumpArray(GetTestLabel() + ": Insane FieldCache usage(s)", insanity, temp_writer);
+				}
+			}
+			// we've already checked FieldCache, purge so tearDown doesn't complain
+			PurgeFieldCache(Lucene.Net.Search.FieldCache_Fields.DEFAULT); // so
+			
+			Close();
+		}
+
+		/// <summary>
+		/// Builds the legacy-search twin of the given sort: each field keeps
+		/// its name, direction and type, except that the by-value "string"
+		/// field falls back to plain STRING sorting (the only form the old
+		/// path supports), and every field is flagged for legacy search.
+		/// </summary>
+		private Sort GetOldSort(Sort sort)
+		{
+			SortField[] newFields = sort.GetSort();
+			SortField[] legacyFields = new SortField[newFields.Length];
+			for (int idx = 0; idx < newFields.Length; idx++)
+			{
+				SortField current = newFields[idx];
+				System.String fieldName = current.GetField();
+				bool isByValueStringField = fieldName != null && fieldName.Equals("string");
+				int legacyType = isByValueStringField ? SortField.STRING : current.GetType();
+				SortField legacy = new SortField(fieldName, legacyType, current.GetReverse());
+				legacy.SetUseLegacySearch(true);
+				legacyFields[idx] = legacy;
+			}
+			return new Sort(legacyFields);
+		}
+
+		/// <summary>
+		/// Asserts that two result sets are identical: same total hit count,
+		/// same number of returned docs, and pairwise-equal doc ids and scores.
+		/// For field-sorted hits the sort field values must match as well.
+		/// </summary>
+		private void Compare(TopDocs oldHits, TopDocs newHits)
+		{
+			Assert.AreEqual(oldHits.totalHits, newHits.totalHits);
+			Assert.AreEqual(oldHits.scoreDocs.Length, newHits.scoreDocs.Length);
+			ScoreDoc[] oldDocs = oldHits.scoreDocs;
+			ScoreDoc[] newDocs = newHits.scoreDocs;
+			
+			for (int i = 0; i < oldDocs.Length; i++)
+			{
+				if (oldDocs[i] is FieldDoc)
+				{
+					// Was System.Diagnostics.Debug.Assert, which is compiled
+					// out of release builds and would silently skip this check;
+					// use a real NUnit assertion instead.
+					Assert.IsTrue(newDocs[i] is FieldDoc, "hit " + i + ": old hit is a FieldDoc but new hit is not");
+					FieldDoc oldHit = (FieldDoc) oldDocs[i];
+					FieldDoc newHit = (FieldDoc) newDocs[i];
+					Assert.AreEqual(oldHit.doc, newHit.doc, "hit " + i + " of " + oldDocs.Length + " differs: oldDoc=" + oldHit.doc + " vs newDoc=" + newHit.doc + " oldFields=" + _TestUtil.ArrayToString(oldHit.fields) + " newFields=" + _TestUtil.ArrayToString(newHit.fields));
+					
+					// Scores are floating point: compare within a tolerance.
+					Assert.AreEqual(oldHit.score, newHit.score, 0.00001);
+					Assert.IsTrue(SupportClass.CollectionsHelper.Equals(oldHit.fields, newHit.fields));
+				}
+				else
+				{
+					ScoreDoc oldHit = oldDocs[i];
+					ScoreDoc newHit = newDocs[i];
+					Assert.AreEqual(oldHit.doc, newHit.doc);
+					Assert.AreEqual(oldHit.score, newHit.score, 0.00001);
+				}
+			}
+		}
+ }
+}
\ No newline at end of file