You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by cc...@apache.org on 2011/11/13 00:51:44 UTC
[Lucene.Net] svn commit: r1201357 [1/4] - in
/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk: src/core/
src/core/Search/ src/core/Search/Spans/ src/core/Support/ src/core/Util/
test/core/ test/core/Document/ test/core/Index/ test/core/Search/
test/core/Search...
Author: ccurrens
Date: Sat Nov 12 23:51:42 2011
New Revision: 1201357
URL: http://svn.apache.org/viewvc?rev=1201357&view=rev
Log:
ported more tests in Index, Document, and Search
Added:
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexCommit.cs
Modified:
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/AssemblyInfo.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/HitQueue.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/PhraseQuery.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/Similarity.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/Spans/SpanWeight.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Support/FileSupport.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Util/PriorityQueue.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Document/TestBinaryDocument.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Document/TestDocument.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/DocHelper.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestAddIndexesNoOptimize.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestAtomicUpdate.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestByteSlices.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestCheckIndex.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestCompoundFile.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestConcurrentMergeScheduler.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestCrash.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDeletionPolicy.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDirectoryReader.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDoc.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDocumentWriter.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestFieldsReader.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestFilterIndexReader.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexFileDeleter.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexReader.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexReaderClone.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexReaderCloneNorms.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexReaderReopen.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriter.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Lucene.Net.Test.csproj
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/CheckHits.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/Function/FunctionTestSetup.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/Function/TestCustomScoreQuery.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/Function/TestFieldScoreQuery.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/Function/TestOrdValues.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/JustCompileSearch.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/Payloads/PayloadHelper.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/Payloads/TestPayloadNearQuery.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/Payloads/TestPayloadTermQuery.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/QueryUtils.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/Spans/JustCompileSearchSpans.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/Spans/TestBasics.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/Spans/TestFieldMaskingSpanQuery.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/Spans/TestNearSpansOrdered.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/Spans/TestPayloadSpans.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/Spans/TestSpans.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/Spans/TestSpansAdvanced.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/Spans/TestSpansAdvanced2.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSimilarity.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestTopDocsCollector.cs
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/UpdatedTests.txt
incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Util/_TestUtil.cs
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/AssemblyInfo.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/AssemblyInfo.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/AssemblyInfo.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/AssemblyInfo.cs Sat Nov 12 23:51:42 2011
@@ -38,6 +38,12 @@ using System.Runtime.CompilerServices;
[assembly: AssemblyInformationalVersionAttribute("2.9.4")]
+// for testing
+[assembly: InternalsVisibleTo("Lucene.Net.Test, PublicKey=002400000480000094000000060200000024000052534131000400000100010075a07ce602f88e" +
+ "f263c7db8cb342c58ebd49ecdcc210fac874260b0213fb929ac3dcaf4f5b39744b800f99073eca" +
+ "72aebfac5f7284e1d5f2c82012a804a140f06d7d043d83e830cdb606a04da2ad5374cc92c0a495" +
+ "08437802fb4f8fb80a05e59f80afb99f4ccd0dfe44065743543c4b053b669509d29d332cd32a0c" +
+ "b1e97e84")]
//
// Version information for an assembly consists of the following four values:
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/HitQueue.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/HitQueue.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/HitQueue.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/HitQueue.cs Sat Nov 12 23:51:42 2011
@@ -21,7 +21,7 @@ using Lucene.Net.Util;
namespace Lucene.Net.Search
{
- public sealed class HitQueue :PriorityQueue<ScoreDoc>
+ public sealed class HitQueue : PriorityQueue<ScoreDoc>
{
private bool prePopulate;
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/PhraseQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/PhraseQuery.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/PhraseQuery.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/PhraseQuery.cs Sat Nov 12 23:51:42 2011
@@ -147,7 +147,7 @@ namespace Lucene.Net.Search
InitBlock(enclosingInstance);
this.similarity = Enclosing_Instance.GetSimilarity(searcher);
- idfExp = similarity.idfExplain(Enclosing_Instance.terms, searcher);
+ idfExp = similarity.IdfExplain(Enclosing_Instance.terms, searcher);
idf = idfExp.GetIdf();
}
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/Similarity.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/Similarity.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/Similarity.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/Similarity.cs Sat Nov 12 23:51:42 2011
@@ -619,7 +619,7 @@ namespace Lucene.Net.Search
/// for each term.
/// </returns>
/// <throws> IOException </throws>
- public virtual IDFExplanation idfExplain(ICollection<Term> terms, Searcher searcher)
+ public virtual IDFExplanation IdfExplain(ICollection<Term> terms, Searcher searcher)
{
int max = searcher.MaxDoc();
float idf2 = 0.0f;
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/Spans/SpanWeight.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/Spans/SpanWeight.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/Spans/SpanWeight.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/Spans/SpanWeight.cs Sat Nov 12 23:51:42 2011
@@ -47,7 +47,7 @@ namespace Lucene.Net.Search.Spans
terms = new HashSet<Term>();
query.ExtractTerms(terms);
- idfExp = similarity.idfExplain(terms, searcher);
+ idfExp = similarity.IdfExplain(terms, searcher);
idf = idfExp.GetIdf();
}
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Support/FileSupport.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Support/FileSupport.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Support/FileSupport.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Support/FileSupport.cs Sat Nov 12 23:51:42 2011
@@ -27,6 +27,11 @@ namespace Lucene.Net.Support
return null;
}
+ public static System.IO.FileInfo[] GetFiles(System.IO.DirectoryInfo path)
+ {
+ return path.GetFiles();
+ }
+
/// <summary>
/// Returns a list of files in a given directory.
/// </summary>
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Util/PriorityQueue.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Util/PriorityQueue.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Util/PriorityQueue.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Util/PriorityQueue.cs Sat Nov 12 23:51:42 2011
@@ -29,7 +29,10 @@ namespace Lucene.Net.Util
/// length <c>maxSize+1</c>, in <see cref="Initialize" />.
///
/// </summary>
- public abstract class PriorityQueue<T> where T : class
+ // TODO: T needs to be able to return null. Behavior might be unexpected otherwise, since it returns default(T)
// I only see a non-nullable type used in PriorityQueue in the tests. May be possible to re-write tests to
+ // use an IComparable class, and this can be changed back to constraining on class, to return null
+ public abstract class PriorityQueue<T> //where T : class
{
private int size;
private int maxSize;
@@ -81,7 +84,7 @@ namespace Lucene.Net.Util
/// </returns>
protected internal virtual T GetSentinelObject()
{
- return null;
+ return default(T);
}
/// <summary>Subclass constructors must call this. </summary>
@@ -158,7 +161,7 @@ namespace Lucene.Net.Util
if (size < maxSize)
{
Add(element);
- return null; // TODO: java returns null, this shouldn't be an issue? - cc
+ return default(T); // TODO: java returns null, C# can't, TestPriorityQueue relies on a nullable int - cc
}
else if (size > 0 && !LessThan(element, heap[1]))
{
@@ -192,13 +195,13 @@ namespace Lucene.Net.Util
{
T result = heap[1]; // save first value
heap[1] = heap[size]; // move last to first
- heap[size] = null; // permit GC of objects
+ heap[size] = default(T); // permit GC of objects
size--;
DownHeap(); // adjust heap
return result;
}
else
- return null;
+ return default(T);
}
/// <summary> Should be called when the Object at top changes values.
@@ -232,7 +235,7 @@ namespace Lucene.Net.Util
{
for (int i = 0; i <= size; i++)
{
- heap[i] = null;
+ heap[i] = default(T);
}
size = 0;
}
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Document/TestBinaryDocument.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Document/TestBinaryDocument.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Document/TestBinaryDocument.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Document/TestBinaryDocument.cs Sat Nov 12 23:51:42 2011
@@ -28,12 +28,7 @@ using LuceneTestCase = Lucene.Net.Util.L
namespace Lucene.Net.Documents
{
- /// <summary> Tests {@link Document} class.
- ///
- ///
- /// </summary>
- /// <version> $Id: TestBinaryDocument.java 756760 2009-03-20 21:10:12Z mikemccand $
- /// </version>
+ /// <summary>Tests {@link Document} class.</summary>
[TestFixture]
public class TestBinaryDocument:LuceneTestCase
{
@@ -45,14 +40,12 @@ namespace Lucene.Net.Documents
public virtual void TestBinaryFieldInIndex()
{
Fieldable binaryFldStored = new Field("binaryStored", System.Text.UTF8Encoding.UTF8.GetBytes(binaryValStored), Field.Store.YES);
- Fieldable binaryFldCompressed = new Field("binaryCompressed", System.Text.UTF8Encoding.UTF8.GetBytes(binaryValCompressed), Field.Store.COMPRESS);
Fieldable stringFldStored = new Field("stringStored", binaryValStored, Field.Store.YES, Field.Index.NO, Field.TermVector.NO);
- Fieldable stringFldCompressed = new Field("stringCompressed", binaryValCompressed, Field.Store.COMPRESS, Field.Index.NO, Field.TermVector.NO);
try
{
// binary fields with store off are not allowed
- new Field("fail", System.Text.UTF8Encoding.UTF8.GetBytes(binaryValCompressed), Field.Store.NO);
+ new Field("fail", System.Text.UTF8Encoding.UTF8.GetBytes(binaryValStored), Field.Store.NO);
Assert.Fail();
}
catch (System.ArgumentException iae)
@@ -63,22 +56,20 @@ namespace Lucene.Net.Documents
Document doc = new Document();
doc.Add(binaryFldStored);
- doc.Add(binaryFldCompressed);
doc.Add(stringFldStored);
- doc.Add(stringFldCompressed);
/** test for field count */
- Assert.AreEqual(4, doc.fields_ForNUnit.Count);
+ Assert.AreEqual(2, doc.fields_ForNUnit.Count);
/** add the doc to a ram index */
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.AddDocument(doc);
writer.Close();
/** open a reader and fetch the document */
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, false);
Document docFromReader = reader.Document(0);
Assert.IsTrue(docFromReader != null);
@@ -86,18 +77,10 @@ namespace Lucene.Net.Documents
System.String binaryFldStoredTest = new System.String(System.Text.UTF8Encoding.UTF8.GetChars(docFromReader.GetBinaryValue("binaryStored")));
Assert.IsTrue(binaryFldStoredTest.Equals(binaryValStored));
- /** fetch the binary compressed field and compare it's content with the original one */
- System.String binaryFldCompressedTest = new System.String(System.Text.UTF8Encoding.UTF8.GetChars(docFromReader.GetBinaryValue("binaryCompressed")));
- Assert.IsTrue(binaryFldCompressedTest.Equals(binaryValCompressed));
-
/** fetch the string field and compare its content with the original one */
System.String stringFldStoredTest = docFromReader.Get("stringStored");
Assert.IsTrue(stringFldStoredTest.Equals(binaryValStored));
- /** fetch the compressed string field and compare it's content with the original one */
- System.String stringFldCompressedTest = docFromReader.Get("stringCompressed");
- Assert.IsTrue(stringFldCompressedTest.Equals(binaryValCompressed));
-
/** delete the document from index */
reader.DeleteDocument(0);
Assert.AreEqual(0, reader.NumDocs());
@@ -119,12 +102,12 @@ namespace Lucene.Net.Documents
/** add the doc to a ram index */
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.AddDocument(doc);
writer.Close();
/** open a reader and fetch the document */
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, false);
Document docFromReader = reader.Document(0);
Assert.IsTrue(docFromReader != null);
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Document/TestDocument.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Document/TestDocument.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Document/TestDocument.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Document/TestDocument.cs Sat Nov 12 23:51:42 2011
@@ -33,12 +33,7 @@ using LuceneTestCase = Lucene.Net.Util.L
namespace Lucene.Net.Documents
{
- /// <summary> Tests {@link Document} class.
- ///
- ///
- /// </summary>
- /// <version> $Id: TestDocument.java 754789 2009-03-15 23:24:39Z mikemccand $
- /// </version>
+ /// <summary>Tests {@link Document} class.</summary>
[TestFixture]
public class TestDocument:LuceneTestCase
{
@@ -170,11 +165,11 @@ namespace Lucene.Net.Documents
public virtual void TestGetValuesForIndexedDocument()
{
RAMDirectory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.AddDocument(MakeDocumentWithFields());
writer.Close();
- Searcher searcher = new IndexSearcher(dir);
+ Searcher searcher = new IndexSearcher(dir, true);
// search for something that does exists
Query query = new TermQuery(new Term("keyword", "test1"));
@@ -243,7 +238,7 @@ namespace Lucene.Net.Documents
doc.Add(new Field("keyword", "test", Field.Store.YES, Field.Index.NOT_ANALYZED));
RAMDirectory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.AddDocument(doc);
field.SetValue("id2");
writer.AddDocument(doc);
@@ -251,7 +246,7 @@ namespace Lucene.Net.Documents
writer.AddDocument(doc);
writer.Close();
- Searcher searcher = new IndexSearcher(dir);
+ Searcher searcher = new IndexSearcher(dir, true);
Query query = new TermQuery(new Term("keyword", "test"));
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/DocHelper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/DocHelper.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/DocHelper.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/DocHelper.cs Sat Nov 12 23:51:42 2011
@@ -40,12 +40,6 @@ namespace Lucene.Net.Index
public const System.String TEXT_FIELD_2_KEY = "textField2";
public static Field textField2;
- public const System.String FIELD_2_COMPRESSED_TEXT = "field field field two text";
- //Fields will be lexicographically sorted. So, the order is: field, text, two
- public static readonly int[] COMPRESSED_FIELD_2_FREQS = new int[]{3, 1, 1};
- public const System.String COMPRESSED_TEXT_FIELD_2_KEY = "compressedTextField2";
- public static Field compressedTextField2;
-
public const System.String FIELD_3_TEXT = "aaaNoNorms aaaNoNorms bbbNoNorms";
public const System.String TEXT_FIELD_3_KEY = "textField3";
@@ -170,7 +164,7 @@ namespace Lucene.Net.Index
writer.SetSimilarity(similarity);
//writer.setUseCompoundFile(false);
writer.AddDocument(doc);
- writer.Flush();
+ writer.Commit();
SegmentInfo info = writer.NewestSegment();
writer.Close();
return info;
@@ -184,7 +178,6 @@ namespace Lucene.Net.Index
{
textField1 = new Field(TEXT_FIELD_1_KEY, FIELD_1_TEXT, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO);
textField2 = new Field(TEXT_FIELD_2_KEY, FIELD_2_TEXT, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
- compressedTextField2 = new Field(COMPRESSED_TEXT_FIELD_2_KEY, FIELD_2_COMPRESSED_TEXT, Field.Store.COMPRESS, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
textField3 = new Field(TEXT_FIELD_3_KEY, FIELD_3_TEXT, Field.Store.YES, Field.Index.ANALYZED);
{
textField3.SetOmitNorms(true);
@@ -201,7 +194,7 @@ namespace Lucene.Net.Index
lazyField = new Field(LAZY_FIELD_KEY, LAZY_FIELD_TEXT, Field.Store.YES, Field.Index.ANALYZED);
textUtfField1 = new Field(TEXT_FIELD_UTF1_KEY, FIELD_UTF1_TEXT, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO);
textUtfField2 = new Field(TEXT_FIELD_UTF2_KEY, FIELD_UTF2_TEXT, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
- fields = new Field[] { textField1, textField2, textField3, compressedTextField2, keyField, noNormsField, noTFField, unIndField, unStoredField1, unStoredField2, textUtfField1, textUtfField2, lazyField, lazyFieldBinary, largeLazyField };
+ fields = new Field[] { textField1, textField2, textField3, keyField, noNormsField, noTFField, unIndField, unStoredField1, unStoredField2, textUtfField1, textUtfField2, lazyField, lazyFieldBinary, largeLazyField };
{
//Initialize the large Lazy Field
System.Text.StringBuilder buffer = new System.Text.StringBuilder();
@@ -241,7 +234,7 @@ namespace Lucene.Net.Index
Add(unstored, f);
if (f.GetOmitNorms())
Add(noNorms, f);
- if (f.GetOmitTf())
+ if (f.GetOmitTermFreqAndPositions())
Add(noTf, f);
if (f.IsLazy())
Add(lazy, f);
@@ -251,7 +244,6 @@ namespace Lucene.Net.Index
nameValues = new System.Collections.Hashtable();
nameValues[TEXT_FIELD_1_KEY] = FIELD_1_TEXT;
nameValues[TEXT_FIELD_2_KEY] = FIELD_2_TEXT;
- nameValues[COMPRESSED_TEXT_FIELD_2_KEY] = FIELD_2_COMPRESSED_TEXT;
nameValues[TEXT_FIELD_3_KEY] = FIELD_3_TEXT;
nameValues[KEYWORD_FIELD_KEY] = KEYWORD_TEXT;
nameValues[NO_NORMS_KEY] = NO_NORMS_TEXT;
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestAddIndexesNoOptimize.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestAddIndexesNoOptimize.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestAddIndexesNoOptimize.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestAddIndexesNoOptimize.cs Sat Nov 12 23:51:42 2011
@@ -48,27 +48,27 @@ namespace Lucene.Net.Index
writer = NewWriter(dir, true);
// add 100 documents
AddDocs(writer, 100);
- Assert.AreEqual(100, writer.DocCount());
+ Assert.AreEqual(100, writer.MaxDoc());
writer.Close();
writer = NewWriter(aux, true);
writer.SetUseCompoundFile(false); // use one without a compound file
// add 40 documents in separate files
AddDocs(writer, 40);
- Assert.AreEqual(40, writer.DocCount());
+ Assert.AreEqual(40, writer.MaxDoc());
writer.Close();
writer = NewWriter(aux2, true);
// add 50 documents in compound files
AddDocs2(writer, 50);
- Assert.AreEqual(50, writer.DocCount());
+ Assert.AreEqual(50, writer.MaxDoc());
writer.Close();
// test doc count before segments are merged
writer = NewWriter(dir, false);
- Assert.AreEqual(100, writer.DocCount());
+ Assert.AreEqual(100, writer.MaxDoc());
writer.AddIndexesNoOptimize(new Directory[]{aux, aux2});
- Assert.AreEqual(190, writer.DocCount());
+ Assert.AreEqual(190, writer.MaxDoc());
writer.Close();
// make sure the old index is correct
@@ -82,14 +82,14 @@ namespace Lucene.Net.Index
writer = NewWriter(aux3, true);
// add 40 documents
AddDocs(writer, 40);
- Assert.AreEqual(40, writer.DocCount());
+ Assert.AreEqual(40, writer.MaxDoc());
writer.Close();
// test doc count before segments are merged/index is optimized
writer = NewWriter(dir, false);
- Assert.AreEqual(190, writer.DocCount());
+ Assert.AreEqual(190, writer.MaxDoc());
writer.AddIndexesNoOptimize(new Directory[]{aux3});
- Assert.AreEqual(230, writer.DocCount());
+ Assert.AreEqual(230, writer.MaxDoc());
writer.Close();
// make sure the new index is correct
@@ -118,9 +118,9 @@ namespace Lucene.Net.Index
writer.Close();
writer = NewWriter(dir, false);
- Assert.AreEqual(230, writer.DocCount());
+ Assert.AreEqual(230, writer.MaxDoc());
writer.AddIndexesNoOptimize(new Directory[]{aux4});
- Assert.AreEqual(231, writer.DocCount());
+ Assert.AreEqual(231, writer.MaxDoc());
writer.Close();
VerifyNumDocs(dir, 231);
@@ -156,6 +156,7 @@ namespace Lucene.Net.Index
writer.DeleteDocuments(q);
writer.Optimize();
+ writer.Commit();
VerifyNumDocs(dir, 1039);
VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
@@ -196,6 +197,7 @@ namespace Lucene.Net.Index
writer.DeleteDocuments(q);
writer.Optimize();
+ writer.Commit();
VerifyNumDocs(dir, 1039);
VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
@@ -236,6 +238,7 @@ namespace Lucene.Net.Index
writer.AddIndexesNoOptimize(new Directory[]{aux});
writer.Optimize();
+ writer.Commit();
VerifyNumDocs(dir, 1039);
VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
@@ -260,7 +263,7 @@ namespace Lucene.Net.Index
writer = NewWriter(dir, true);
// add 100 documents
AddDocs(writer, 100);
- Assert.AreEqual(100, writer.DocCount());
+ Assert.AreEqual(100, writer.MaxDoc());
writer.Close();
writer = NewWriter(aux, true);
@@ -284,7 +287,7 @@ namespace Lucene.Net.Index
}
catch (System.ArgumentException e)
{
- Assert.AreEqual(100, writer.DocCount());
+ Assert.AreEqual(100, writer.MaxDoc());
}
writer.Close();
@@ -311,7 +314,7 @@ namespace Lucene.Net.Index
AddDocs(writer, 10);
writer.AddIndexesNoOptimize(new Directory[]{aux});
- Assert.AreEqual(1040, writer.DocCount());
+ Assert.AreEqual(1040, writer.MaxDoc());
Assert.AreEqual(2, writer.GetSegmentCount());
Assert.AreEqual(1000, writer.GetDocCount(0));
writer.Close();
@@ -337,7 +340,7 @@ namespace Lucene.Net.Index
AddDocs(writer, 2);
writer.AddIndexesNoOptimize(new Directory[]{aux});
- Assert.AreEqual(1032, writer.DocCount());
+ Assert.AreEqual(1032, writer.MaxDoc());
Assert.AreEqual(2, writer.GetSegmentCount());
Assert.AreEqual(1000, writer.GetDocCount(0));
writer.Close();
@@ -362,7 +365,7 @@ namespace Lucene.Net.Index
writer.SetMergeFactor(4);
writer.AddIndexesNoOptimize(new Directory[]{aux, new RAMDirectory(aux)});
- Assert.AreEqual(1060, writer.DocCount());
+ Assert.AreEqual(1060, writer.MaxDoc());
Assert.AreEqual(1000, writer.GetDocCount(0));
writer.Close();
@@ -381,7 +384,7 @@ namespace Lucene.Net.Index
SetUpDirs(dir, aux);
- IndexReader reader = IndexReader.Open(aux);
+ IndexReader reader = IndexReader.Open(aux, false);
for (int i = 0; i < 20; i++)
{
reader.DeleteDocument(i);
@@ -394,7 +397,7 @@ namespace Lucene.Net.Index
writer.SetMergeFactor(4);
writer.AddIndexesNoOptimize(new Directory[]{aux, new RAMDirectory(aux)});
- Assert.AreEqual(1020, writer.DocCount());
+ Assert.AreEqual(1020, writer.MaxDoc());
Assert.AreEqual(1000, writer.GetDocCount(0));
writer.Close();
@@ -418,11 +421,11 @@ namespace Lucene.Net.Index
writer.SetMaxBufferedDocs(100);
writer.SetMergeFactor(10);
writer.AddIndexesNoOptimize(new Directory[]{aux});
- Assert.AreEqual(30, writer.DocCount());
+ Assert.AreEqual(30, writer.MaxDoc());
Assert.AreEqual(3, writer.GetSegmentCount());
writer.Close();
- IndexReader reader = IndexReader.Open(aux);
+ IndexReader reader = IndexReader.Open(aux, false);
for (int i = 0; i < 27; i++)
{
reader.DeleteDocument(i);
@@ -430,7 +433,7 @@ namespace Lucene.Net.Index
Assert.AreEqual(3, reader.NumDocs());
reader.Close();
- reader = IndexReader.Open(aux2);
+ reader = IndexReader.Open(aux2, false);
for (int i = 0; i < 8; i++)
{
reader.DeleteDocument(i);
@@ -443,7 +446,7 @@ namespace Lucene.Net.Index
writer.SetMergeFactor(4);
writer.AddIndexesNoOptimize(new Directory[]{aux, aux2});
- Assert.AreEqual(1025, writer.DocCount());
+ Assert.AreEqual(1025, writer.MaxDoc());
Assert.AreEqual(1000, writer.GetDocCount(0));
writer.Close();
@@ -453,7 +456,7 @@ namespace Lucene.Net.Index
private IndexWriter NewWriter(Directory dir, bool create)
{
- IndexWriter writer = new IndexWriter(dir, true, new WhitespaceAnalyzer(), create);
+ IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), create, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetMergePolicy(new LogDocMergePolicy(writer));
return writer;
}
@@ -480,7 +483,7 @@ namespace Lucene.Net.Index
private void VerifyNumDocs(Directory dir, int numDocs)
{
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, true);
Assert.AreEqual(numDocs, reader.MaxDoc());
Assert.AreEqual(numDocs, reader.NumDocs());
reader.Close();
@@ -488,7 +491,7 @@ namespace Lucene.Net.Index
private void VerifyTermDocs(Directory dir, Term term, int numDocs)
{
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, true);
TermDocs termDocs = reader.TermDocs(term);
int count = 0;
while (termDocs.Next())
@@ -505,7 +508,7 @@ namespace Lucene.Net.Index
writer.SetMaxBufferedDocs(1000);
// add 1000 documents in 1 segment
AddDocs(writer, 1000);
- Assert.AreEqual(1000, writer.DocCount());
+ Assert.AreEqual(1000, writer.MaxDoc());
Assert.AreEqual(1, writer.GetSegmentCount());
writer.Close();
@@ -523,7 +526,7 @@ namespace Lucene.Net.Index
writer.SetMaxBufferedDocs(100);
writer.SetMergeFactor(10);
}
- Assert.AreEqual(30, writer.DocCount());
+ Assert.AreEqual(30, writer.MaxDoc());
Assert.AreEqual(3, writer.GetSegmentCount());
writer.Close();
}
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestAtomicUpdate.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestAtomicUpdate.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestAtomicUpdate.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestAtomicUpdate.cs Sat Nov 12 23:51:42 2011
@@ -53,8 +53,9 @@ namespace Lucene.Net.Index
}
}
-
- public MockIndexWriter(TestAtomicUpdate enclosingInstance, Directory dir, bool autoCommit, Analyzer a, bool create):base(dir, autoCommit, a, create)
+
+ public MockIndexWriter(TestAtomicUpdate enclosingInstance, Directory dir, Analyzer a, bool create, IndexWriter.MaxFieldLength mfl)
+ : base(dir, a, create, mfl)
{
InitBlock(enclosingInstance);
}
@@ -147,7 +148,7 @@ namespace Lucene.Net.Index
public override void DoWork()
{
- IndexReader r = IndexReader.Open(directory);
+ IndexReader r = IndexReader.Open(directory, true);
Assert.AreEqual(100, r.NumDocs());
r.Close();
}
@@ -161,8 +162,8 @@ namespace Lucene.Net.Index
{
TimedThread[] threads = new TimedThread[4];
-
- IndexWriter writer = new MockIndexWriter(this, directory, true, ANALYZER, true);
+
+ IndexWriter writer = new MockIndexWriter(this, directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetMaxBufferedDocs(7);
writer.SetMergeFactor(3);
@@ -172,11 +173,15 @@ namespace Lucene.Net.Index
Document d = new Document();
d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
d.Add(new Field("contents", English.IntToEnglish(i), Field.Store.NO, Field.Index.ANALYZED));
+ if ((i - 1) % 7 == 0)
+ {
+ writer.Commit();
+ }
writer.AddDocument(d);
}
writer.Commit();
- IndexReader r = IndexReader.Open(directory);
+ IndexReader r = IndexReader.Open(directory, true);
Assert.AreEqual(100, r.NumDocs());
r.Close();
@@ -228,8 +233,7 @@ namespace Lucene.Net.Index
directory.Close();
// Second in an FSDirectory:
- System.String tempDir = System.IO.Path.GetTempPath();
- System.IO.FileInfo dirPath = new System.IO.FileInfo(System.IO.Path.Combine(tempDir, "lucene.test.atomic"));
+ System.IO.DirectoryInfo dirPath = _TestUtil.GetTempDir("lucene.test.atomic");
directory = FSDirectory.Open(dirPath);
RunTest(directory);
directory.Close();
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestByteSlices.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestByteSlices.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestByteSlices.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestByteSlices.cs Sat Nov 12 23:51:42 2011
@@ -64,13 +64,13 @@ namespace Lucene.Net.Index
}
}
- public override void RecycleByteBlocks(System.Collections.ArrayList blocks)
+ public override void RecycleByteBlocks(System.Collections.Generic.IList<byte[]> blocks)
{
lock (this)
{
int size = blocks.Count;
for (int i = 0; i < size; i++)
- freeByteBlocks.Add((byte[])blocks[i]);
+ freeByteBlocks.Add(blocks[i]);
}
}
}
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestCheckIndex.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestCheckIndex.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestCheckIndex.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestCheckIndex.cs Sat Nov 12 23:51:42 2011
@@ -46,7 +46,7 @@ namespace Lucene.Net.Index
writer.AddDocument(doc);
}
writer.Close();
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, false);
reader.DeleteDocument(5);
reader.Close();
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestCompoundFile.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestCompoundFile.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestCompoundFile.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestCompoundFile.cs Sat Nov 12 23:51:42 2011
@@ -29,11 +29,6 @@ using _TestUtil = Lucene.Net.Util._TestU
namespace Lucene.Net.Index
{
-
-
- /// <summary> </summary>
- /// <version> $Id: TestCompoundFile.java 780770 2009-06-01 18:34:10Z uschindler $
- /// </version>
[TestFixture]
public class TestCompoundFile:LuceneTestCase
{
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestConcurrentMergeScheduler.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestConcurrentMergeScheduler.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestConcurrentMergeScheduler.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestConcurrentMergeScheduler.cs Sat Nov 12 23:51:42 2011
@@ -16,7 +16,7 @@
*/
using System;
-
+using System.Threading;
using NUnit.Framework;
using Analyzer = Lucene.Net.Analysis.Analyzer;
@@ -38,11 +38,13 @@ namespace Lucene.Net.Index
private class FailOnlyOnFlush:MockRAMDirectory.Failure
{
- internal bool doFail = false;
+ internal bool doFail;
+ internal bool hitExc;
public virtual void SetDoFail()
{
- this.doFail = true;
+ this.doFail = true;
+ hitExc = false;
}
public virtual void ClearDoFail()
{
@@ -51,7 +53,7 @@ namespace Lucene.Net.Index
public override void Eval(MockRAMDirectory dir)
{
- if (doFail)
+ if (doFail && Thread.CurrentThread.Name.Equals("main")) // TODO: This may not work -cc
{
System.Diagnostics.StackTrace trace = new System.Diagnostics.StackTrace();
for (int i = 0; i < trace.FrameCount; i++)
@@ -59,6 +61,7 @@ namespace Lucene.Net.Index
System.Diagnostics.StackFrame sf = trace.GetFrame(i);
if ("DoFlush".Equals(sf.GetMethod().Name))
{
+ hitExc = true;
//new RuntimeException().printStackTrace(System.out);
throw new System.IO.IOException("now failing during flush");
}
@@ -76,14 +79,16 @@ namespace Lucene.Net.Index
MockRAMDirectory directory = new MockRAMDirectory();
FailOnlyOnFlush failure = new FailOnlyOnFlush();
directory.FailOn(failure);
-
- IndexWriter writer = new IndexWriter(directory, true, ANALYZER, true);
+
+ IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
writer.SetMergeScheduler(cms);
writer.SetMaxBufferedDocs(2);
Document doc = new Document();
Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
doc.Add(idField);
+ int extraCount = 0;
+
for (int i = 0; i < 10; i++)
{
for (int j = 0; j < 20; j++)
@@ -91,24 +96,32 @@ namespace Lucene.Net.Index
idField.SetValue(System.Convert.ToString(i * 20 + j));
writer.AddDocument(doc);
}
-
- writer.AddDocument(doc);
-
- failure.SetDoFail();
- try
- {
- writer.Flush();
- Assert.Fail("failed to hit IOException");
- }
- catch (System.IO.IOException ioe)
- {
- failure.ClearDoFail();
- }
+
+ while (true)
+ {
+ // must cycle here because sometimes the merge flushes
+ // the doc we just added and so there's nothing to
+ // flush, and we don't hit the exception
+ writer.AddDocument(doc);
+ failure.SetDoFail();
+ try
+ {
+ writer.Flush(true, false, true);
+ if(failure.hitExc)
+ Assert.Fail("failed to hit IOException");
+ extraCount++;
+ }
+ catch (System.IO.IOException ioe)
+ {
+ failure.ClearDoFail();
+ break;
+ }
+ }
}
writer.Close();
- IndexReader reader = IndexReader.Open(directory);
- Assert.AreEqual(200, reader.NumDocs());
+ IndexReader reader = IndexReader.Open(directory, true);
+ Assert.AreEqual(200+extraCount, reader.NumDocs());
reader.Close();
directory.Close();
}
@@ -120,8 +133,8 @@ namespace Lucene.Net.Index
{
RAMDirectory directory = new MockRAMDirectory();
-
- IndexWriter writer = new IndexWriter(directory, true, ANALYZER, true);
+
+ IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
writer.SetMergeScheduler(cms);
@@ -151,108 +164,94 @@ namespace Lucene.Net.Index
delID += 10;
}
- writer.Flush();
+ writer.Commit();
}
writer.Close();
- IndexReader reader = IndexReader.Open(directory);
+ IndexReader reader = IndexReader.Open(directory, true);
// Verify that we did not lose any deletes...
Assert.AreEqual(450, reader.NumDocs());
reader.Close();
directory.Close();
}
-
- [Test]
- public virtual void TestNoExtraFiles()
- {
-
- RAMDirectory directory = new MockRAMDirectory();
-
- for (int pass = 0; pass < 2; pass++)
- {
-
- bool autoCommit = pass == 0;
- IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true);
-
- for (int iter = 0; iter < 7; iter++)
- {
- ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
- writer.SetMergeScheduler(cms);
- writer.SetMaxBufferedDocs(2);
-
- for (int j = 0; j < 21; j++)
- {
- Document doc = new Document();
- doc.Add(new Field("content", "a b c", Field.Store.NO, Field.Index.ANALYZED));
- writer.AddDocument(doc);
- }
-
- writer.Close();
- TestIndexWriter.AssertNoUnreferencedFiles(directory, "testNoExtraFiles autoCommit=" + autoCommit);
-
- // Reopen
- writer = new IndexWriter(directory, autoCommit, ANALYZER, false);
- }
-
- writer.Close();
- }
-
- directory.Close();
- }
-
- [Test]
- public virtual void TestNoWaitClose()
- {
- RAMDirectory directory = new MockRAMDirectory();
-
- Document doc = new Document();
- Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
- doc.Add(idField);
-
- for (int pass = 0; pass < 2; pass++)
- {
- bool autoCommit = pass == 0;
- IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true);
-
- for (int iter = 0; iter < 10; iter++)
- {
- ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
- writer.SetMergeScheduler(cms);
- writer.SetMaxBufferedDocs(2);
- writer.SetMergeFactor(100);
-
- for (int j = 0; j < 201; j++)
- {
- idField.SetValue(System.Convert.ToString(iter * 201 + j));
- writer.AddDocument(doc);
- }
-
- int delID = iter * 201;
- for (int j = 0; j < 20; j++)
- {
- writer.DeleteDocuments(new Term("id", System.Convert.ToString(delID)));
- delID += 5;
- }
-
- // Force a bunch of merge threads to kick off so we
- // stress out aborting them on close:
- writer.SetMergeFactor(3);
- writer.AddDocument(doc);
- writer.Flush();
-
- writer.Close(false);
-
- IndexReader reader = IndexReader.Open(directory);
- Assert.AreEqual((1 + iter) * 182, reader.NumDocs());
- reader.Close();
-
- // Reopen
- writer = new IndexWriter(directory, autoCommit, ANALYZER, false);
- }
- writer.Close();
- }
-
- directory.Close();
- }
+
+ [Test]
+ public virtual void TestNoExtraFiles()
+ {
+ RAMDirectory directory = new MockRAMDirectory();
+ IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
+
+ for (int iter = 0; iter < 7; iter++)
+ {
+ ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+ writer.SetMergeScheduler(cms);
+ writer.SetMaxBufferedDocs(2);
+
+ for (int j = 0; j < 21; j++)
+ {
+ Document doc = new Document();
+ doc.Add(new Field("content", "a b c", Field.Store.NO, Field.Index.ANALYZED));
+ writer.AddDocument(doc);
+ }
+
+ writer.Close();
+ TestIndexWriter.AssertNoUnreferencedFiles(directory, "testNoExtraFiles");
+ // Reopen
+ writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED);
+ }
+ writer.Close();
+ directory.Close();
+ }
+
+ [Test]
+ public virtual void TestNoWaitClose()
+ {
+ RAMDirectory directory = new MockRAMDirectory();
+
+ Document doc = new Document();
+ Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
+ doc.Add(idField);
+
+ IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
+
+ for (int iter = 0; iter < 10; iter++)
+ {
+ ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+ writer.SetMergeScheduler(cms);
+ writer.SetMaxBufferedDocs(2);
+ writer.SetMergeFactor(100);
+
+ for (int j = 0; j < 201; j++)
+ {
+ idField.SetValue(System.Convert.ToString(iter*201 + j));
+ writer.AddDocument(doc);
+ }
+
+ int delID = iter*201;
+ for (int j = 0; j < 20; j++)
+ {
+ writer.DeleteDocuments(new Term("id", delID.ToString()));
+ delID += 5;
+ }
+
+ // Force a bunch of merge threads to kick off so we
+ // stress out aborting them on close:
+ writer.SetMergeFactor(3);
+ writer.AddDocument(doc);
+ writer.Commit();
+
+ writer.Close(false);
+
+ IndexReader reader = IndexReader.Open(directory, true);
+ Assert.AreEqual((1 + iter)*182, reader.NumDocs());
+ reader.Close();
+
+ // Reopen
+ writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED);
+ }
+ writer.Close();
+
+ directory.Close();
+ }
}
}
\ No newline at end of file
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestCrash.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestCrash.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestCrash.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestCrash.cs Sat Nov 12 23:51:42 2011
@@ -41,8 +41,8 @@ namespace Lucene.Net.Index
private IndexWriter InitIndex(MockRAMDirectory dir)
{
dir.SetLockFactory(NoLockFactory.GetNoLockFactory());
-
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer());
+
+ IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
//writer.setMaxBufferedDocs(2);
writer.SetMaxBufferedDocs(10);
((ConcurrentMergeScheduler) writer.GetMergeScheduler()).SetSuppressExceptions();
@@ -71,7 +71,7 @@ namespace Lucene.Net.Index
IndexWriter writer = InitIndex();
MockRAMDirectory dir = (MockRAMDirectory) writer.GetDirectory();
Crash(writer);
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, true);
Assert.IsTrue(reader.NumDocs() < 157);
}
@@ -85,7 +85,7 @@ namespace Lucene.Net.Index
writer = InitIndex(dir);
writer.Close();
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, false);
Assert.IsTrue(reader.NumDocs() < 314);
}
@@ -96,7 +96,7 @@ namespace Lucene.Net.Index
MockRAMDirectory dir = (MockRAMDirectory) writer.GetDirectory();
writer.Close();
writer = InitIndex(dir);
- Assert.AreEqual(314, writer.DocCount());
+ Assert.AreEqual(314, writer.MaxDoc());
Crash(writer);
/*
@@ -108,7 +108,7 @@ namespace Lucene.Net.Index
dir.fileLength(l[i]) + " bytes");
*/
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, false);
Assert.IsTrue(reader.NumDocs() >= 157);
}
@@ -129,7 +129,7 @@ namespace Lucene.Net.Index
System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
*/
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, false);
Assert.AreEqual(157, reader.NumDocs());
}
@@ -150,7 +150,7 @@ namespace Lucene.Net.Index
for(int i=0;i<l.length;i++)
System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
*/
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, false);
Assert.AreEqual(157, reader.NumDocs());
}
@@ -162,7 +162,7 @@ namespace Lucene.Net.Index
MockRAMDirectory dir = (MockRAMDirectory) writer.GetDirectory();
writer.Close(false);
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, false);
reader.DeleteDocument(3);
dir.Crash();
@@ -173,7 +173,7 @@ namespace Lucene.Net.Index
for(int i=0;i<l.length;i++)
System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
*/
- reader = IndexReader.Open(dir);
+ reader = IndexReader.Open(dir, false);
Assert.AreEqual(157, reader.NumDocs());
}
@@ -185,7 +185,7 @@ namespace Lucene.Net.Index
MockRAMDirectory dir = (MockRAMDirectory) writer.GetDirectory();
writer.Close(false);
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, false);
reader.DeleteDocument(3);
reader.Close();
@@ -197,7 +197,7 @@ namespace Lucene.Net.Index
for(int i=0;i<l.length;i++)
System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
*/
- reader = IndexReader.Open(dir);
+ reader = IndexReader.Open(dir, false);
Assert.AreEqual(156, reader.NumDocs());
}
}
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDeletionPolicy.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDeletionPolicy.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDeletionPolicy.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDeletionPolicy.cs Sat Nov 12 23:51:42 2011
@@ -16,7 +16,7 @@
*/
using System;
-
+using System.Collections.Generic;
using NUnit.Framework;
using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
@@ -94,7 +94,7 @@ namespace Lucene.Net.Index
public virtual void OnCommit(System.Collections.IList commits)
{
IndexCommit lastCommit = (IndexCommit) commits[commits.Count - 1];
- IndexReader r = IndexReader.Open(dir);
+ IndexReader r = IndexReader.Open(dir, true);
Assert.AreEqual(r.IsOptimized(), lastCommit.IsOptimized(), "lastCommit.isOptimized()=" + lastCommit.IsOptimized() + " vs IndexReader.isOptimized=" + r.IsOptimized());
r.Close();
Enclosing_Instance.VerifyCommitOrder(commits);
@@ -102,8 +102,8 @@ namespace Lucene.Net.Index
}
}
- /// <summary> This is useful for adding to a big index w/ autoCommit
- /// false when you know readers are not using it.
+ /// <summary> This is useful for adding to a big index when you know
+ /// readers are not using it.
/// </summary>
internal class KeepNoneOnInitDeletionPolicy : IndexDeletionPolicy
{
@@ -284,15 +284,13 @@ namespace Lucene.Net.Index
[Test]
public virtual void TestExpirationTimeDeletionPolicy()
{
-
double SECONDS = 2.0;
- bool autoCommit = false;
bool useCompoundFile = true;
Directory dir = new RAMDirectory();
ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(this, dir, SECONDS);
- IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+ IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetUseCompoundFile(useCompoundFile);
writer.Close();
@@ -302,7 +300,7 @@ namespace Lucene.Net.Index
// Record last time when writer performed deletes of
// past commits
lastDeleteTime = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond);
- writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+ writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetUseCompoundFile(useCompoundFile);
for (int j = 0; j < 17; j++)
{
@@ -330,7 +328,7 @@ namespace Lucene.Net.Index
{
try
{
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, true);
reader.Close();
fileName = IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen);
long modTime = dir.FileModified(fileName);
@@ -356,57 +354,48 @@ namespace Lucene.Net.Index
public virtual void TestKeepAllDeletionPolicy()
{
- for (int pass = 0; pass < 4; pass++)
+ for (int pass = 0; pass < 2; pass++)
{
-
- bool autoCommit = pass < 2;
- bool useCompoundFile = (pass % 2) > 0;
+ bool useCompoundFile = (pass % 2) != 0;
// Never deletes a commit
KeepAllDeletionPolicy policy = new KeepAllDeletionPolicy(this);
Directory dir = new RAMDirectory();
policy.dir = dir;
-
- IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+
+ IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetMaxBufferedDocs(10);
writer.SetUseCompoundFile(useCompoundFile);
writer.SetMergeScheduler(new SerialMergeScheduler());
for (int i = 0; i < 107; i++)
{
AddDoc(writer);
- if (autoCommit && i % 10 == 0)
- writer.Commit();
}
writer.Close();
-
- writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+
+ writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetUseCompoundFile(useCompoundFile);
writer.Optimize();
writer.Close();
Assert.AreEqual(2, policy.numOnInit);
- if (!autoCommit)
+
// If we are not auto committing then there should
// be exactly 2 commits (one per close above):
- Assert.AreEqual(2, policy.numOnCommit);
+ Assert.AreEqual(2, policy.numOnCommit);
// Test listCommits
- System.Collections.ICollection commits = IndexReader.ListCommits(dir);
- if (!autoCommit)
+ ICollection<IndexCommit> commits = IndexReader.ListCommits(dir);
// 1 from opening writer + 2 from closing writer
- Assert.AreEqual(3, commits.Count);
- // 1 from opening writer + 2 from closing writer +
- // 11 from calling writer.commit() explicitly above
- else
- Assert.AreEqual(14, commits.Count);
+ Assert.AreEqual(3, commits.Count);
System.Collections.IEnumerator it = commits.GetEnumerator();
// Make sure we can open a reader on each commit:
while (it.MoveNext())
{
IndexCommit commit = (IndexCommit) it.Current;
- IndexReader r = IndexReader.Open(commit, null);
+ IndexReader r = IndexReader.Open(commit, null, false);
r.Close();
}
@@ -416,7 +405,7 @@ namespace Lucene.Net.Index
long gen = SegmentInfos.GetCurrentSegmentGeneration(dir);
while (gen > 0)
{
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, true);
reader.Close();
dir.DeleteFile(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
gen--;
@@ -462,7 +451,7 @@ namespace Lucene.Net.Index
}
writer.Close();
- System.Collections.ICollection commits = IndexReader.ListCommits(dir);
+ ICollection<IndexCommit> commits = IndexReader.ListCommits(dir);
Assert.AreEqual(6, commits.Count);
IndexCommit lastCommit = null;
System.Collections.IEnumerator it = commits.GetEnumerator();
@@ -490,7 +479,7 @@ namespace Lucene.Net.Index
// Should undo our rollback:
writer.Rollback();
- IndexReader r = IndexReader.Open(dir);
+ IndexReader r = IndexReader.Open(dir, true);
// Still optimized, still 11 docs
Assert.IsTrue(r.IsOptimized());
Assert.AreEqual(11, r.NumDocs());
@@ -504,7 +493,7 @@ namespace Lucene.Net.Index
// Now 8 because we made another commit
Assert.AreEqual(8, IndexReader.ListCommits(dir).Count);
- r = IndexReader.Open(dir);
+ r = IndexReader.Open(dir, true);
// Not optimized because we rolled it back, and now only
// 10 docs
Assert.IsTrue(!r.IsOptimized());
@@ -516,7 +505,7 @@ namespace Lucene.Net.Index
writer.Optimize();
writer.Close();
- r = IndexReader.Open(dir);
+ r = IndexReader.Open(dir, true);
Assert.IsTrue(r.IsOptimized());
Assert.AreEqual(10, r.NumDocs());
r.Close();
@@ -528,7 +517,7 @@ namespace Lucene.Net.Index
// Reader still sees optimized index, because writer
// opened on the prior commit has not yet committed:
- r = IndexReader.Open(dir);
+ r = IndexReader.Open(dir, true);
Assert.IsTrue(r.IsOptimized());
Assert.AreEqual(10, r.NumDocs());
r.Close();
@@ -536,7 +525,7 @@ namespace Lucene.Net.Index
writer.Close();
// Now reader sees unoptimized index:
- r = IndexReader.Open(dir);
+ r = IndexReader.Open(dir, true);
Assert.IsTrue(!r.IsOptimized());
Assert.AreEqual(10, r.NumDocs());
r.Close();
@@ -546,24 +535,21 @@ namespace Lucene.Net.Index
/* Test keeping NO commit points. This is a viable and
- * useful case eg where you want to build a big index with
- * autoCommit false and you know there are no readers.
+ * useful case eg where you want to build a big index and
+ * you know there are no readers.
*/
[Test]
public virtual void TestKeepNoneOnInitDeletionPolicy()
{
-
- for (int pass = 0; pass < 4; pass++)
+ for (int pass = 0; pass < 2; pass++)
{
-
- bool autoCommit = pass < 2;
- bool useCompoundFile = (pass % 2) > 0;
+ bool useCompoundFile = (pass % 2) != 0;
KeepNoneOnInitDeletionPolicy policy = new KeepNoneOnInitDeletionPolicy(this);
Directory dir = new RAMDirectory();
-
- IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+
+ IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetMaxBufferedDocs(10);
writer.SetUseCompoundFile(useCompoundFile);
for (int i = 0; i < 107; i++)
@@ -571,21 +557,20 @@ namespace Lucene.Net.Index
AddDoc(writer);
}
writer.Close();
-
- writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+
+ writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetUseCompoundFile(useCompoundFile);
writer.Optimize();
writer.Close();
Assert.AreEqual(2, policy.numOnInit);
- if (!autoCommit)
// If we are not auto committing then there should
// be exactly 2 commits (one per close above):
- Assert.AreEqual(2, policy.numOnCommit);
+ Assert.AreEqual(2, policy.numOnCommit);
// Simplistic check: just verify the index is in fact
// readable:
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, true);
reader.Close();
dir.Close();
@@ -598,14 +583,11 @@ namespace Lucene.Net.Index
[Test]
public virtual void TestKeepLastNDeletionPolicy()
{
-
int N = 5;
- for (int pass = 0; pass < 4; pass++)
+ for (int pass = 0; pass < 2; pass++)
{
-
- bool autoCommit = pass < 2;
- bool useCompoundFile = (pass % 2) > 0;
+ bool useCompoundFile = (pass % 2) != 0;
Directory dir = new RAMDirectory();
@@ -613,7 +595,7 @@ namespace Lucene.Net.Index
for (int j = 0; j < N + 1; j++)
{
- IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+ IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetMaxBufferedDocs(10);
writer.SetUseCompoundFile(useCompoundFile);
for (int i = 0; i < 17; i++)
@@ -626,14 +608,7 @@ namespace Lucene.Net.Index
Assert.IsTrue(policy.numDelete > 0);
Assert.AreEqual(N + 1, policy.numOnInit);
- if (autoCommit)
- {
- Assert.IsTrue(policy.numOnCommit > 1);
- }
- else
- {
- Assert.AreEqual(N + 1, policy.numOnCommit);
- }
+ Assert.AreEqual(N + 1, policy.numOnCommit);
// Simplistic check: just verify only the past N segments_N's still
// exist, and, I can open a reader on each:
@@ -643,7 +618,7 @@ namespace Lucene.Net.Index
{
try
{
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, true);
reader.Close();
if (i == N)
{
@@ -675,19 +650,16 @@ namespace Lucene.Net.Index
[Test]
public virtual void TestKeepLastNDeletionPolicyWithReader()
{
-
int N = 10;
- for (int pass = 0; pass < 4; pass++)
+ for (int pass = 0; pass < 2; pass++)
{
-
- bool autoCommit = pass < 2;
- bool useCompoundFile = (pass % 2) > 0;
+ bool useCompoundFile = (pass % 2) != 0;
KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(this, N);
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+ IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetUseCompoundFile(useCompoundFile);
writer.Close();
Term searchTerm = new Term("content", "aaa");
@@ -695,35 +667,34 @@ namespace Lucene.Net.Index
for (int i = 0; i < N + 1; i++)
{
- writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+ writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetUseCompoundFile(useCompoundFile);
for (int j = 0; j < 17; j++)
{
AddDoc(writer);
}
- // this is a commit when autoCommit=false:
+ // this is a commit
writer.Close();
- IndexReader reader = IndexReader.Open(dir, policy);
+ IndexReader reader = IndexReader.Open(dir, policy, false);
reader.DeleteDocument(3 * i + 1);
reader.SetNorm(4 * i + 1, "content", 2.0F);
IndexSearcher searcher = new IndexSearcher(reader);
ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
Assert.AreEqual(16 * (1 + i), hits.Length);
- // this is a commit when autoCommit=false:
+ // this is a commit
reader.Close();
searcher.Close();
}
- writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+ writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetUseCompoundFile(useCompoundFile);
writer.Optimize();
- // this is a commit when autoCommit=false:
+ // this is a commit
writer.Close();
Assert.AreEqual(2 * (N + 2), policy.numOnInit);
- if (!autoCommit)
- Assert.AreEqual(2 * (N + 2) - 1, policy.numOnCommit);
+ Assert.AreEqual(2 * (N + 2) - 1, policy.numOnCommit);
- IndexSearcher searcher2 = new IndexSearcher(dir);
+ IndexSearcher searcher2 = new IndexSearcher(dir, false);
ScoreDoc[] hits2 = searcher2.Search(query, null, 1000).ScoreDocs;
Assert.AreEqual(176, hits2.Length);
@@ -738,29 +709,25 @@ namespace Lucene.Net.Index
{
try
{
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, true);
// Work backwards in commits on what the expected
- // count should be. Only check this in the
- // autoCommit false case:
- if (!autoCommit)
+ // count should be.
+ searcher2 = new IndexSearcher(reader);
+ hits2 = searcher2.Search(query, null, 1000).ScoreDocs;
+ if (i > 1)
{
- searcher2 = new IndexSearcher(reader);
- hits2 = searcher2.Search(query, null, 1000).ScoreDocs;
- if (i > 1)
+ if (i % 2 == 0)
{
- if (i % 2 == 0)
- {
- expectedCount += 1;
- }
- else
- {
- expectedCount -= 17;
- }
+ expectedCount += 1;
+ }
+ else
+ {
+ expectedCount -= 17;
}
- Assert.AreEqual(expectedCount, hits2.Length);
- searcher2.Close();
}
+ Assert.AreEqual(expectedCount, hits2.Length);
+ searcher2.Close();
reader.Close();
if (i == N)
{
@@ -792,19 +759,16 @@ namespace Lucene.Net.Index
[Test]
public virtual void TestKeepLastNDeletionPolicyWithCreates()
{
-
int N = 10;
- for (int pass = 0; pass < 4; pass++)
+ for (int pass = 0; pass < 2; pass++)
{
-
- bool autoCommit = pass < 2;
- bool useCompoundFile = (pass % 2) > 0;
+ bool useCompoundFile = (pass % 2) != 0;
KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(this, N);
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+ IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetMaxBufferedDocs(10);
writer.SetUseCompoundFile(useCompoundFile);
writer.Close();
@@ -813,37 +777,36 @@ namespace Lucene.Net.Index
for (int i = 0; i < N + 1; i++)
{
-
- writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+
+ writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetMaxBufferedDocs(10);
writer.SetUseCompoundFile(useCompoundFile);
for (int j = 0; j < 17; j++)
{
AddDoc(writer);
}
- // this is a commit when autoCommit=false:
+ // this is a commit
writer.Close();
- IndexReader reader = IndexReader.Open(dir, policy);
+ IndexReader reader = IndexReader.Open(dir, policy, false);
reader.DeleteDocument(3);
reader.SetNorm(5, "content", 2.0F);
IndexSearcher searcher = new IndexSearcher(reader);
ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
Assert.AreEqual(16, hits.Length);
- // this is a commit when autoCommit=false:
+ // this is a commit
reader.Close();
searcher.Close();
-
- writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+
+ writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
// This will not commit: there are no changes
// pending because we opened for "create":
writer.Close();
}
Assert.AreEqual(1 + 3 * (N + 1), policy.numOnInit);
- if (!autoCommit)
- Assert.AreEqual(3 * (N + 1), policy.numOnCommit);
+ Assert.AreEqual(3 * (N + 1), policy.numOnCommit);
- IndexSearcher searcher2 = new IndexSearcher(dir);
+ IndexSearcher searcher2 = new IndexSearcher(dir, false);
ScoreDoc[] hits2 = searcher2.Search(query, null, 1000).ScoreDocs;
Assert.AreEqual(0, hits2.Length);
@@ -858,29 +821,25 @@ namespace Lucene.Net.Index
{
try
{
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, true);
// Work backwards in commits on what the expected
- // count should be. Only check this in the
- // autoCommit false case:
- if (!autoCommit)
+ // count should be.
+ searcher2 = new IndexSearcher(reader);
+ hits2 = searcher2.Search(query, null, 1000).ScoreDocs;
+ Assert.AreEqual(expectedCount, hits2.Length);
+ searcher2.Close();
+ if (expectedCount == 0)
{
- searcher2 = new IndexSearcher(reader);
- hits2 = searcher2.Search(query, null, 1000).ScoreDocs;
- Assert.AreEqual(expectedCount, hits2.Length);
- searcher2.Close();
- if (expectedCount == 0)
- {
- expectedCount = 16;
- }
- else if (expectedCount == 16)
- {
- expectedCount = 17;
- }
- else if (expectedCount == 17)
- {
- expectedCount = 0;
- }
+ expectedCount = 16;
+ }
+ else if (expectedCount == 16)
+ {
+ expectedCount = 17;
+ }
+ else if (expectedCount == 17)
+ {
+ expectedCount = 0;
}
reader.Close();
if (i == N)
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDirectoryReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDirectoryReader.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDirectoryReader.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDirectoryReader.cs Sat Nov 12 23:51:42 2011
@@ -65,7 +65,7 @@ namespace Lucene.Net.Index
protected internal virtual IndexReader OpenReader()
{
IndexReader reader;
- reader = IndexReader.Open(dir);
+ reader = IndexReader.Open(dir, false);
Assert.IsTrue(reader is DirectoryReader);
Assert.IsTrue(dir != null);
@@ -151,7 +151,7 @@ namespace Lucene.Net.Index
AddDoc(ramDir1, "test foo", true);
RAMDirectory ramDir2 = new RAMDirectory();
AddDoc(ramDir2, "test blah", true);
- IndexReader[] readers = new IndexReader[]{IndexReader.Open(ramDir1), IndexReader.Open(ramDir2)};
+ IndexReader[] readers = new IndexReader[]{IndexReader.Open(ramDir1, false), IndexReader.Open(ramDir2, false)};
MultiReader mr = new MultiReader(readers);
Assert.IsTrue(mr.IsCurrent()); // just opened, must be current
AddDoc(ramDir1, "more text", false);
@@ -179,9 +179,9 @@ namespace Lucene.Net.Index
AddDoc(ramDir2, "test blah", true);
RAMDirectory ramDir3 = new RAMDirectory();
AddDoc(ramDir3, "test wow", true);
-
- IndexReader[] readers1 = new IndexReader[]{IndexReader.Open(ramDir1), IndexReader.Open(ramDir3)};
- IndexReader[] readers2 = new IndexReader[]{IndexReader.Open(ramDir1), IndexReader.Open(ramDir2), IndexReader.Open(ramDir3)};
+
+ IndexReader[] readers1 = new [] { IndexReader.Open(ramDir1, false), IndexReader.Open(ramDir3, false) };
+ IndexReader[] readers2 = new [] { IndexReader.Open(ramDir1, false), IndexReader.Open(ramDir2, false), IndexReader.Open(ramDir3, false) };
MultiReader mr2 = new MultiReader(readers1);
MultiReader mr3 = new MultiReader(readers2);
@@ -221,7 +221,7 @@ namespace Lucene.Net.Index
private void AddDoc(RAMDirectory ramDir1, System.String s, bool create)
{
- IndexWriter iw = new IndexWriter(ramDir1, new StandardAnalyzer(), create, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(ramDir1, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), create, IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
doc.Add(new Field("body", s, Field.Store.YES, Field.Index.ANALYZED));
iw.AddDocument(doc);
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDoc.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDoc.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDoc.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDoc.cs Sat Nov 12 23:51:42 2011
@@ -16,6 +16,7 @@
*/
using System;
+using Lucene.Net.Documents;
using Lucene.Net.Support;
using NUnit.Framework;
@@ -30,11 +31,7 @@ namespace Lucene.Net.Index
{
- /// <summary>JUnit adaptation of an older test case DocTest.
- ///
- /// </summary>
- /// <version> $Id: TestDoc.java 780770 2009-06-01 18:34:10Z uschindler $
- /// </version>
+ /// <summary>JUnit adaptation of an older test case DocTest.</summary>
[TestFixture]
public class TestDoc:LuceneTestCase
{
@@ -193,16 +190,17 @@ namespace Lucene.Net.Index
{
System.IO.FileInfo file = new System.IO.FileInfo(System.IO.Path.Combine(workDir.FullName, fileName));
Document doc = FileDocument.Document(file);
+ doc.Add(new Field("contents", new System.IO.StreamReader(file.FullName)));
writer.AddDocument(doc);
- writer.Flush();
+ writer.Commit();
return writer.NewestSegment();
}
private SegmentInfo Merge(SegmentInfo si1, SegmentInfo si2, System.String merged, bool useCompoundFile)
{
- SegmentReader r1 = SegmentReader.Get(si1);
- SegmentReader r2 = SegmentReader.Get(si2);
+ SegmentReader r1 = SegmentReader.Get(true, si1, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+ SegmentReader r2 = SegmentReader.Get(true, si2, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
SegmentMerger merger = new SegmentMerger(si1.dir, merged);
@@ -226,7 +224,7 @@ namespace Lucene.Net.Index
private void PrintSegment(System.IO.StreamWriter out_Renamed, SegmentInfo si)
{
- SegmentReader reader = SegmentReader.Get(si);
+ SegmentReader reader = SegmentReader.Get(true, si, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
for (int i = 0; i < reader.NumDocs(); i++)
{
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDocumentWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDocumentWriter.cs?rev=1201357&r1=1201356&r2=1201357&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDocumentWriter.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestDocumentWriter.cs Sat Nov 12 23:51:42 2011
@@ -16,7 +16,7 @@
*/
using System;
-
+using Lucene.Net.Util;
using NUnit.Framework;
using Analyzer = Lucene.Net.Analysis.Analyzer;
@@ -44,7 +44,7 @@ namespace Lucene.Net.Index
{
[TestFixture]
- public class TestDocumentWriter:BaseTokenStreamTestCase
+ public class TestDocumentWriter : LuceneTestCase
{
private class AnonymousClassAnalyzer:Analyzer
{
@@ -86,9 +86,9 @@ namespace Lucene.Net.Index
private void InitBlock(AnonymousClassAnalyzer1 enclosingInstance)
{
this.enclosingInstance = enclosingInstance;
- termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
- payloadAtt = (PayloadAttribute) AddAttribute(typeof(PayloadAttribute));
- posIncrAtt = (PositionIncrementAttribute) AddAttribute(typeof(PositionIncrementAttribute));
+ termAtt = AddAttribute<TermAttribute>();
+ payloadAtt = AddAttribute<PayloadAttribute>();
+ posIncrAtt = AddAttribute<PositionIncrementAttribute>();
}
private AnonymousClassAnalyzer1 enclosingInstance;
public AnonymousClassAnalyzer1 Enclosing_Instance
@@ -168,7 +168,7 @@ namespace Lucene.Net.Index
private void InitBlock(TestDocumentWriter enclosingInstance)
{
this.enclosingInstance = enclosingInstance;
- termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
+ termAtt = AddAttribute<TermAttribute>();
}
private TestDocumentWriter enclosingInstance;
public TestDocumentWriter Enclosing_Instance
@@ -229,11 +229,11 @@ namespace Lucene.Net.Index
Analyzer analyzer = new WhitespaceAnalyzer();
IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
writer.AddDocument(testDoc);
- writer.Flush();
+ writer.Commit();
SegmentInfo info = writer.NewestSegment();
writer.Close();
//After adding the document, we should be able to read it back in
- SegmentReader reader = SegmentReader.Get(info);
+ SegmentReader reader = SegmentReader.Get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
Assert.IsTrue(reader != null);
Document doc = reader.Document(0);
Assert.IsTrue(doc != null);
@@ -285,10 +285,10 @@ namespace Lucene.Net.Index
doc.Add(new Field("repeated", "repeated two", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
- writer.Flush();
+ writer.Commit();
SegmentInfo info = writer.NewestSegment();
writer.Close();
- SegmentReader reader = SegmentReader.Get(info);
+ SegmentReader reader = SegmentReader.Get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
TermPositions termPositions = reader.TermPositions(new Term("repeated", "repeated"));
Assert.IsTrue(termPositions.Next());
@@ -309,10 +309,10 @@ namespace Lucene.Net.Index
doc.Add(new Field("f1", "a 5 a a", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
- writer.Flush();
+ writer.Commit();
SegmentInfo info = writer.NewestSegment();
writer.Close();
- SegmentReader reader = SegmentReader.Get(info);
+ SegmentReader reader = SegmentReader.Get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
TermPositions termPositions = reader.TermPositions(new Term("f1", "a"));
Assert.IsTrue(termPositions.Next());
@@ -336,10 +336,10 @@ namespace Lucene.Net.Index
doc.Add(new Field("preanalyzed", new AnonymousClassTokenStream(this), TermVector.NO));
writer.AddDocument(doc);
- writer.Flush();
+ writer.Commit();
SegmentInfo info = writer.NewestSegment();
writer.Close();
- SegmentReader reader = SegmentReader.Get(info);
+ SegmentReader reader = SegmentReader.Get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
TermPositions termPositions = reader.TermPositions(new Term("preanalyzed", "term1"));
Assert.IsTrue(termPositions.Next());
@@ -371,14 +371,15 @@ namespace Lucene.Net.Index
// f2 first with tv then without tv
doc.Add(new Field("f2", "v1", Field.Store.YES, Field.Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
doc.Add(new Field("f2", "v2", Field.Store.YES, Field.Index.NOT_ANALYZED, TermVector.NO));
-
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true,
+ IndexWriter.MaxFieldLength.LIMITED);
writer.AddDocument(doc);
writer.Close();
_TestUtil.CheckIndex(dir);
- IndexReader reader = IndexReader.Open(dir);
+ IndexReader reader = IndexReader.Open(dir, true);
// f1
TermFreqVector tfv1 = reader.GetTermFreqVector(0, "f1");
Assert.IsNotNull(tfv1);
@@ -406,7 +407,7 @@ namespace Lucene.Net.Index
doc.Add(f);
doc.Add(new Field("f2", "v2", Field.Store.YES, Field.Index.NO));
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.AddDocument(doc);
writer.Optimize(); // be sure to have a single segment
writer.Close();