You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by cc...@apache.org on 2012/03/21 07:04:35 UTC
svn commit: r1303294 [5/6] - in /incubator/lucene.net/trunk:
src/contrib/Analyzers/AR/ src/contrib/Analyzers/BR/
src/contrib/Analyzers/CJK/ src/contrib/Analyzers/Cn/
src/contrib/Analyzers/Compound/ src/contrib/Analyzers/Cz/
src/contrib/Analyzers/De/ sr...
Modified: incubator/lucene.net/trunk/test/core/Index/TestIndexReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIndexReader.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIndexReader.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIndexReader.cs Wed Mar 21 06:04:26 2012
@@ -129,17 +129,17 @@ namespace Lucene.Net.Index
writer.Close();
// set up reader:
IndexReader reader = IndexReader.Open(d, false);
- Assert.IsTrue(reader.IsCurrent);
+ Assert.IsTrue(reader.IsCurrent());
// modify index by adding another document:
writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
AddDocumentWithFields(writer);
writer.Close();
- Assert.IsFalse(reader.IsCurrent);
+ Assert.IsFalse(reader.IsCurrent());
// re-create index:
writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
AddDocumentWithFields(writer);
writer.Close();
- Assert.IsFalse(reader.IsCurrent);
+ Assert.IsFalse(reader.IsCurrent());
reader.Close();
d.Close();
}
@@ -165,17 +165,17 @@ namespace Lucene.Net.Index
// add more documents
writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
// want to get some more segments here
- for (int i = 0; i < 5 * writer.GetMergeFactor(); i++)
+ for (int i = 0; i < 5 * writer.MergeFactor; i++)
{
AddDocumentWithFields(writer);
}
// new fields are in some different segments (we hope)
- for (int i = 0; i < 5 * writer.GetMergeFactor(); i++)
+ for (int i = 0; i < 5 * writer.MergeFactor; i++)
{
AddDocumentWithDifferentFields(writer);
}
// new termvector fields
- for (int i = 0; i < 5 * writer.GetMergeFactor(); i++)
+ for (int i = 0; i < 5 * writer.MergeFactor; i++)
{
AddDocumentWithTermVectorFields(writer);
}
@@ -248,7 +248,7 @@ namespace Lucene.Net.Index
IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
// want to get some more segments here
// new termvector fields
- for (int i = 0; i < 5 * writer.GetMergeFactor(); i++)
+ for (int i = 0; i < 5 * writer.MergeFactor; i++)
{
Document doc = new Document();
doc.Add(new Field("tvnot", "one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
@@ -375,7 +375,7 @@ namespace Lucene.Net.Index
Assert.AreEqual(1, fields.Length);
Field b1 = fields[0];
Assert.IsTrue(b1.IsBinary);
- byte[] data1 = b1.BinaryValue;
+ byte[] data1 = b1.GetBinaryValue();
Assert.AreEqual(bin.Length, b1.BinaryLength);
for (int i = 0; i < bin.Length; i++)
{
@@ -391,7 +391,7 @@ namespace Lucene.Net.Index
IFieldable fb1 = fieldables[0];
Assert.IsTrue(fb1.IsBinary);
Assert.AreEqual(bin.Length, fb1.BinaryLength);
- data1 = fb1.BinaryValue;
+ data1 = fb1.GetBinaryValue();
Assert.AreEqual(bin.Length, fb1.BinaryLength);
for (int i = 0; i < bin.Length; i++)
{
@@ -411,7 +411,7 @@ namespace Lucene.Net.Index
Assert.AreEqual(1, fields.Length);
b1 = fields[0];
Assert.IsTrue(b1.IsBinary);
- data1 = b1.BinaryValue;
+ data1 = b1.GetBinaryValue();
Assert.AreEqual(bin.Length, b1.BinaryLength);
for (int i = 0; i < bin.Length; i++)
{
@@ -886,7 +886,7 @@ namespace Lucene.Net.Index
reader.UndeleteAll();
reader.Close();
reader = IndexReader.Open(dir, false);
- Assert.AreEqual(2, reader.NumDocs); // nothing has really been deleted thanks to undeleteAll()
+ Assert.AreEqual(2, reader.GetNumDocs()); // nothing has really been deleted thanks to undeleteAll()
reader.Close();
dir.Close();
}
@@ -905,7 +905,7 @@ namespace Lucene.Net.Index
reader.Close();
reader = IndexReader.Open(dir, false);
reader.UndeleteAll();
- Assert.AreEqual(2, reader.NumDocs); // nothing has really been deleted thanks to undeleteAll()
+ Assert.AreEqual(2, reader.GetNumDocs()); // nothing has really been deleted thanks to undeleteAll()
reader.Close();
dir.Close();
}
@@ -926,7 +926,7 @@ namespace Lucene.Net.Index
reader.UndeleteAll();
reader.Close();
reader = IndexReader.Open(dir, false);
- Assert.AreEqual(2, reader.NumDocs); // nothing has really been deleted thanks to undeleteAll()
+ Assert.AreEqual(2, reader.GetNumDocs()); // nothing has really been deleted thanks to undeleteAll()
reader.Close();
dir.Close();
}
@@ -1511,10 +1511,10 @@ namespace Lucene.Net.Index
public static void AssertIndexEquals(IndexReader index1, IndexReader index2)
{
- Assert.AreEqual(index1.NumDocs, index2.NumDocs, "IndexReaders have different values for numDocs.");
+ Assert.AreEqual(index1.GetNumDocs(), index2.GetNumDocs(), "IndexReaders have different values for numDocs.");
Assert.AreEqual(index1.MaxDoc, index2.MaxDoc, "IndexReaders have different values for maxDoc.");
Assert.AreEqual(index1.HasDeletions, index2.HasDeletions, "Only one IndexReader has deletions.");
- Assert.AreEqual(index1.IsOptimized, index2.IsOptimized, "Only one index is optimized.");
+ Assert.AreEqual(index1.IsOptimized(), index2.IsOptimized(), "Only one index is optimized.");
// check field names
System.Collections.Generic.ICollection<string> fieldsNames1 = index1.GetFieldNames(FieldOption.ALL);
@@ -1876,7 +1876,7 @@ namespace Lucene.Net.Index
// Reopen reader1 --> reader2
IndexReader r2 = r.Reopen();
r.Close();
- IndexReader sub0 = r2.SequentialSubReaders[0];
+ IndexReader sub0 = r2.GetSequentialSubReaders()[0];
int[] ints2 = Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetInts(sub0, "number");
r2.Close();
Assert.IsTrue(ints == ints2);
@@ -1917,7 +1917,7 @@ namespace Lucene.Net.Index
IndexReader r2 = r.Reopen(true);
r.Close();
Assert.IsTrue(r2 is ReadOnlyDirectoryReader);
- IndexReader[] subs = r2.SequentialSubReaders;
+ IndexReader[] subs = r2.GetSequentialSubReaders();
int[] ints2 = Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetInts(subs[0], "number");
r2.Close();
@@ -1957,7 +1957,7 @@ namespace Lucene.Net.Index
{
// expected
}
- IndexReader[] subs = r2.SequentialSubReaders;
+ IndexReader[] subs = r2.GetSequentialSubReaders();
for (int i = 0; i < subs.Length; i++)
{
Assert.AreEqual(36, subs[i].UniqueTermCount);
@@ -1990,9 +1990,9 @@ namespace Lucene.Net.Index
{
// expected
}
- Assert.IsFalse(((SegmentReader) r.SequentialSubReaders[0]).TermsIndexLoaded());
+ Assert.IsFalse(((SegmentReader) r.GetSequentialSubReaders()[0]).TermsIndexLoaded());
- Assert.AreEqual(-1, (r.SequentialSubReaders[0]).TermInfosIndexDivisor);
+ Assert.AreEqual(-1, (r.GetSequentialSubReaders()[0]).TermInfosIndexDivisor);
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
writer.AddDocument(doc);
writer.Close();
@@ -2000,7 +2000,7 @@ namespace Lucene.Net.Index
// LUCENE-1718: ensure re-open carries over no terms index:
IndexReader r2 = r.Reopen();
r.Close();
- IndexReader[] subReaders = r2.SequentialSubReaders;
+ IndexReader[] subReaders = r2.GetSequentialSubReaders();
Assert.AreEqual(2, subReaders.Length);
for (int i = 0; i < 2; i++)
{
@@ -2019,14 +2019,14 @@ namespace Lucene.Net.Index
Document doc = new Document();
writer.AddDocument(doc);
IndexReader r = IndexReader.Open(dir, true);
- Assert.IsTrue(r.IsCurrent);
+ Assert.IsTrue(r.IsCurrent());
writer.AddDocument(doc);
writer.PrepareCommit();
- Assert.IsTrue(r.IsCurrent);
+ Assert.IsTrue(r.IsCurrent());
IndexReader r2 = r.Reopen();
Assert.IsTrue(r == r2);
writer.Commit();
- Assert.IsFalse(r.IsCurrent);
+ Assert.IsFalse(r.IsCurrent());
writer.Close();
r.Close();
dir.Close();
Modified: incubator/lucene.net/trunk/test/core/Index/TestIndexReaderClone.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIndexReaderClone.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIndexReaderClone.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIndexReaderClone.cs Wed Mar 21 06:04:26 2012
@@ -183,9 +183,9 @@ namespace Lucene.Net.Index
TestIndexReaderReopen.CreateIndex(dir1, true);
IndexReader reader = IndexReader.Open(dir1, false);
- int docCount = reader.NumDocs;
+ int docCount = reader.GetNumDocs();
Assert.IsTrue(DeleteWorked(1, reader));
- Assert.AreEqual(docCount - 1, reader.NumDocs);
+ Assert.AreEqual(docCount - 1, reader.GetNumDocs());
IndexReader readOnlyReader = reader.Reopen(true);
if (!IsReadOnly(readOnlyReader))
@@ -193,7 +193,7 @@ namespace Lucene.Net.Index
Assert.Fail("reader isn't read only");
}
Assert.IsFalse(DeleteWorked(1, readOnlyReader));
- Assert.AreEqual(docCount - 1, readOnlyReader.NumDocs);
+ Assert.AreEqual(docCount - 1, readOnlyReader.GetNumDocs());
reader.Close();
readOnlyReader.Close();
dir1.Close();
@@ -518,7 +518,7 @@ namespace Lucene.Net.Index
TestIndexReaderReopen.CreateIndex(dir1, true);
IndexReader reader = IndexReader.Open(dir1, false);
reader.DeleteDocument(1); // acquire write lock
- IndexReader[] subs = reader.SequentialSubReaders;
+ IndexReader[] subs = reader.GetSequentialSubReaders();
System.Diagnostics.Debug.Assert(subs.Length > 1);
IndexReader[] clones = new IndexReader[subs.Length];
Modified: incubator/lucene.net/trunk/test/core/Index/TestIndexReaderCloneNorms.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIndexReaderCloneNorms.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIndexReaderCloneNorms.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIndexReaderCloneNorms.cs Wed Mar 21 06:04:26 2012
@@ -146,7 +146,7 @@ namespace Lucene.Net.Index
CreateIndex(dir3);
IndexWriter iw = new IndexWriter(dir3, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
iw.SetMaxBufferedDocs(5);
- iw.SetMergeFactor(3);
+ iw.MergeFactor = 3;
iw.AddIndexesNoOptimize(new Directory[]{dir1, dir2});
iw.Optimize();
iw.Close();
@@ -164,7 +164,7 @@ namespace Lucene.Net.Index
// now with optimize
iw = new IndexWriter(dir3, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
iw.SetMaxBufferedDocs(5);
- iw.SetMergeFactor(3);
+ iw.MergeFactor = 3;
iw.Optimize();
iw.Close();
VerifyIndex(dir3);
@@ -273,7 +273,7 @@ namespace Lucene.Net.Index
{
IndexWriter iw = new IndexWriter(dir, anlzr, true, IndexWriter.MaxFieldLength.LIMITED);
iw.SetMaxBufferedDocs(5);
- iw.SetMergeFactor(3);
+ iw.MergeFactor = 3;
iw.SetSimilarity(similarityOne);
iw.UseCompoundFile = true;
iw.Close();
@@ -336,7 +336,7 @@ namespace Lucene.Net.Index
{
IndexWriter iw = new IndexWriter(dir, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
iw.SetMaxBufferedDocs(5);
- iw.SetMergeFactor(3);
+ iw.MergeFactor = 3;
iw.SetSimilarity(similarityOne);
iw.UseCompoundFile = compound;
for (int i = 0; i < ndocs; i++)
Modified: incubator/lucene.net/trunk/test/core/Index/TestIndexReaderReopen.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIndexReaderReopen.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIndexReaderReopen.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIndexReaderReopen.cs Wed Mar 21 06:04:26 2012
@@ -502,7 +502,7 @@ namespace Lucene.Net.Index
AssertRefCountEquals(1, reader0);
Assert.IsTrue(reader0 is DirectoryReader);
- IndexReader[] subReaders0 = reader0.SequentialSubReaders;
+ IndexReader[] subReaders0 = reader0.GetSequentialSubReaders();
for (int i = 0; i < subReaders0.Length; i++)
{
AssertRefCountEquals(1, subReaders0[i]);
@@ -515,7 +515,7 @@ namespace Lucene.Net.Index
IndexReader reader1 = RefreshReader(reader0, true).refreshedReader;
Assert.IsTrue(reader1 is DirectoryReader);
- IndexReader[] subReaders1 = reader1.SequentialSubReaders;
+ IndexReader[] subReaders1 = reader1.GetSequentialSubReaders();
Assert.AreEqual(subReaders0.Length, subReaders1.Length);
for (int i = 0; i < subReaders0.Length; i++)
@@ -538,7 +538,7 @@ namespace Lucene.Net.Index
IndexReader reader2 = RefreshReader(reader1, true).refreshedReader;
Assert.IsTrue(reader2 is DirectoryReader);
- IndexReader[] subReaders2 = reader2.SequentialSubReaders;
+ IndexReader[] subReaders2 = reader2.GetSequentialSubReaders();
Assert.AreEqual(subReaders1.Length, subReaders2.Length);
for (int i = 0; i < subReaders2.Length; i++)
@@ -572,7 +572,7 @@ namespace Lucene.Net.Index
IndexReader reader3 = RefreshReader(reader0, true).refreshedReader;
Assert.IsTrue(reader3 is DirectoryReader);
- IndexReader[] subReaders3 = reader3.SequentialSubReaders;
+ IndexReader[] subReaders3 = reader3.GetSequentialSubReaders();
Assert.AreEqual(subReaders3.Length, subReaders0.Length);
// try some permutations
@@ -1129,11 +1129,11 @@ namespace Lucene.Net.Index
IndexReader r = IndexReader.Open(dir, false);
if (multiSegment)
{
- Assert.IsTrue(r.SequentialSubReaders.Length > 1);
+ Assert.IsTrue(r.GetSequentialSubReaders().Length > 1);
}
else
{
- Assert.IsTrue(r.SequentialSubReaders.Length == 1);
+ Assert.IsTrue(r.GetSequentialSubReaders().Length == 1);
}
r.Close();
}
@@ -1225,7 +1225,7 @@ namespace Lucene.Net.Index
{
if (reader is DirectoryReader)
{
- IndexReader[] subReaders = reader.SequentialSubReaders;
+ IndexReader[] subReaders = reader.GetSequentialSubReaders();
for (int i = 0; i < subReaders.Length; i++)
{
AssertReaderClosed(subReaders[i], checkSubReaders, checkNormsClosed);
@@ -1234,7 +1234,7 @@ namespace Lucene.Net.Index
if (reader is MultiReader)
{
- IndexReader[] subReaders = reader.SequentialSubReaders;
+ IndexReader[] subReaders = reader.GetSequentialSubReaders();
for (int i = 0; i < subReaders.Length; i++)
{
AssertReaderClosed(subReaders[i], checkSubReaders, checkNormsClosed);
@@ -1329,8 +1329,8 @@ namespace Lucene.Net.Index
IndexReader r2 = r1.Reopen(); // MSR
Assert.IsTrue(r1 != r2);
- SegmentReader sr1 = (SegmentReader) r1.SequentialSubReaders[0]; // Get SRs for the first segment from original
- SegmentReader sr2 = (SegmentReader) r2.SequentialSubReaders[0]; // and reopened IRs
+ SegmentReader sr1 = (SegmentReader) r1.GetSequentialSubReaders()[0]; // Get SRs for the first segment from original
+ SegmentReader sr2 = (SegmentReader) r2.GetSequentialSubReaders()[0]; // and reopened IRs
// At this point they share the same BitVector
Assert.IsTrue(sr1.deletedDocs_ForNUnit == sr2.deletedDocs_ForNUnit);
@@ -1363,7 +1363,7 @@ namespace Lucene.Net.Index
IndexReader r2 = r1.Reopen();
Assert.IsTrue(r1 != r2);
- IndexReader[] rs2 = r2.SequentialSubReaders;
+ IndexReader[] rs2 = r2.GetSequentialSubReaders();
SegmentReader sr1 = SegmentReader.GetOnlySegmentReader(r1);
SegmentReader sr2 = (SegmentReader) rs2[0];
@@ -1413,7 +1413,7 @@ namespace Lucene.Net.Index
writer.Close();
IndexReader r = IndexReader.Open(dir, false);
- Assert.AreEqual(0, r.NumDocs);
+ Assert.AreEqual(0, r.GetNumDocs());
Assert.AreEqual(4, r.MaxDoc);
System.Collections.IEnumerator it = IndexReader.ListCommits(dir).GetEnumerator();
@@ -1447,11 +1447,11 @@ namespace Lucene.Net.Index
}
if (v < 4)
{
- Assert.AreEqual(1 + v, r2.NumDocs);
+ Assert.AreEqual(1 + v, r2.GetNumDocs());
}
else
{
- Assert.AreEqual(7 - v, r2.NumDocs);
+ Assert.AreEqual(7 - v, r2.GetNumDocs());
}
r.Close();
r = r2;
Modified: incubator/lucene.net/trunk/test/core/Index/TestIndexWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIndexWriter.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIndexWriter.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIndexWriter.cs Wed Mar 21 06:04:26 2012
@@ -429,19 +429,19 @@ namespace Lucene.Net.Index
IndexReader reader = null;
int i;
- IndexWriter.SetDefaultWriteLockTimeout(2000);
- Assert.AreEqual(2000, IndexWriter.GetDefaultWriteLockTimeout());
+ IndexWriter.DefaultWriteLockTimeout = 2000;
+ Assert.AreEqual(2000, IndexWriter.DefaultWriteLockTimeout);
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
- IndexWriter.SetDefaultWriteLockTimeout(1000);
+ IndexWriter.DefaultWriteLockTimeout = 1000;
// add 100 documents
for (i = 0; i < 100; i++)
{
AddDoc(writer);
}
- Assert.AreEqual(100, writer.MaxDoc);
+ Assert.AreEqual(100, writer.MaxDoc());
writer.Close();
// delete 40 documents
@@ -454,33 +454,33 @@ namespace Lucene.Net.Index
// test doc count before segments are merged/index is optimized
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
- Assert.AreEqual(100, writer.MaxDoc);
+ Assert.AreEqual(100, writer.MaxDoc());
writer.Close();
reader = IndexReader.Open(dir, true);
Assert.AreEqual(100, reader.MaxDoc);
- Assert.AreEqual(60, reader.NumDocs);
+ Assert.AreEqual(60, reader.GetNumDocs());
reader.Close();
// optimize the index and check that the new doc count is correct
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
- Assert.AreEqual(100, writer.MaxDoc);
+ Assert.AreEqual(100, writer.MaxDoc());
Assert.AreEqual(60, writer.NumDocs());
writer.Optimize();
- Assert.AreEqual(60, writer.MaxDoc);
+ Assert.AreEqual(60, writer.MaxDoc());
Assert.AreEqual(60, writer.NumDocs());
writer.Close();
// check that the index reader gives the same numbers.
reader = IndexReader.Open(dir, true);
Assert.AreEqual(60, reader.MaxDoc);
- Assert.AreEqual(60, reader.NumDocs);
+ Assert.AreEqual(60, reader.GetNumDocs());
reader.Close();
// make sure opening a new index for create over
// this existing one works correctly:
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
- Assert.AreEqual(0, writer.MaxDoc);
+ Assert.AreEqual(0, writer.MaxDoc());
Assert.AreEqual(0, writer.NumDocs());
writer.Close();
}
@@ -999,7 +999,7 @@ namespace Lucene.Net.Index
// Make sure the doc that has the massive term is in
// the index:
- Assert.AreEqual(2, reader.NumDocs, "document with wicked long term should be in the index!");
+ Assert.AreEqual(2, reader.GetNumDocs(), "document with wicked long term should be in the index!");
reader.Close();
@@ -1033,7 +1033,7 @@ namespace Lucene.Net.Index
LogDocMergePolicy ldmp = new LogDocMergePolicy(writer);
ldmp.MinMergeDocs = 1;
writer.SetMergePolicy(ldmp);
- writer.SetMergeFactor(5);
+ writer.MergeFactor = 5;
writer.SetMaxBufferedDocs(2);
for (int j = 0; j < numDocs; j++)
writer.AddDocument(doc);
@@ -1045,7 +1045,7 @@ namespace Lucene.Net.Index
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
writer.SetMergePolicy(ldmp);
- writer.SetMergeFactor(5);
+ writer.MergeFactor = 5;
writer.Optimize(3);
writer.Close();
@@ -1072,7 +1072,7 @@ namespace Lucene.Net.Index
LogDocMergePolicy ldmp = new LogDocMergePolicy(writer);
ldmp.MinMergeDocs = 1;
writer.SetMergePolicy(ldmp);
- writer.SetMergeFactor(4);
+ writer.MergeFactor = 4;
writer.SetMaxBufferedDocs(2);
for (int iter = 0; iter < 10; iter++)
@@ -1176,17 +1176,17 @@ namespace Lucene.Net.Index
// now open reader:
IndexReader reader = IndexReader.Open(dir, true);
- Assert.AreEqual(reader.NumDocs, 1, "should be one document");
+ Assert.AreEqual(reader.GetNumDocs(), 1, "should be one document");
// now open index for create:
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
- Assert.AreEqual(writer.MaxDoc, 0, "should be zero documents");
+ Assert.AreEqual(writer.MaxDoc(), 0, "should be zero documents");
AddDoc(writer);
writer.Close();
- Assert.AreEqual(reader.NumDocs, 1, "should be one document");
+ Assert.AreEqual(reader.GetNumDocs(), 1, "should be one document");
IndexReader reader2 = IndexReader.Open(dir, true);
- Assert.AreEqual(reader2.NumDocs, 1, "should be one document");
+ Assert.AreEqual(reader2.GetNumDocs(), 1, "should be one document");
reader.Close();
reader2.Close();
}
@@ -1427,12 +1427,12 @@ namespace Lucene.Net.Index
hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer");
searcher.Close();
- Assert.IsTrue(reader.IsCurrent, "reader should have still been current");
+ Assert.IsTrue(reader.IsCurrent(), "reader should have still been current");
}
// Now, close the writer:
writer.Close();
- Assert.IsFalse(reader.IsCurrent, "reader should not be current now");
+ Assert.IsFalse(reader.IsCurrent(), "reader should not be current now");
searcher = new IndexSearcher(dir, false);
hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
@@ -1592,7 +1592,7 @@ namespace Lucene.Net.Index
// Reader should see index as unoptimized at this
// point:
- Assert.IsFalse(reader.IsOptimized, "Reader incorrectly sees that the index is optimized");
+ Assert.IsFalse(reader.IsOptimized(), "Reader incorrectly sees that the index is optimized");
reader.Close();
// Abort the writer:
@@ -1603,7 +1603,7 @@ namespace Lucene.Net.Index
reader = IndexReader.Open(dir, true);
// Reader should still see index as unoptimized:
- Assert.IsFalse(reader.IsOptimized, "Reader incorrectly sees that the index is optimized");
+ Assert.IsFalse(reader.IsOptimized(), "Reader incorrectly sees that the index is optimized");
reader.Close();
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
@@ -1615,7 +1615,7 @@ namespace Lucene.Net.Index
reader = IndexReader.Open(dir, true);
// Reader should now see index as optimized:
- Assert.IsTrue(reader.IsOptimized, "Reader incorrectly sees that the index is unoptimized");
+ Assert.IsTrue(reader.IsOptimized(), "Reader incorrectly sees that the index is unoptimized");
reader.Close();
}
@@ -1629,7 +1629,7 @@ namespace Lucene.Net.Index
IndexReader reader = IndexReader.Open(dir, true);
Assert.AreEqual(0, reader.MaxDoc);
- Assert.AreEqual(0, reader.NumDocs);
+ Assert.AreEqual(0, reader.GetNumDocs());
reader.Close();
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
@@ -1638,7 +1638,7 @@ namespace Lucene.Net.Index
reader = IndexReader.Open(dir, true);
Assert.AreEqual(0, reader.MaxDoc);
- Assert.AreEqual(0, reader.NumDocs);
+ Assert.AreEqual(0, reader.GetNumDocs());
reader.Close();
}
@@ -1663,7 +1663,7 @@ namespace Lucene.Net.Index
IndexReader reader = IndexReader.Open(dir, true);
Assert.AreEqual(100, reader.MaxDoc);
- Assert.AreEqual(100, reader.NumDocs);
+ Assert.AreEqual(100, reader.GetNumDocs());
for (int j = 0; j < 100; j++)
{
Assert.AreEqual(1, reader.DocFreq(new Term("a" + j, "aaa" + j)));
@@ -1989,7 +1989,7 @@ namespace Lucene.Net.Index
IndexReader reader = IndexReader.Open(dir, true);
Assert.AreEqual(1, reader.MaxDoc);
- Assert.AreEqual(1, reader.NumDocs);
+ Assert.AreEqual(1, reader.GetNumDocs());
Term t = new Term("field", "a");
Assert.AreEqual(1, reader.DocFreq(t));
TermDocs td = reader.TermDocs(t);
@@ -2062,7 +2062,7 @@ namespace Lucene.Net.Index
writer.Close();
_TestUtil.CheckIndex(dir);
IndexReader reader = IndexReader.Open(dir, true);
- Assert.AreEqual(2, reader.NumDocs);
+ Assert.AreEqual(2, reader.GetNumDocs());
}
// Test calling optimize(false) whereby optimize is kicked
@@ -2080,7 +2080,7 @@ namespace Lucene.Net.Index
Document doc = new Document();
doc.Add(new Field("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
writer.SetMaxBufferedDocs(2);
- writer.SetMergeFactor(101);
+ writer.MergeFactor = 101;
for (int i = 0; i < 200; i++)
writer.AddDocument(doc);
writer.Optimize(false);
@@ -2089,7 +2089,7 @@ namespace Lucene.Net.Index
{
writer.Close();
IndexReader reader = IndexReader.Open(dir, false);
- Assert.IsTrue(reader.IsOptimized);
+ Assert.IsTrue(reader.IsOptimized());
reader.Close();
}
else
@@ -2101,7 +2101,7 @@ namespace Lucene.Net.Index
writer.Close();
IndexReader reader = IndexReader.Open(dir, true);
- Assert.IsTrue(!reader.IsOptimized);
+ Assert.IsTrue(!reader.IsOptimized());
reader.Close();
SegmentInfos infos = new SegmentInfos();
@@ -2239,7 +2239,7 @@ namespace Lucene.Net.Index
Document document = new Document();
document.Add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
iw.SetMaxBufferedDocs(2);
- iw.SetMergeFactor(2);
+ iw.MergeFactor = 2;
ThreadClass.Current().Priority = (System.Threading.ThreadPriority)System.Threading.ThreadPriority.Highest;
for (int i = 0; i < 4; i++)
iw.AddDocument(document);
@@ -2279,7 +2279,7 @@ namespace Lucene.Net.Index
while (true)
{
- MergePolicy.OneMerge merge = writer.GetNextMerge_forNUnit();
+ MergePolicy.OneMerge merge = writer.GetNextMerge();
if (merge == null)
break;
for (int i = 0; i < merge.segments_ForNUnit.Count; i++)
@@ -2304,7 +2304,7 @@ namespace Lucene.Net.Index
iw.SetMergeScheduler(new MyMergeScheduler(this));
iw.MaxMergeDocs = 20;
iw.SetMaxBufferedDocs(2);
- iw.SetMergeFactor(2);
+ iw.MergeFactor = 2;
Document document = new Document();
document.Add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
for (int i = 0; i < 177; i++)
@@ -2649,7 +2649,7 @@ namespace Lucene.Net.Index
{
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
writer.SetMaxBufferedDocs(2);
- writer.SetMergeFactor(2);
+ writer.MergeFactor = 2;
writer.UseCompoundFile = false;
Document doc = new Document();
System.String contents = "aa bb cc dd ee ff gg hh ii jj kk";
@@ -2717,7 +2717,7 @@ namespace Lucene.Net.Index
writer.SetMergeScheduler(ms);
writer.SetMaxBufferedDocs(2);
- writer.SetMergeFactor(100);
+ writer.MergeFactor = 100;
for (int j = 0; j < 199; j++)
{
@@ -2734,7 +2734,7 @@ namespace Lucene.Net.Index
// Force a bunch of merge threads to kick off so we
// stress out aborting them on close:
- writer.SetMergeFactor(2);
+ writer.MergeFactor = 2;
IndexWriter finalWriter = writer;
System.Collections.ArrayList failure = new System.Collections.ArrayList();
@@ -2875,7 +2875,7 @@ namespace Lucene.Net.Index
writer.SetMergeScheduler(cms);
writer.SetMaxBufferedDocs(10);
- writer.SetMergeFactor(4);
+ writer.MergeFactor = 4;
IndexerThread[] threads = new IndexerThread[NUM_THREADS];
@@ -2982,7 +2982,7 @@ namespace Lucene.Net.Index
cms.SetSuppressExceptions();
writer.SetMergeScheduler(cms);
writer.SetMaxBufferedDocs(2);
- writer.SetMergeFactor(4);
+ writer.MergeFactor = 4;
dir.SetMaxSizeInBytes(4 * 1024 + 20 * iter);
IndexerThread[] threads = new IndexerThread[NUM_THREADS];
@@ -3089,7 +3089,7 @@ namespace Lucene.Net.Index
cms.SetSuppressExceptions();
writer.SetMergeScheduler(cms);
writer.SetMaxBufferedDocs(2);
- writer.SetMergeFactor(4);
+ writer.MergeFactor = 4;
IndexerThread[] threads = new IndexerThread[NUM_THREADS];
@@ -3359,30 +3359,30 @@ namespace Lucene.Net.Index
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
writer.SetMaxBufferedDocs(2);
- writer.SetMergeFactor(5);
+ writer.MergeFactor = 5;
for (int i = 0; i < 23; i++)
AddDoc(writer);
IndexReader reader = IndexReader.Open(dir, true);
- Assert.AreEqual(0, reader.NumDocs);
+ Assert.AreEqual(0, reader.GetNumDocs());
writer.Commit();
IndexReader reader2 = reader.Reopen();
- Assert.AreEqual(0, reader.NumDocs);
- Assert.AreEqual(23, reader2.NumDocs);
+ Assert.AreEqual(0, reader.GetNumDocs());
+ Assert.AreEqual(23, reader2.GetNumDocs());
reader.Close();
for (int i = 0; i < 17; i++)
AddDoc(writer);
- Assert.AreEqual(23, reader2.NumDocs);
+ Assert.AreEqual(23, reader2.GetNumDocs());
reader2.Close();
reader = IndexReader.Open(dir, true);
- Assert.AreEqual(23, reader.NumDocs);
+ Assert.AreEqual(23, reader.GetNumDocs());
reader.Close();
writer.Commit();
reader = IndexReader.Open(dir, true);
- Assert.AreEqual(40, reader.NumDocs);
+ Assert.AreEqual(40, reader.GetNumDocs());
reader.Close();
writer.Close();
dir.Close();
@@ -3425,7 +3425,7 @@ namespace Lucene.Net.Index
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
writer.SetMergeScheduler(cms);
writer.SetMaxBufferedDocs(2);
- writer.SetMergeFactor(5);
+ writer.MergeFactor = 5;
for (int i = 0; i < 23; i++)
{
@@ -3449,7 +3449,7 @@ namespace Lucene.Net.Index
writer.Close();
IndexReader reader = IndexReader.Open(dir, true);
- Assert.AreEqual(23, reader.NumDocs);
+ Assert.AreEqual(23, reader.GetNumDocs());
reader.Close();
dir.Close();
}
@@ -3487,7 +3487,7 @@ namespace Lucene.Net.Index
writer.Close();
IndexReader reader = IndexReader.Open(dir, true);
- for (int i = 0; i < reader.NumDocs; i++)
+ for (int i = 0; i < reader.GetNumDocs(); i++)
{
reader.Document(i);
reader.GetTermFreqVectors(i);
@@ -3635,21 +3635,21 @@ namespace Lucene.Net.Index
IndexReader ir = IndexReader.Open(dir, false);
Assert.AreEqual(10, ir.MaxDoc);
- Assert.AreEqual(10, ir.NumDocs);
+ Assert.AreEqual(10, ir.GetNumDocs());
ir.DeleteDocument(0);
ir.DeleteDocument(7);
- Assert.AreEqual(8, ir.NumDocs);
+ Assert.AreEqual(8, ir.GetNumDocs());
ir.Close();
writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
Assert.AreEqual(8, writer.NumDocs());
- Assert.AreEqual(10, writer.MaxDoc);
+ Assert.AreEqual(10, writer.MaxDoc());
writer.ExpungeDeletes();
Assert.AreEqual(8, writer.NumDocs());
writer.Close();
ir = IndexReader.Open(dir, true);
Assert.AreEqual(8, ir.MaxDoc);
- Assert.AreEqual(8, ir.NumDocs);
+ Assert.AreEqual(8, ir.GetNumDocs());
ir.Close();
dir.Close();
}
@@ -3661,7 +3661,7 @@ namespace Lucene.Net.Index
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
writer.SetMaxBufferedDocs(2);
- writer.SetMergeFactor(50);
+ writer.MergeFactor = 50;
writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
Document document = new Document();
@@ -3677,20 +3677,20 @@ namespace Lucene.Net.Index
IndexReader ir = IndexReader.Open(dir, false);
Assert.AreEqual(98, ir.MaxDoc);
- Assert.AreEqual(98, ir.NumDocs);
+ Assert.AreEqual(98, ir.GetNumDocs());
for (int i = 0; i < 98; i += 2)
ir.DeleteDocument(i);
- Assert.AreEqual(49, ir.NumDocs);
+ Assert.AreEqual(49, ir.GetNumDocs());
ir.Close();
writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
- writer.SetMergeFactor(3);
+ writer.MergeFactor = 3;
Assert.AreEqual(49, writer.NumDocs());
writer.ExpungeDeletes();
writer.Close();
ir = IndexReader.Open(dir, true);
Assert.AreEqual(49, ir.MaxDoc);
- Assert.AreEqual(49, ir.NumDocs);
+ Assert.AreEqual(49, ir.GetNumDocs());
ir.Close();
dir.Close();
}
@@ -3703,7 +3703,7 @@ namespace Lucene.Net.Index
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
writer.SetMaxBufferedDocs(2);
- writer.SetMergeFactor(50);
+ writer.MergeFactor = 50;
writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
Document document = new Document();
@@ -3719,20 +3719,20 @@ namespace Lucene.Net.Index
IndexReader ir = IndexReader.Open(dir, false);
Assert.AreEqual(98, ir.MaxDoc);
- Assert.AreEqual(98, ir.NumDocs);
+ Assert.AreEqual(98, ir.GetNumDocs());
for (int i = 0; i < 98; i += 2)
ir.DeleteDocument(i);
- Assert.AreEqual(49, ir.NumDocs);
+ Assert.AreEqual(49, ir.GetNumDocs());
ir.Close();
writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
// Force many merges to happen
- writer.SetMergeFactor(3);
+ writer.MergeFactor = 3;
writer.ExpungeDeletes(false);
writer.Close();
ir = IndexReader.Open(dir, true);
Assert.AreEqual(49, ir.MaxDoc);
- Assert.AreEqual(49, ir.NumDocs);
+ Assert.AreEqual(49, ir.GetNumDocs());
ir.Close();
dir.Close();
}
@@ -3881,7 +3881,7 @@ namespace Lucene.Net.Index
MockRAMDirectory dir = new MockRAMDirectory();
MockIndexWriter2 w = new MockIndexWriter2(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
w.SetMaxBufferedDocs(2);
- w.SetMergeFactor(2);
+ w.MergeFactor = 2;
w.doFail = true;
w.SetMergeScheduler(new ConcurrentMergeScheduler());
Document doc = new Document();
@@ -3963,7 +3963,7 @@ namespace Lucene.Net.Index
IndexReader ir = IndexReader.Open(dir, true);
Assert.AreEqual(1, ir.MaxDoc);
- Assert.AreEqual(0, ir.NumDocs);
+ Assert.AreEqual(0, ir.GetNumDocs());
ir.Close();
dir.Close();
@@ -4309,46 +4309,46 @@ namespace Lucene.Net.Index
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
writer.SetMaxBufferedDocs(2);
- writer.SetMergeFactor(5);
+ writer.MergeFactor = 5;
for (int i = 0; i < 23; i++)
AddDoc(writer);
IndexReader reader = IndexReader.Open(dir, true);
- Assert.AreEqual(0, reader.NumDocs);
+ Assert.AreEqual(0, reader.GetNumDocs());
writer.PrepareCommit();
IndexReader reader2 = IndexReader.Open(dir, true);
- Assert.AreEqual(0, reader2.NumDocs);
+ Assert.AreEqual(0, reader2.GetNumDocs());
writer.Commit();
IndexReader reader3 = reader.Reopen();
- Assert.AreEqual(0, reader.NumDocs);
- Assert.AreEqual(0, reader2.NumDocs);
- Assert.AreEqual(23, reader3.NumDocs);
+ Assert.AreEqual(0, reader.GetNumDocs());
+ Assert.AreEqual(0, reader2.GetNumDocs());
+ Assert.AreEqual(23, reader3.GetNumDocs());
reader.Close();
reader2.Close();
for (int i = 0; i < 17; i++)
AddDoc(writer);
- Assert.AreEqual(23, reader3.NumDocs);
+ Assert.AreEqual(23, reader3.GetNumDocs());
reader3.Close();
reader = IndexReader.Open(dir, true);
- Assert.AreEqual(23, reader.NumDocs);
+ Assert.AreEqual(23, reader.GetNumDocs());
reader.Close();
writer.PrepareCommit();
reader = IndexReader.Open(dir, true);
- Assert.AreEqual(23, reader.NumDocs);
+ Assert.AreEqual(23, reader.GetNumDocs());
reader.Close();
writer.Commit();
reader = IndexReader.Open(dir, true);
- Assert.AreEqual(40, reader.NumDocs);
+ Assert.AreEqual(40, reader.GetNumDocs());
reader.Close();
writer.Close();
dir.Close();
@@ -4364,25 +4364,25 @@ namespace Lucene.Net.Index
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
writer.SetMaxBufferedDocs(2);
- writer.SetMergeFactor(5);
+ writer.MergeFactor = 5;
for (int i = 0; i < 23; i++)
AddDoc(writer);
IndexReader reader = IndexReader.Open(dir, true);
- Assert.AreEqual(0, reader.NumDocs);
+ Assert.AreEqual(0, reader.GetNumDocs());
writer.PrepareCommit();
IndexReader reader2 = IndexReader.Open(dir, true);
- Assert.AreEqual(0, reader2.NumDocs);
+ Assert.AreEqual(0, reader2.GetNumDocs());
writer.Rollback();
IndexReader reader3 = reader.Reopen();
- Assert.AreEqual(0, reader.NumDocs);
- Assert.AreEqual(0, reader2.NumDocs);
- Assert.AreEqual(0, reader3.NumDocs);
+ Assert.AreEqual(0, reader.GetNumDocs());
+ Assert.AreEqual(0, reader2.GetNumDocs());
+ Assert.AreEqual(0, reader3.GetNumDocs());
reader.Close();
reader2.Close();
@@ -4390,21 +4390,21 @@ namespace Lucene.Net.Index
for (int i = 0; i < 17; i++)
AddDoc(writer);
- Assert.AreEqual(0, reader3.NumDocs);
+ Assert.AreEqual(0, reader3.GetNumDocs());
reader3.Close();
reader = IndexReader.Open(dir, true);
- Assert.AreEqual(0, reader.NumDocs);
+ Assert.AreEqual(0, reader.GetNumDocs());
reader.Close();
writer.PrepareCommit();
reader = IndexReader.Open(dir, true);
- Assert.AreEqual(0, reader.NumDocs);
+ Assert.AreEqual(0, reader.GetNumDocs());
reader.Close();
writer.Commit();
reader = IndexReader.Open(dir, true);
- Assert.AreEqual(17, reader.NumDocs);
+ Assert.AreEqual(17, reader.GetNumDocs());
reader.Close();
writer.Close();
dir.Close();
@@ -4422,7 +4422,7 @@ namespace Lucene.Net.Index
writer.Close();
IndexReader reader = IndexReader.Open(dir, true);
- Assert.AreEqual(0, reader.NumDocs);
+ Assert.AreEqual(0, reader.GetNumDocs());
reader.Close();
dir.Close();
}
@@ -4625,7 +4625,7 @@ namespace Lucene.Net.Index
_TestUtil.CheckIndex(c.dir2);
IndexReader reader = IndexReader.Open(c.dir2, true);
- Assert.AreEqual(100 + NUM_COPY * (3 * NUM_ITER / 4) * Lucene.Net.Index.TestIndexWriter.CommitAndAddIndexes.NUM_THREADS * Lucene.Net.Index.TestIndexWriter.CommitAndAddIndexes.NUM_INIT_DOCS, reader.NumDocs);
+ Assert.AreEqual(100 + NUM_COPY * (3 * NUM_ITER / 4) * Lucene.Net.Index.TestIndexWriter.CommitAndAddIndexes.NUM_THREADS * Lucene.Net.Index.TestIndexWriter.CommitAndAddIndexes.NUM_INIT_DOCS, reader.GetNumDocs());
reader.Close();
c.CloseDir();
@@ -4883,7 +4883,7 @@ namespace Lucene.Net.Index
Document doc = new Document();
Field f = new Field("binary", b, 10, 17, Field.Store.YES);
- byte[] bx = f.BinaryValue;
+ byte[] bx = f.GetBinaryValue();
Assert.IsTrue(bx != null);
Assert.AreEqual(50, bx.Length);
Assert.AreEqual(10, f.BinaryOffset);
@@ -4895,7 +4895,7 @@ namespace Lucene.Net.Index
IndexReader ir = IndexReader.Open(dir, true);
doc = ir.Document(0);
f = doc.GetField("binary");
- b = f.BinaryValue;
+ b = f.GetBinaryValue();
Assert.IsTrue(b != null);
Assert.AreEqual(17, b.Length, 17);
Assert.AreEqual(87, b[0]);
@@ -4951,7 +4951,7 @@ namespace Lucene.Net.Index
RAMDirectory startDir = new MockRAMDirectory();
IndexWriter w = new IndexWriter(startDir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
w.SetMaxBufferedDocs(2);
- w.SetMergeFactor(100);
+ w.MergeFactor = 100;
for (int i = 0; i < 27; i++)
AddDoc(w);
w.Close();
@@ -5308,7 +5308,7 @@ namespace Lucene.Net.Index
writer.Close();
IndexReader r3 = IndexReader.Open(dir, true);
- Assert.AreEqual(5, r3.NumDocs);
+ Assert.AreEqual(5, r3.GetNumDocs());
r3.Close();
r1.Close();
@@ -5368,7 +5368,7 @@ namespace Lucene.Net.Index
}
w.SetMaxBufferedDocs(2);
- w.SetMergeFactor(2);
+ w.MergeFactor = 2;
Document doc = new Document();
doc.Add(new Field("field", "some text contents", Field.Store.YES, Field.Index.ANALYZED));
for (int i = 0; i < 100; i++)
@@ -5514,7 +5514,7 @@ namespace Lucene.Net.Index
IndexReader ir = IndexReader.Open(dir, true);
doc = ir.Document(0);
f = doc.GetField("binary");
- b = f.BinaryValue;
+ b = f.GetBinaryValue();
Assert.IsTrue(b != null);
Assert.AreEqual(17, b.Length, 17);
Assert.AreEqual(87, b[0]);
@@ -5841,12 +5841,12 @@ namespace Lucene.Net.Index
Assert.NotNull(commit);
IndexReader r = IndexReader.Open(commit, true);
- Assert.AreEqual(2, r.NumDocs);
+ Assert.AreEqual(2, r.GetNumDocs());
r.Close();
// open "second", w/ writeable IndexReader & commit
r = IndexReader.Open(commit, new NoDeletionPolicy(), false);
- Assert.AreEqual(2, r.NumDocs);
+ Assert.AreEqual(2, r.GetNumDocs());
r.DeleteDocument(0);
r.DeleteDocument(1);
commitData["tag"]="fourth";
Modified: incubator/lucene.net/trunk/test/core/Index/TestIndexWriterDelete.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIndexWriterDelete.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIndexWriterDelete.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIndexWriterDelete.cs Wed Mar 21 06:04:26 2012
@@ -198,7 +198,7 @@ namespace Lucene.Net.Index
modifier.Commit();
IndexReader reader = IndexReader.Open(dir, true);
- Assert.AreEqual(7, reader.NumDocs);
+ Assert.AreEqual(7, reader.GetNumDocs());
reader.Close();
modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
@@ -206,7 +206,7 @@ namespace Lucene.Net.Index
modifier.Commit();
reader = IndexReader.Open(dir, true);
- Assert.AreEqual(0, reader.NumDocs);
+ Assert.AreEqual(0, reader.GetNumDocs());
reader.Close();
modifier.Close();
dir.Close();
@@ -263,7 +263,7 @@ namespace Lucene.Net.Index
modifier.Commit();
IndexReader reader = IndexReader.Open(dir, true);
- Assert.AreEqual(1, reader.NumDocs);
+ Assert.AreEqual(1, reader.GetNumDocs());
int hitCount = GetHitCount(dir, new Term("id", System.Convert.ToString(id)));
Assert.AreEqual(1, hitCount);
@@ -306,7 +306,7 @@ namespace Lucene.Net.Index
modifier.Commit();
IndexReader reader = IndexReader.Open(dir, true);
- Assert.AreEqual(5, reader.NumDocs);
+ Assert.AreEqual(5, reader.GetNumDocs());
modifier.Close();
}
@@ -330,7 +330,7 @@ namespace Lucene.Net.Index
modifier.Commit();
IndexReader reader = IndexReader.Open(dir, true);
- Assert.AreEqual(7, reader.NumDocs);
+ Assert.AreEqual(7, reader.GetNumDocs());
reader.Close();
id = 0;
@@ -340,7 +340,7 @@ namespace Lucene.Net.Index
modifier.Commit();
reader = IndexReader.Open(dir, true);
- Assert.AreEqual(5, reader.NumDocs);
+ Assert.AreEqual(5, reader.GetNumDocs());
reader.Close();
Term[] terms = new Term[3];
@@ -351,7 +351,7 @@ namespace Lucene.Net.Index
modifier.DeleteDocuments(terms);
modifier.Commit();
reader = IndexReader.Open(dir, true);
- Assert.AreEqual(2, reader.NumDocs);
+ Assert.AreEqual(2, reader.GetNumDocs());
reader.Close();
modifier.Close();
@@ -377,7 +377,7 @@ namespace Lucene.Net.Index
modifier.Commit();
IndexReader reader = IndexReader.Open(dir, true);
- Assert.AreEqual(7, reader.NumDocs);
+ Assert.AreEqual(7, reader.GetNumDocs());
reader.Close();
// Add 1 doc (so we will have something buffered)
@@ -388,7 +388,7 @@ namespace Lucene.Net.Index
// Delete all shouldn't be on disk yet
reader = IndexReader.Open(dir, true);
- Assert.AreEqual(7, reader.NumDocs);
+ Assert.AreEqual(7, reader.GetNumDocs());
reader.Close();
// Add a doc and update a doc (after the deleteAll, before the commit)
@@ -400,7 +400,7 @@ namespace Lucene.Net.Index
// Validate there are no docs left
reader = IndexReader.Open(dir, true);
- Assert.AreEqual(2, reader.NumDocs);
+ Assert.AreEqual(2, reader.GetNumDocs());
reader.Close();
modifier.Close();
@@ -428,7 +428,7 @@ namespace Lucene.Net.Index
AddDoc(modifier, ++id, value_Renamed);
IndexReader reader = IndexReader.Open(dir, true);
- Assert.AreEqual(7, reader.NumDocs);
+ Assert.AreEqual(7, reader.GetNumDocs());
reader.Close();
// Delete all
@@ -440,7 +440,7 @@ namespace Lucene.Net.Index
// Validate that the docs are still there
reader = IndexReader.Open(dir, true);
- Assert.AreEqual(7, reader.NumDocs);
+ Assert.AreEqual(7, reader.GetNumDocs());
reader.Close();
dir.Close();
@@ -466,7 +466,7 @@ namespace Lucene.Net.Index
modifier.Commit();
IndexReader reader = modifier.GetReader();
- Assert.AreEqual(7, reader.NumDocs);
+ Assert.AreEqual(7, reader.GetNumDocs());
reader.Close();
AddDoc(modifier, ++id, value_Renamed);
@@ -476,7 +476,7 @@ namespace Lucene.Net.Index
modifier.DeleteAll();
reader = modifier.GetReader();
- Assert.AreEqual(0, reader.NumDocs);
+ Assert.AreEqual(0, reader.GetNumDocs());
reader.Close();
@@ -486,7 +486,7 @@ namespace Lucene.Net.Index
// Validate that the docs are still there
reader = IndexReader.Open(dir, true);
- Assert.AreEqual(7, reader.NumDocs);
+ Assert.AreEqual(7, reader.GetNumDocs());
reader.Close();
dir.Close();
Modified: incubator/lucene.net/trunk/test/core/Index/TestIndexWriterExceptions.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIndexWriterExceptions.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIndexWriterExceptions.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIndexWriterExceptions.cs Wed Mar 21 06:04:26 2012
@@ -115,7 +115,7 @@ namespace Lucene.Net.Index
}
try
{
- _TestUtil.CheckIndex(writer.GetDirectory());
+ _TestUtil.CheckIndex(writer.Directory);
}
catch (System.IO.IOException ioe)
{
Modified: incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMergePolicy.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMergePolicy.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMergePolicy.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMergePolicy.cs Wed Mar 21 06:04:26 2012
@@ -42,7 +42,7 @@ namespace Lucene.Net.Index
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
writer.SetMaxBufferedDocs(10);
- writer.SetMergeFactor(10);
+ writer.MergeFactor = 10;
writer.SetMergePolicy(new LogDocMergePolicy(writer));
for (int i = 0; i < 100; i++)
@@ -62,7 +62,7 @@ namespace Lucene.Net.Index
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
writer.SetMaxBufferedDocs(10);
- writer.SetMergeFactor(10);
+ writer.MergeFactor = 10;
writer.SetMergePolicy(new LogDocMergePolicy(writer));
bool noOverMerge = false;
@@ -88,7 +88,7 @@ namespace Lucene.Net.Index
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
writer.SetMaxBufferedDocs(10);
- writer.SetMergeFactor(10);
+ writer.MergeFactor = 10;
LogDocMergePolicy mp = new LogDocMergePolicy(writer);
mp.MinMergeDocs = 100;
writer.SetMergePolicy(mp);
@@ -102,7 +102,7 @@ namespace Lucene.Net.Index
writer.SetMaxBufferedDocs(10);
writer.SetMergePolicy(mp);
mp.MinMergeDocs = 100;
- writer.SetMergeFactor(10);
+ writer.MergeFactor = 10;
CheckInvariants(writer);
}
@@ -117,7 +117,7 @@ namespace Lucene.Net.Index
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
writer.SetMaxBufferedDocs(10);
- writer.SetMergeFactor(100);
+ writer.MergeFactor = 100;
writer.SetMergePolicy(new LogDocMergePolicy(writer));
for (int i = 0; i < 250; i++)
@@ -126,7 +126,7 @@ namespace Lucene.Net.Index
CheckInvariants(writer);
}
- writer.SetMergeFactor(5);
+ writer.MergeFactor = 5;
// merge policy only fixes segments on levels where merges
// have been triggered, so check invariants after all adds
@@ -147,7 +147,7 @@ namespace Lucene.Net.Index
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetMaxBufferedDocs(101);
- writer.SetMergeFactor(101);
+ writer.MergeFactor = 101;
writer.SetMergePolicy(new LogDocMergePolicy(writer));
// leftmost* segment has 1 doc
@@ -163,12 +163,12 @@ namespace Lucene.Net.Index
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetMaxBufferedDocs(101);
- writer.SetMergeFactor(101);
+ writer.MergeFactor = 101;
writer.SetMergePolicy(new LogDocMergePolicy(writer));
}
writer.SetMaxBufferedDocs(10);
- writer.SetMergeFactor(10);
+ writer.MergeFactor = 10;
// merge policy only fixes segments on levels where merges
// have been triggered, so check invariants after all adds
@@ -199,7 +199,7 @@ namespace Lucene.Net.Index
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetMergePolicy(new LogDocMergePolicy(writer));
writer.SetMaxBufferedDocs(10);
- writer.SetMergeFactor(100);
+ writer.MergeFactor = 100;
for (int i = 0; i < 250; i++)
{
@@ -215,7 +215,7 @@ namespace Lucene.Net.Index
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
writer.SetMergePolicy(new LogDocMergePolicy(writer));
writer.SetMaxBufferedDocs(10);
- writer.SetMergeFactor(5);
+ writer.MergeFactor = 5;
// merge factor is changed, so check invariants after all adds
for (int i = 0; i < 10; i++)
@@ -226,7 +226,7 @@ namespace Lucene.Net.Index
((ConcurrentMergeScheduler)writer.MergeScheduler).Sync();
writer.Commit();
CheckInvariants(writer);
- Assert.AreEqual(10, writer.MaxDoc);
+ Assert.AreEqual(10, writer.MaxDoc());
writer.Close();
}
@@ -242,7 +242,7 @@ namespace Lucene.Net.Index
{
writer.WaitForMerges();
int maxBufferedDocs = writer.GetMaxBufferedDocs();
- int mergeFactor = writer.GetMergeFactor();
+ int mergeFactor = writer.MergeFactor;
int maxMergeDocs = writer.MaxMergeDocs;
int ramSegmentCount = writer.GetNumBufferedDocuments();
Modified: incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMerging.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMerging.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMerging.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMerging.cs Wed Mar 21 06:04:26 2012
@@ -63,7 +63,7 @@ namespace Lucene.Net.Index
Directory merged = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.SetMergeFactor(2);
+ writer.MergeFactor = 2;
writer.AddIndexesNoOptimize(new []{indexA, indexB});
writer.Optimize();
@@ -100,7 +100,7 @@ namespace Lucene.Net.Index
{
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.SetMergeFactor(2);
+ writer.MergeFactor = 2;
writer.SetMaxBufferedDocs(2);
for (int i = start; i < (start + numDocs); i++)
Modified: incubator/lucene.net/trunk/test/core/Index/TestIndexWriterReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIndexWriterReader.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIndexWriterReader.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIndexWriterReader.cs Wed Mar 21 06:04:26 2012
@@ -198,7 +198,7 @@ namespace Lucene.Net.Index
// get a reader
IndexReader r1 = writer.GetReader();
- Assert.IsTrue(r1.IsCurrent);
+ Assert.IsTrue(r1.IsCurrent());
System.String id10 = r1.Document(10).GetField("id").StringValue;
@@ -206,20 +206,20 @@ namespace Lucene.Net.Index
newDoc.RemoveField("id");
newDoc.Add(new Field("id", System.Convert.ToString(8000), Field.Store.YES, Field.Index.NOT_ANALYZED));
writer.UpdateDocument(new Term("id", id10), newDoc);
- Assert.IsFalse(r1.IsCurrent);
+ Assert.IsFalse(r1.IsCurrent());
IndexReader r2 = writer.GetReader();
- Assert.IsTrue(r2.IsCurrent);
+ Assert.IsTrue(r2.IsCurrent());
Assert.AreEqual(0, Count(new Term("id", id10), r2));
Assert.AreEqual(1, Count(new Term("id", System.Convert.ToString(8000)), r2));
r1.Close();
writer.Close();
- Assert.IsTrue(r2.IsCurrent);
+ Assert.IsTrue(r2.IsCurrent());
IndexReader r3 = IndexReader.Open(dir1, true);
- Assert.IsTrue(r3.IsCurrent);
- Assert.IsTrue(r2.IsCurrent);
+ Assert.IsTrue(r3.IsCurrent());
+ Assert.IsTrue(r2.IsCurrent());
Assert.AreEqual(0, Count(new Term("id", id10), r3));
Assert.AreEqual(1, Count(new Term("id", System.Convert.ToString(8000)), r3));
@@ -227,13 +227,13 @@ namespace Lucene.Net.Index
Document doc = new Document();
doc.Add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
writer.AddDocument(doc);
- Assert.IsTrue(r2.IsCurrent);
- Assert.IsTrue(r3.IsCurrent);
+ Assert.IsTrue(r2.IsCurrent());
+ Assert.IsTrue(r3.IsCurrent());
writer.Close();
- Assert.IsFalse(r2.IsCurrent);
- Assert.IsTrue(!r3.IsCurrent);
+ Assert.IsFalse(r2.IsCurrent());
+ Assert.IsTrue(!r3.IsCurrent());
r2.Close();
r3.Close();
@@ -265,16 +265,16 @@ namespace Lucene.Net.Index
writer2.Close();
IndexReader r0 = writer.GetReader();
- Assert.IsTrue(r0.IsCurrent);
+ Assert.IsTrue(r0.IsCurrent());
writer.AddIndexesNoOptimize(new Directory[]{dir2});
- Assert.IsFalse(r0.IsCurrent);
+ Assert.IsFalse(r0.IsCurrent());
r0.Close();
IndexReader r1 = writer.GetReader();
- Assert.IsTrue(r1.IsCurrent);
+ Assert.IsTrue(r1.IsCurrent());
writer.Commit();
- Assert.IsFalse(r1.IsCurrent);
+ Assert.IsFalse(r1.IsCurrent());
Assert.AreEqual(200, r1.MaxDoc);
@@ -404,7 +404,7 @@ namespace Lucene.Net.Index
_TestUtil.CheckIndex(mainDir);
IndexReader reader = IndexReader.Open(mainDir, true);
- Assert.AreEqual(addDirThreads.count.IntValue(), reader.NumDocs);
+ Assert.AreEqual(addDirThreads.count.IntValue(), reader.GetNumDocs());
//Assert.AreEqual(100 + numDirs * (3 * numIter / 4) * addDirThreads.NUM_THREADS
// * addDirThreads.NUM_INIT_DOCS, reader.numDocs());
reader.Close();
@@ -851,8 +851,8 @@ namespace Lucene.Net.Index
// Enroll warmer
MyWarmer warmer = new MyWarmer();
- writer.SetMergedSegmentWarmer(warmer);
- writer.SetMergeFactor(2);
+ writer.MergedSegmentWarmer = warmer;
+ writer.MergeFactor = 2;
writer.SetMaxBufferedDocs(2);
for (int i = 0; i < 100; i++)
@@ -888,7 +888,7 @@ namespace Lucene.Net.Index
_TestUtil.CheckIndex(dir1);
writer.Commit();
_TestUtil.CheckIndex(dir1);
- Assert.AreEqual(100, r1.NumDocs);
+ Assert.AreEqual(100, r1.GetNumDocs());
for (int i = 0; i < 10; i++)
{
@@ -902,7 +902,7 @@ namespace Lucene.Net.Index
r1.Close();
r1 = r2;
}
- Assert.AreEqual(110, r1.NumDocs);
+ Assert.AreEqual(110, r1.GetNumDocs());
writer.Close();
r1.Close();
dir1.Close();
@@ -925,7 +925,7 @@ namespace Lucene.Net.Index
_TestUtil.CheckIndex(dir1);
// reader should remain usable even after IndexWriter is closed:
- Assert.AreEqual(100, r.NumDocs);
+ Assert.AreEqual(100, r.GetNumDocs());
Query q = new TermQuery(new Term("indexname", "test"));
Assert.AreEqual(100, new IndexSearcher(r).Search(q, 10).TotalHits);
@@ -949,7 +949,7 @@ namespace Lucene.Net.Index
MockRAMDirectory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
writer.SetInfoStream(infoStream);
- writer.SetMergeFactor(2);
+ writer.MergeFactor = 2;
// create the index
CreateIndexNoClose(false, "test", writer);
@@ -1028,7 +1028,7 @@ namespace Lucene.Net.Index
MockRAMDirectory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
writer.SetInfoStream(infoStream);
- writer.SetMergeFactor(2);
+ writer.MergeFactor = 2;
// create the index
CreateIndexNoClose(false, "test", writer);
@@ -1090,7 +1090,7 @@ namespace Lucene.Net.Index
Directory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
writer.SetInfoStream(infoStream);
- writer.SetMergeFactor(2);
+ writer.MergeFactor = 2;
// create the index
CreateIndexNoClose(false, "test", writer);
@@ -1159,7 +1159,7 @@ namespace Lucene.Net.Index
w.Close();
r.Close();
r = IndexReader.Open(dir, true);
- Assert.AreEqual(1, r.NumDocs);
+ Assert.AreEqual(1, r.GetNumDocs());
Assert.IsFalse(r.HasDeletions);
r.Close();
dir.Close();
@@ -1180,17 +1180,17 @@ namespace Lucene.Net.Index
id.SetValue("1");
w.AddDocument(doc);
IndexReader r = w.GetReader();
- Assert.AreEqual(2, r.NumDocs);
+ Assert.AreEqual(2, r.GetNumDocs());
r.Close();
w.DeleteDocuments(new Term("id", "0"));
r = w.GetReader();
- Assert.AreEqual(1, r.NumDocs);
+ Assert.AreEqual(1, r.GetNumDocs());
r.Close();
w.DeleteDocuments(new Term("id", "1"));
r = w.GetReader();
- Assert.AreEqual(0, r.NumDocs);
+ Assert.AreEqual(0, r.GetNumDocs());
r.Close();
w.Close();
@@ -1214,7 +1214,7 @@ namespace Lucene.Net.Index
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
w.SetMaxBufferedDocs(2);
w.GetReader().Close();
- w.SetMergedSegmentWarmer(new AnonymousIndexReaderWarmer());
+ w.MergedSegmentWarmer = new AnonymousIndexReaderWarmer();
Document doc = new Document();
doc.Add(new Field("foo", "bar", Field.Store.YES, Field.Index.NOT_ANALYZED));
Modified: incubator/lucene.net/trunk/test/core/Index/TestIsCurrent.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIsCurrent.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIsCurrent.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIsCurrent.cs Wed Mar 21 06:04:26 2012
@@ -75,7 +75,7 @@ namespace Lucene.Net.Index
// assert index has a document and reader is up2date
Assert.AreEqual(1, writer.NumDocs(), "One document should be in the index");
- Assert.IsTrue(reader.IsCurrent, "Document added, reader should be stale ");
+ Assert.IsTrue(reader.IsCurrent(), "Document added, reader should be stale ");
// remove document
Term idTerm = new Term("UUID", "1");
@@ -84,7 +84,7 @@ namespace Lucene.Net.Index
// assert document has been deleted (index changed), reader is stale
Assert.AreEqual(0, writer.NumDocs(), "Document should be removed");
- Assert.IsFalse(reader.IsCurrent, "Reader should be stale");
+ Assert.IsFalse(reader.IsCurrent(), "Reader should be stale");
reader.Close();
}
@@ -103,7 +103,7 @@ namespace Lucene.Net.Index
// assert index has a document and reader is up2date
Assert.AreEqual(1, writer.NumDocs(), "One document should be in the index");
- Assert.IsTrue(reader.IsCurrent, "Document added, reader should be stale ");
+ Assert.IsTrue(reader.IsCurrent(), "Document added, reader should be stale ");
// remove all documents
writer.DeleteAll();
@@ -111,7 +111,7 @@ namespace Lucene.Net.Index
// assert document has been deleted (index changed), reader is stale
Assert.AreEqual(0, writer.NumDocs(), "Document should be removed");
- Assert.IsFalse(reader.IsCurrent, "Reader should be stale");
+ Assert.IsFalse(reader.IsCurrent(), "Reader should be stale");
reader.Close();
}
Modified: incubator/lucene.net/trunk/test/core/Index/TestNRTReaderWithThreads.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestNRTReaderWithThreads.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestNRTReaderWithThreads.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestNRTReaderWithThreads.cs Wed Mar 21 06:04:26 2012
@@ -43,7 +43,7 @@ namespace Lucene.Net.Index
writer.UseCompoundFile = false;
IndexReader reader = writer.GetReader(); // start pooling readers
reader.Close();
- writer.SetMergeFactor(2);
+ writer.MergeFactor = 2;
writer.SetMaxBufferedDocs(10);
RunThread[] indexThreads = new RunThread[4];
for (int x = 0; x < indexThreads.Length; x++)
Modified: incubator/lucene.net/trunk/test/core/Index/TestNorms.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestNorms.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestNorms.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestNorms.cs Wed Mar 21 06:04:26 2012
@@ -138,7 +138,7 @@ namespace Lucene.Net.Index
CreateIndex(dir3);
IndexWriter iw = new IndexWriter(dir3, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
iw.SetMaxBufferedDocs(5);
- iw.SetMergeFactor(3);
+ iw.MergeFactor = 3;
iw.AddIndexesNoOptimize(new Directory[]{dir1, dir2});
iw.Optimize();
iw.Close();
@@ -156,7 +156,7 @@ namespace Lucene.Net.Index
// now with optimize
iw = new IndexWriter(dir3, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
iw.SetMaxBufferedDocs(5);
- iw.SetMergeFactor(3);
+ iw.MergeFactor = 3;
iw.Optimize();
iw.Close();
VerifyIndex(dir3);
@@ -185,7 +185,7 @@ namespace Lucene.Net.Index
{
IndexWriter iw = new IndexWriter(dir, anlzr, true, IndexWriter.MaxFieldLength.LIMITED);
iw.SetMaxBufferedDocs(5);
- iw.SetMergeFactor(3);
+ iw.MergeFactor = 3;
iw.SetSimilarity(similarityOne);
iw.UseCompoundFile = true;
iw.Close();
@@ -234,7 +234,7 @@ namespace Lucene.Net.Index
{
IndexWriter iw = new IndexWriter(dir, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
iw.SetMaxBufferedDocs(5);
- iw.SetMergeFactor(3);
+ iw.MergeFactor = 3;
iw.SetSimilarity(similarityOne);
iw.UseCompoundFile = compound;
for (int i = 0; i < ndocs; i++)
Modified: incubator/lucene.net/trunk/test/core/Index/TestOmitTf.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestOmitTf.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestOmitTf.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestOmitTf.cs Wed Mar 21 06:04:26 2012
@@ -306,7 +306,7 @@ namespace Lucene.Net.Index
Analyzer analyzer = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
writer.SetMaxBufferedDocs(3);
- writer.SetMergeFactor(2);
+ writer.MergeFactor = 2;
Document d = new Document();
// this field will have Tf
@@ -361,7 +361,7 @@ namespace Lucene.Net.Index
Analyzer analyzer = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
writer.SetMaxBufferedDocs(10);
- writer.SetMergeFactor(2);
+ writer.MergeFactor = 2;
Document d = new Document();
// this field will have Tf
@@ -412,7 +412,7 @@ namespace Lucene.Net.Index
Analyzer analyzer = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
writer.SetMaxBufferedDocs(3);
- writer.SetMergeFactor(2);
+ writer.MergeFactor = 2;
writer.UseCompoundFile = false;
Document d = new Document();
@@ -444,7 +444,7 @@ namespace Lucene.Net.Index
Directory dir = new MockRAMDirectory();
Analyzer analyzer = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
- writer.SetMergeFactor(2);
+ writer.MergeFactor = 2;
writer.SetMaxBufferedDocs(2);
writer.SetSimilarity(new SimpleSimilarity());
@@ -550,10 +550,11 @@ namespace Lucene.Net.Index
{
this.docBase = docBase;
}
- public override bool AcceptsDocsOutOfOrder()
- {
- return true;
- }
+
+ public override bool AcceptsDocsOutOfOrder
+ {
+ get { return true; }
+ }
}
}
}
\ No newline at end of file
Modified: incubator/lucene.net/trunk/test/core/Index/TestParallelReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestParallelReader.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestParallelReader.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestParallelReader.cs Wed Mar 21 06:04:26 2012
@@ -146,21 +146,21 @@ namespace Lucene.Net.Index
pr.Add(IndexReader.Open(dir1, false));
pr.Add(IndexReader.Open(dir2, false));
- Assert.IsTrue(pr.IsCurrent);
+ Assert.IsTrue(pr.IsCurrent());
IndexReader modifier = IndexReader.Open(dir1, false);
modifier.SetNorm(0, "f1", 100);
modifier.Close();
// one of the two IndexReaders which ParallelReader is using
// is not current anymore
- Assert.IsFalse(pr.IsCurrent);
+ Assert.IsFalse(pr.IsCurrent());
modifier = IndexReader.Open(dir2, false);
modifier.SetNorm(0, "f3", 100);
modifier.Close();
// now both are not current anymore
- Assert.IsFalse(pr.IsCurrent);
+ Assert.IsFalse(pr.IsCurrent());
}
[Test]
@@ -186,7 +186,7 @@ namespace Lucene.Net.Index
ParallelReader pr = new ParallelReader();
pr.Add(IndexReader.Open(dir1, false));
pr.Add(IndexReader.Open(dir2, false));
- Assert.IsFalse(pr.IsOptimized);
+ Assert.IsFalse(pr.IsOptimized());
pr.Close();
modifier = new IndexWriter(dir1, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
@@ -197,7 +197,7 @@ namespace Lucene.Net.Index
pr.Add(IndexReader.Open(dir1, false));
pr.Add(IndexReader.Open(dir2, false));
// just one of the two indexes are optimized
- Assert.IsFalse(pr.IsOptimized);
+ Assert.IsFalse(pr.IsOptimized());
pr.Close();
@@ -209,7 +209,7 @@ namespace Lucene.Net.Index
pr.Add(IndexReader.Open(dir1, false));
pr.Add(IndexReader.Open(dir2, false));
// now both indexes are optimized
- Assert.IsTrue(pr.IsOptimized);
+ Assert.IsTrue(pr.IsOptimized());
pr.Close();
}
Modified: incubator/lucene.net/trunk/test/core/Index/TestRollback.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestRollback.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestRollback.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestRollback.cs Wed Mar 21 06:04:26 2012
@@ -61,7 +61,7 @@ namespace Lucene.Net.Index
w.Rollback();
IndexReader r = IndexReader.Open(dir, true);
- Assert.AreEqual(5, r.NumDocs, "index should contain same number of docs post rollback");
+ Assert.AreEqual(5, r.GetNumDocs(), "index should contain same number of docs post rollback");
r.Close();
dir.Close();
}
Modified: incubator/lucene.net/trunk/test/core/Index/TestSegmentMerger.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestSegmentMerger.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestSegmentMerger.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestSegmentMerger.cs Wed Mar 21 06:04:26 2012
@@ -95,7 +95,7 @@ namespace Lucene.Net.Index
//Should be able to open a new SegmentReader against the new directory
SegmentReader mergedReader = SegmentReader.Get(true, new SegmentInfo(mergedSegment, docsMerged, mergedDir, false, true), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
Assert.IsTrue(mergedReader != null);
- Assert.IsTrue(mergedReader.NumDocs == 2);
+ Assert.IsTrue(mergedReader.GetNumDocs() == 2);
Document newDoc1 = mergedReader.Document(0);
Assert.IsTrue(newDoc1 != null);
//There are 2 unstored fields on the document
@@ -113,7 +113,7 @@ namespace Lucene.Net.Index
//System.out.println("stored size: " + stored.size());
Assert.IsTrue(stored.Count == 3, "We do not have 3 fields that were indexed with term vector");
- TermFreqVector vector = mergedReader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
+ ITermFreqVector vector = mergedReader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
Assert.IsTrue(vector != null);
System.String[] terms = vector.GetTerms();
Assert.IsTrue(terms != null);
Modified: incubator/lucene.net/trunk/test/core/Index/TestSegmentReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestSegmentReader.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestSegmentReader.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestSegmentReader.cs Wed Mar 21 06:04:26 2012
@@ -71,7 +71,7 @@ namespace Lucene.Net.Index
[Test]
public virtual void TestDocument()
{
- Assert.IsTrue(reader.NumDocs == 1);
+ Assert.IsTrue(reader.GetNumDocs() == 1);
Assert.IsTrue(reader.MaxDoc >= 1);
Document result = reader.Document(0);
Assert.IsTrue(result != null);
@@ -94,11 +94,11 @@ namespace Lucene.Net.Index
SegmentInfo info = DocHelper.WriteDoc(dir, docToDelete);
SegmentReader deleteReader = SegmentReader.Get(false, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
Assert.IsTrue(deleteReader != null);
- Assert.IsTrue(deleteReader.NumDocs == 1);
+ Assert.IsTrue(deleteReader.GetNumDocs() == 1);
deleteReader.DeleteDocument(0);
Assert.IsTrue(deleteReader.IsDeleted(0) == true);
Assert.IsTrue(deleteReader.HasDeletions == true);
- Assert.IsTrue(deleteReader.NumDocs == 0);
+ Assert.IsTrue(deleteReader.GetNumDocs() == 0);
}
[Test]
@@ -212,7 +212,7 @@ namespace Lucene.Net.Index
[Test]
public virtual void TestTermVectors()
{
- TermFreqVector result = reader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
+ ITermFreqVector result = reader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
Assert.IsTrue(result != null);
System.String[] terms = result.GetTerms();
int[] freqs = result.GetTermFrequencies();
@@ -225,7 +225,7 @@ namespace Lucene.Net.Index
Assert.IsTrue(freq > 0);
}
- TermFreqVector[] results = reader.GetTermFreqVectors(0);
+ ITermFreqVector[] results = reader.GetTermFreqVectors(0);
Assert.IsTrue(results != null);
Assert.IsTrue(results.Length == 3, "We do not have 3 term freq vectors, we have: " + results.Length);
}
Modified: incubator/lucene.net/trunk/test/core/Index/TestStressIndexing2.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestStressIndexing2.cs?rev=1303294&r1=1303293&r2=1303294&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestStressIndexing2.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestStressIndexing2.cs Wed Mar 21 06:04:26 2012
@@ -172,7 +172,7 @@ namespace Lucene.Net.Index
***/
// force many merges
- w.SetMergeFactor(mergeFactor);
+ w.MergeFactor = mergeFactor;
w.SetRAMBufferSizeMB(.1);
w.SetMaxBufferedDocs(maxBufferedDocs);
@@ -224,7 +224,7 @@ namespace Lucene.Net.Index
w.UseCompoundFile = false;
// force many merges
- w.SetMergeFactor(mergeFactor);
+ w.MergeFactor = mergeFactor;
w.SetRAMBufferSizeMB(.1);
w.SetMaxBufferedDocs(maxBufferedDocs);
@@ -287,7 +287,7 @@ namespace Lucene.Net.Index
//SupportClass.CollectionsHelper.Sort(fields, fieldNameComparator);
Document d1 = new Document();
- d1.SetBoost(d.Boost);
+ d1.Boost = d.Boost;
for (int i = 0; i < fields.Count; i++)
{
d1.Add((IFieldable) fields[i]);
@@ -318,8 +318,8 @@ namespace Lucene.Net.Index
public static void VerifyEquals(IndexReader r1, IndexReader r2, System.String idField)
{
- Assert.AreEqual(r1.NumDocs, r2.NumDocs);
- bool hasDeletes = !(r1.MaxDoc == r2.MaxDoc && r1.NumDocs == r1.MaxDoc);
+ Assert.AreEqual(r1.GetNumDocs(), r2.GetNumDocs());
+ bool hasDeletes = !(r1.MaxDoc == r2.MaxDoc && r1.GetNumDocs() == r1.MaxDoc);
int[] r2r1 = new int[r2.MaxDoc]; // r2 id to r1 id mapping
@@ -375,7 +375,7 @@ namespace Lucene.Net.Index
catch (System.Exception e)
{
System.Console.Out.WriteLine("FAILED id=" + term + " id1=" + id1 + " id2=" + id2);
- TermFreqVector[] tv1 = r1.GetTermFreqVectors(id1);
+ ITermFreqVector[] tv1 = r1.GetTermFreqVectors(id1);
System.Console.Out.WriteLine(" d1=" + tv1);
if (tv1 != null)
for (int i = 0; i < tv1.Length; i++)
@@ -383,7 +383,7 @@ namespace Lucene.Net.Index
System.Console.Out.WriteLine(" " + i + ": " + tv1[i]);
}
- TermFreqVector[] tv2 = r2.GetTermFreqVectors(id2);
+ ITermFreqVector[] tv2 = r2.GetTermFreqVectors(id2);
System.Console.Out.WriteLine(" d2=" + tv2);
if (tv2 != null)
for (int i = 0; i < tv2.Length; i++)
@@ -403,8 +403,8 @@ namespace Lucene.Net.Index
TermEnum termEnum2 = r2.Terms(new Term("", ""));
// pack both doc and freq into single element for easy sorting
- long[] info1 = new long[r1.NumDocs];
- long[] info2 = new long[r2.NumDocs];
+ long[] info1 = new long[r1.GetNumDocs()];
+ long[] info2 = new long[r2.GetNumDocs()];
for (; ; )
{
@@ -517,7 +517,7 @@ namespace Lucene.Net.Index
}
}
- public static void VerifyEquals(TermFreqVector[] d1, TermFreqVector[] d2)
+ public static void VerifyEquals(ITermFreqVector[] d1, ITermFreqVector[] d2)
{
if (d1 == null)
{
@@ -529,14 +529,14 @@ namespace Lucene.Net.Index
Assert.AreEqual(d1.Length, d2.Length);
for (int i = 0; i < d1.Length; i++)
{
- TermFreqVector v1 = d1[i];
- TermFreqVector v2 = d2[i];
+ ITermFreqVector v1 = d1[i];
+ ITermFreqVector v2 = d2[i];
if (v1 == null || v2 == null)
{
System.Console.Out.WriteLine("v1=" + v1 + " v2=" + v2 + " i=" + i + " of " + d1.Length);
}
- Assert.AreEqual(v1.Size(), v2.Size());
- int numTerms = v1.Size();
+ Assert.AreEqual(v1.Size, v2.Size);
+ int numTerms = v1.Size;
System.String[] terms1 = v1.GetTerms();
System.String[] terms2 = v2.GetTerms();
int[] freq1 = v1.GetTermFrequencies();