You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by si...@apache.org on 2011/02/09 10:36:03 UTC
svn commit: r1068809 [16/36] - in /lucene/dev/branches/docvalues: ./
dev-tools/eclipse/ dev-tools/idea/.idea/ dev-tools/idea/.idea/copyright/
dev-tools/idea/lucene/ dev-tools/idea/lucene/contrib/ant/
dev-tools/idea/lucene/contrib/queryparser/ dev-tools...
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java Wed Feb 9 09:35:27 2011
@@ -32,6 +32,7 @@ import org.apache.lucene.index.IndexWrit
import org.apache.lucene.index.SegmentReader.Norm;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.Similarity;
+import org.apache.lucene.search.SimilarityProvider;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
@@ -42,14 +43,15 @@ public class TestIndexReaderCloneNorms e
private class SimilarityOne extends DefaultSimilarity {
@Override
- public float lengthNorm(String fieldName, int numTerms) {
- return 1;
+ public float computeNorm(String fieldName, FieldInvertState state) {
+ // disable length norm
+ return state.getBoost();
}
}
private static final int NUM_FIELDS = 10;
- private Similarity similarityOne;
+ private SimilarityProvider similarityOne;
private Analyzer anlzr;
@@ -202,19 +204,20 @@ public class TestIndexReaderCloneNorms e
IndexReader reader4C = (IndexReader) reader3C.clone();
SegmentReader segmentReader4C = getOnlySegmentReader(reader4C);
assertEquals(4, reader3CCNorm.bytesRef().get());
- reader4C.setNorm(5, "field1", 0.33f);
+ Similarity sim = new DefaultSimilarity().get("field1");
+ reader4C.setNorm(5, "field1", sim.encodeNormValue(0.33f));
// generate a cannot update exception in reader1
try {
- reader3C.setNorm(1, "field1", 0.99f);
+ reader3C.setNorm(1, "field1", sim.encodeNormValue(0.99f));
fail("did not hit expected exception");
} catch (Exception ex) {
// expected
}
// norm values should be different
- assertTrue(Similarity.getDefault().decodeNormValue(segmentReader3C.norms("field1")[5])
- != Similarity.getDefault().decodeNormValue(segmentReader4C.norms("field1")[5]));
+ assertTrue(sim.decodeNormValue(segmentReader3C.norms("field1")[5])
+ != sim.decodeNormValue(segmentReader4C.norms("field1")[5]));
Norm reader4CCNorm = segmentReader4C.norms.get("field1");
assertEquals(3, reader3CCNorm.bytesRef().get());
assertEquals(1, reader4CCNorm.bytesRef().get());
@@ -222,7 +225,7 @@ public class TestIndexReaderCloneNorms e
IndexReader reader5C = (IndexReader) reader4C.clone();
SegmentReader segmentReader5C = getOnlySegmentReader(reader5C);
Norm reader5CCNorm = segmentReader5C.norms.get("field1");
- reader5C.setNorm(5, "field1", 0.7f);
+ reader5C.setNorm(5, "field1", sim.encodeNormValue(0.7f));
assertEquals(1, reader5CCNorm.bytesRef().get());
reader5C.close();
@@ -236,7 +239,7 @@ public class TestIndexReaderCloneNorms e
private void createIndex(Random random, Directory dir) throws IOException {
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.CREATE)
- .setMaxBufferedDocs(5).setSimilarity(similarityOne));
+ .setMaxBufferedDocs(5).setSimilarityProvider(similarityOne).setMergePolicy(newLogMergePolicy()));
LogMergePolicy lmp = (LogMergePolicy) iw.getConfig().getMergePolicy();
lmp.setMergeFactor(3);
lmp.setUseCompoundFile(true);
@@ -255,8 +258,9 @@ public class TestIndexReaderCloneNorms e
// System.out.println(" and: for "+k+" from "+newNorm+" to "+origNorm);
modifiedNorms.set(i, Float.valueOf(newNorm));
modifiedNorms.set(k, Float.valueOf(origNorm));
- ir.setNorm(i, "f" + 1, newNorm);
- ir.setNorm(k, "f" + 1, origNorm);
+ Similarity sim = new DefaultSimilarity().get("f" + 1);
+ ir.setNorm(i, "f" + 1, sim.encodeNormValue(newNorm));
+ ir.setNorm(k, "f" + 1, sim.encodeNormValue(origNorm));
// System.out.println("setNorm i: "+i);
// break;
}
@@ -276,7 +280,8 @@ public class TestIndexReaderCloneNorms e
assertEquals("number of norms mismatches", numDocNorms, b.length);
ArrayList<Float> storedNorms = (i == 1 ? modifiedNorms : norms);
for (int j = 0; j < b.length; j++) {
- float norm = Similarity.getDefault().decodeNormValue(b[j]);
+ Similarity sim = new DefaultSimilarity().get(field);
+ float norm = sim.decodeNormValue(b[j]);
float norm1 = storedNorms.get(j).floatValue();
assertEquals("stored norm value of " + field + " for doc " + j + " is "
+ norm + " - a mismatch!", norm, norm1, 0.000001);
@@ -288,7 +293,7 @@ public class TestIndexReaderCloneNorms e
throws IOException {
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.APPEND)
- .setMaxBufferedDocs(5).setSimilarity(similarityOne);
+ .setMaxBufferedDocs(5).setSimilarityProvider(similarityOne).setMergePolicy(newLogMergePolicy());
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setMergeFactor(3);
lmp.setUseCompoundFile(compound);
@@ -302,7 +307,7 @@ public class TestIndexReaderCloneNorms e
// create the next document
private Document newDoc() {
Document d = new Document();
- float boost = nextNorm();
+ float boost = nextNorm("anyfield"); // in this test the same similarity is used for all fields so it does not matter what field is passed
for (int i = 0; i < 10; i++) {
Field f = newField("f" + i, "v" + i, Store.NO, Index.NOT_ANALYZED);
f.setBoost(boost);
@@ -312,11 +317,12 @@ public class TestIndexReaderCloneNorms e
}
// return unique norm values that are unchanged by encoding/decoding
- private float nextNorm() {
+ private float nextNorm(String fname) {
float norm = lastNorm + normDelta;
+ Similarity sim = new DefaultSimilarity().get(fname);
do {
- float norm1 = Similarity.getDefault().decodeNormValue(
- Similarity.getDefault().encodeNormValue(norm));
+ float norm1 = sim.decodeNormValue(
+ sim.encodeNormValue(norm));
if (norm1 > lastNorm) {
// System.out.println(norm1+" > "+lastNorm);
norm = norm1;
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java Wed Feb 9 09:35:27 2011
@@ -35,8 +35,11 @@ import org.apache.lucene.document.Field.
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.Similarity;
+import org.apache.lucene.search.SimilarityProvider;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
@@ -171,7 +174,7 @@ public class TestIndexReaderReopen exten
private void doTestReopenWithCommit (Random random, Directory dir, boolean withReopen) throws IOException {
IndexWriter iwriter = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(
- OpenMode.CREATE).setMergeScheduler(new SerialMergeScheduler()));
+ OpenMode.CREATE).setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(newInOrderLogMergePolicy()));
iwriter.commit();
IndexReader reader = IndexReader.open(dir, false);
try {
@@ -614,8 +617,9 @@ public class TestIndexReaderReopen exten
IndexReader reader2 = reader1.reopen();
modifier = IndexReader.open(dir1, false);
- modifier.setNorm(1, "field1", 50);
- modifier.setNorm(1, "field2", 50);
+ SimilarityProvider sim = new DefaultSimilarity();
+ modifier.setNorm(1, "field1", sim.get("field1").encodeNormValue(50f));
+ modifier.setNorm(1, "field2", sim.get("field2").encodeNormValue(50f));
modifier.close();
IndexReader reader3 = reader2.reopen();
@@ -708,7 +712,8 @@ public class TestIndexReaderReopen exten
protected void modifyIndex(int i) throws IOException {
if (i % 3 == 0) {
IndexReader modifier = IndexReader.open(dir, false);
- modifier.setNorm(i, "field1", 50);
+ Similarity sim = new DefaultSimilarity().get("field1");
+ modifier.setNorm(i, "field1", sim.encodeNormValue(50f));
modifier.close();
} else if (i % 3 == 1) {
IndexReader modifier = IndexReader.open(dir, false);
@@ -768,14 +773,14 @@ public class TestIndexReaderReopen exten
// not synchronized
IndexReader refreshed = r.reopen();
- IndexSearcher searcher = new IndexSearcher(refreshed);
+ IndexSearcher searcher = newSearcher(refreshed);
ScoreDoc[] hits = searcher.search(
new TermQuery(new Term("field1", "a" + rnd.nextInt(refreshed.maxDoc()))),
null, 1000).scoreDocs;
if (hits.length > 0) {
searcher.doc(hits[0].doc);
}
-
+ searcher.close();
if (refreshed != r) {
refreshed.close();
}
@@ -976,7 +981,11 @@ public class TestIndexReaderReopen exten
static void modifyIndex(int i, Directory dir) throws IOException {
switch (i) {
case 0: {
+ if (VERBOSE) {
+ System.out.println("TEST: modify index");
+ }
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+ w.setInfoStream(VERBOSE ? System.out : null);
w.deleteDocuments(new Term("field2", "a11"));
w.deleteDocuments(new Term("field2", "b30"));
w.close();
@@ -984,9 +993,10 @@ public class TestIndexReaderReopen exten
}
case 1: {
IndexReader reader = IndexReader.open(dir, false);
- reader.setNorm(4, "field1", 123);
- reader.setNorm(44, "field2", 222);
- reader.setNorm(44, "field4", 22);
+ SimilarityProvider sim = new DefaultSimilarity();
+ reader.setNorm(4, "field1", sim.get("field1").encodeNormValue(123f));
+ reader.setNorm(44, "field2", sim.get("field2").encodeNormValue(222f));
+ reader.setNorm(44, "field4", sim.get("field4").encodeNormValue(22f));
reader.close();
break;
}
@@ -1007,8 +1017,9 @@ public class TestIndexReaderReopen exten
}
case 4: {
IndexReader reader = IndexReader.open(dir, false);
- reader.setNorm(5, "field1", 123);
- reader.setNorm(55, "field2", 222);
+ SimilarityProvider sim = new DefaultSimilarity();
+ reader.setNorm(5, "field1", sim.get("field1").encodeNormValue(123f));
+ reader.setNorm(55, "field2", sim.get("field2").encodeNormValue(222f));
reader.close();
break;
}
@@ -1200,7 +1211,6 @@ public class TestIndexReaderReopen exten
IndexReader r = IndexReader.open(dir, false);
assertEquals(0, r.numDocs());
- assertEquals(4, r.maxDoc());
Collection<IndexCommit> commits = IndexReader.listCommits(dir);
for (final IndexCommit commit : commits) {
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java Wed Feb 9 09:35:27 2011
@@ -43,7 +43,6 @@ import org.apache.lucene.analysis.tokena
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
-import org.apache.lucene.index.codecs.CodecProvider;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.Field.TermVector;
@@ -102,19 +101,12 @@ public class TestIndexWriter extends Luc
}
reader.close();
- // test doc count before segments are merged/index is optimized
- writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
- assertEquals(100, writer.maxDoc());
- writer.close();
-
reader = IndexReader.open(dir, true);
- assertEquals(100, reader.maxDoc());
assertEquals(60, reader.numDocs());
reader.close();
// optimize the index and check that the new doc count is correct
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
- assertEquals(100, writer.maxDoc());
assertEquals(60, writer.numDocs());
writer.optimize();
assertEquals(60, writer.maxDoc());
@@ -157,7 +149,7 @@ public class TestIndexWriter extends Luc
String[] startFiles = dir.listAll();
SegmentInfos infos = new SegmentInfos();
infos.read(dir);
- new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, CodecProvider.getDefault());
+ new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())).rollback();
String[] endFiles = dir.listAll();
Arrays.sort(startFiles);
@@ -261,7 +253,7 @@ public class TestIndexWriter extends Luc
public void testOptimizeTempSpaceUsage() throws IOException {
MockDirectoryWrapper dir = newDirectory();
- IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
+ IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10).setMergePolicy(newLogMergePolicy()));
if (VERBOSE) {
System.out.println("TEST: config1=" + writer.getConfig());
}
@@ -785,7 +777,7 @@ public class TestIndexWriter extends Luc
public void testHighFreqTerm() throws IOException {
MockDirectoryWrapper dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxFieldLength(100000000).setRAMBufferSizeMB(0.01));
+ TEST_VERSION_CURRENT, new MockAnalyzer()).setRAMBufferSizeMB(0.01));
// Massive doc that has 128 K a's
StringBuilder b = new StringBuilder(1024*1024);
for(int i=0;i<4096;i++) {
@@ -969,7 +961,7 @@ public class TestIndexWriter extends Luc
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer())
- .setMaxBufferedDocs(2);
+ .setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
IndexWriter iw = new IndexWriter(dir, conf);
Document document = new Document();
@@ -1011,7 +1003,7 @@ public class TestIndexWriter extends Luc
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer())
- .setMergeScheduler(new MyMergeScheduler()).setMaxBufferedDocs(2);
+ .setMergeScheduler(new MyMergeScheduler()).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setMaxMergeDocs(20);
lmp.setMergeFactor(2);
@@ -1034,11 +1026,11 @@ public class TestIndexWriter extends Luc
if (VERBOSE) {
System.out.println("TEST: iter=" + i);
}
- IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+ IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
writer.setInfoStream(VERBOSE ? System.out : null);
- LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
- lmp.setMergeFactor(2);
- lmp.setUseCompoundFile(false);
+ //LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
+ //lmp.setMergeFactor(2);
+ //lmp.setUseCompoundFile(false);
Document doc = new Document();
String contents = "aa bb cc dd ee ff gg hh ii jj kk";
@@ -1072,8 +1064,8 @@ public class TestIndexWriter extends Luc
if (0 == i % 4) {
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
- LogMergePolicy lmp2 = (LogMergePolicy) writer.getConfig().getMergePolicy();
- lmp2.setUseCompoundFile(false);
+ //LogMergePolicy lmp2 = (LogMergePolicy) writer.getConfig().getMergePolicy();
+ //lmp2.setUseCompoundFile(false);
writer.optimize();
writer.close();
}
@@ -1098,10 +1090,7 @@ public class TestIndexWriter extends Luc
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(2).
- // have to use compound file to prevent running out of
- // descripters when newDirectory returns a file-system
- // backed directory:
- setMergePolicy(newLogMergePolicy(false, 10))
+ setMergePolicy(newLogMergePolicy())
);
writer.setInfoStream(VERBOSE ? System.out : null);
@@ -1167,7 +1156,7 @@ public class TestIndexWriter extends Luc
reader.close();
// Reopen
- writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+ writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
writer.setInfoStream(VERBOSE ? System.out : null);
}
writer.close();
@@ -1237,30 +1226,7 @@ public class TestIndexWriter extends Luc
writer.close();
dir.close();
}
-
- // LUCENE-1084: test user-specified field length
- public void testUserSpecifiedMaxFieldLength() throws IOException {
- Directory dir = newDirectory();
-
- IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxFieldLength(100000));
-
- Document doc = new Document();
- StringBuilder b = new StringBuilder();
- for(int i=0;i<10000;i++)
- b.append(" a");
- b.append(" x");
- doc.add(newField("field", b.toString(), Field.Store.NO, Field.Index.ANALYZED));
- writer.addDocument(doc);
- writer.close();
-
- IndexReader reader = IndexReader.open(dir, true);
- Term t = new Term("field", "x");
- assertEquals(1, reader.docFreq(t));
- reader.close();
- dir.close();
- }
-
+
// LUCENE-325: test expungeDeletes, when 2 singular merges
// are required
public void testExpungeDeletes() throws IOException {
@@ -1268,8 +1234,8 @@ public class TestIndexWriter extends Luc
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer())
.setMaxBufferedDocs(2).setRAMBufferSizeMB(
- IndexWriterConfig.DISABLE_AUTO_FLUSH));
-
+ IndexWriterConfig.DISABLE_AUTO_FLUSH));
+ writer.setInfoStream(VERBOSE ? System.out : null);
Document document = new Document();
document = new Document();
@@ -1292,7 +1258,7 @@ public class TestIndexWriter extends Luc
assertEquals(8, ir.numDocs());
ir.close();
- writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+ writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
assertEquals(8, writer.numDocs());
assertEquals(10, writer.maxDoc());
writer.expungeDeletes();
@@ -1455,7 +1421,6 @@ public class TestIndexWriter extends Luc
w.close();
IndexReader ir = IndexReader.open(dir, true);
- assertEquals(1, ir.maxDoc());
assertEquals(0, ir.numDocs());
ir.close();
@@ -2034,7 +1999,6 @@ public class TestIndexWriter extends Luc
}
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2);
- ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
w = new IndexWriter(dir, conf);
Document doc = new Document();
@@ -2253,8 +2217,6 @@ public class TestIndexWriter extends Luc
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer()));
- LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
- lmp.setUseCompoundFile(false);
ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
writer.setInfoStream(new PrintStream(bos));
writer.addDocument(new Document());
@@ -2271,7 +2233,8 @@ public class TestIndexWriter extends Luc
final int NUM_THREADS = 5;
final double RUN_SEC = 0.5;
final Directory dir = newDirectory();
- final RandomIndexWriter w = new RandomIndexWriter(random, dir);
+ final RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(
+ TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
_TestUtil.reduceOpenFiles(w.w);
w.commit();
final AtomicBoolean failed = new AtomicBoolean();
@@ -2613,7 +2576,7 @@ public class TestIndexWriter extends Luc
count++;
}
}
- assertTrue("flush happened too quickly during " + (doIndexing ? "indexing" : "deleting") + " count=" + count, count > 2500);
+ assertTrue("flush happened too quickly during " + (doIndexing ? "indexing" : "deleting") + " count=" + count, count > 1500);
}
w.close();
dir.close();
@@ -2653,9 +2616,11 @@ public class TestIndexWriter extends Luc
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer())
- .setMaxBufferedDocs(2));
+ .setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
String[] files = dir.listAll();
+ writer.setInfoStream(VERBOSE ? System.out : null);
+
// Creating over empty dir should not create any files,
// or, at most the write.lock file
final int extraFileCount;
@@ -2677,9 +2642,10 @@ public class TestIndexWriter extends Luc
doc = new Document();
doc.add(newField("c", "val", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
writer.addDocument(doc);
+
// The second document should cause a flush.
- assertTrue("flush should have occurred and files created", dir.listAll().length > 5 + extraFileCount);
-
+ assertTrue("flush should have occurred and files should have been created", dir.listAll().length > 5 + extraFileCount);
+
// After rollback, IW should remove all files
writer.rollback();
assertEquals("no files should exist in the directory after rollback", 0, dir.listAll().length);
@@ -2851,7 +2817,7 @@ public class TestIndexWriter extends Luc
for(int x=0;x<2;x++) {
IndexReader r = w.getReader();
- IndexSearcher s = new IndexSearcher(r);
+ IndexSearcher s = newSearcher(r);
if (VERBOSE) {
System.out.println("TEST: cycle x=" + x + " r=" + r);
@@ -2867,6 +2833,7 @@ public class TestIndexWriter extends Luc
assertEquals("doc " + testID + ", field f" + fieldCount + " is wrong", docExp.get("f"+i), doc.get("f"+i));
}
}
+ s.close();
r.close();
w.optimize();
}
@@ -2878,7 +2845,7 @@ public class TestIndexWriter extends Luc
public void testNoUnwantedTVFiles() throws Exception {
Directory dir = newDirectory();
- IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setRAMBufferSizeMB(0.01));
+ IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setRAMBufferSizeMB(0.01).setMergePolicy(newLogMergePolicy()));
((LogMergePolicy) indexWriter.getConfig().getMergePolicy()).setUseCompoundFile(false);
String BIG="alskjhlaksjghlaksjfhalksvjepgjioefgjnsdfjgefgjhelkgjhqewlrkhgwlekgrhwelkgjhwelkgrhwlkejg";
@@ -2907,4 +2874,36 @@ public class TestIndexWriter extends Luc
dir.close();
}
+
+ public void testDeleteAllSlowly() throws Exception {
+ final Directory dir = newDirectory();
+ RandomIndexWriter w = new RandomIndexWriter(random, dir);
+ final int NUM_DOCS = 1000 * RANDOM_MULTIPLIER;
+ final List<Integer> ids = new ArrayList<Integer>(NUM_DOCS);
+ for(int id=0;id<NUM_DOCS;id++) {
+ ids.add(id);
+ }
+ Collections.shuffle(ids, random);
+ for(int id : ids) {
+ Document doc = new Document();
+ doc.add(newField("id", ""+id, Field.Index.NOT_ANALYZED));
+ w.addDocument(doc);
+ }
+ Collections.shuffle(ids, random);
+ int upto = 0;
+ while(upto < ids.size()) {
+ final int left = ids.size() - upto;
+ final int inc = Math.min(left, _TestUtil.nextInt(random, 1, 20));
+ final int limit = upto + inc;
+ while(upto < limit) {
+ w.deleteDocuments(new Term("id", ""+ids.get(upto++)));
+ }
+ final IndexReader r = w.getReader();
+ assertEquals(NUM_DOCS - upto, r.numDocs());
+ r.close();
+ }
+
+ w.close();
+ dir.close();
+ }
}
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterConfig.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterConfig.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterConfig.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterConfig.java Wed Feb 9 09:35:27 2011
@@ -17,7 +17,6 @@ package org.apache.lucene.index;
* limitations under the License.
*/
-import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
@@ -26,10 +25,9 @@ import java.util.Set;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.index.DocumentsWriter.IndexingChain;
-import org.apache.lucene.index.IndexWriter.IndexReaderWarmer;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.DefaultSimilarity;
-import org.apache.lucene.search.Similarity;
+import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
@@ -49,25 +47,16 @@ public class TestIndexWriterConfig exten
}
- private static final class MyWarmer extends IndexReaderWarmer {
- // Does not implement anything - used only for type checking on IndexWriterConfig.
-
- @Override
- public void warm(IndexReader reader) throws IOException {
- }
-
- }
-
@Test
public void testDefaults() throws Exception {
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
assertEquals(MockAnalyzer.class, conf.getAnalyzer().getClass());
assertNull(conf.getIndexCommit());
assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
- assertEquals(IndexWriterConfig.UNLIMITED_FIELD_LENGTH, conf.getMaxFieldLength());
assertEquals(ConcurrentMergeScheduler.class, conf.getMergeScheduler().getClass());
assertEquals(OpenMode.CREATE_OR_APPEND, conf.getOpenMode());
- assertTrue(Similarity.getDefault() == conf.getSimilarity());
+ // we don't need to assert this, it should be unspecified
+ assertTrue(IndexSearcher.getDefaultSimilarityProvider() == conf.getSimilarityProvider());
assertEquals(IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, conf.getTermIndexInterval());
assertEquals(IndexWriterConfig.getDefaultWriteLockTimeout(), conf.getWriteLockTimeout());
assertEquals(IndexWriterConfig.WRITE_LOCK_TIMEOUT, IndexWriterConfig.getDefaultWriteLockTimeout());
@@ -89,7 +78,7 @@ public class TestIndexWriterConfig exten
getters.add("getMaxFieldLength");
getters.add("getMergeScheduler");
getters.add("getOpenMode");
- getters.add("getSimilarity");
+ getters.add("getSimilarityProvider");
getters.add("getTermIndexInterval");
getters.add("getWriteLockTimeout");
getters.add("getDefaultWriteLockTimeout");
@@ -129,7 +118,6 @@ public class TestIndexWriterConfig exten
// Tests that the values of the constants does not change
assertEquals(1000, IndexWriterConfig.WRITE_LOCK_TIMEOUT);
assertEquals(32, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL);
- assertEquals(Integer.MAX_VALUE, IndexWriterConfig.UNLIMITED_FIELD_LENGTH);
assertEquals(-1, IndexWriterConfig.DISABLE_AUTO_FLUSH);
assertEquals(IndexWriterConfig.DISABLE_AUTO_FLUSH, IndexWriterConfig.DEFAULT_MAX_BUFFERED_DELETE_TERMS);
assertEquals(IndexWriterConfig.DISABLE_AUTO_FLUSH, IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS);
@@ -186,12 +174,13 @@ public class TestIndexWriterConfig exten
conf.setMergeScheduler(null);
assertEquals(ConcurrentMergeScheduler.class, conf.getMergeScheduler().getClass());
- // Test Similarity
- assertTrue(Similarity.getDefault() == conf.getSimilarity());
- conf.setSimilarity(new MySimilarity());
- assertEquals(MySimilarity.class, conf.getSimilarity().getClass());
- conf.setSimilarity(null);
- assertTrue(Similarity.getDefault() == conf.getSimilarity());
+ // Test Similarity:
+ // we shouldn't assert what the default is, just that it's not null.
+ assertTrue(IndexSearcher.getDefaultSimilarityProvider() == conf.getSimilarityProvider());
+ conf.setSimilarityProvider(new MySimilarity());
+ assertEquals(MySimilarity.class, conf.getSimilarityProvider().getClass());
+ conf.setSimilarityProvider(null);
+ assertTrue(IndexSearcher.getDefaultSimilarityProvider() == conf.getSimilarityProvider());
// Test IndexingChain
assertTrue(DocumentsWriter.defaultIndexingChain == conf.getIndexingChain());
@@ -233,6 +222,23 @@ public class TestIndexWriterConfig exten
// this is expected
}
+ // Test setReaderTermsIndexDivisor
+ try {
+ conf.setReaderTermsIndexDivisor(0);
+ fail("should not have succeeded to set termsIndexDivisor to 0");
+ } catch (IllegalArgumentException e) {
+ // this is expected
+ }
+
+ // Setting to -1 is ok
+ conf.setReaderTermsIndexDivisor(-1);
+ try {
+ conf.setReaderTermsIndexDivisor(-2);
+ fail("should not have succeeded to set termsIndexDivisor to < -1");
+ } catch (IllegalArgumentException e) {
+ // this is expected
+ }
+
assertEquals(IndexWriterConfig.DEFAULT_MAX_THREAD_STATES, conf.getMaxThreadStates());
conf.setMaxThreadStates(5);
assertEquals(5, conf.getMaxThreadStates());
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java Wed Feb 9 09:35:27 2011
@@ -157,8 +157,6 @@ public class TestIndexWriterDelete exten
assertEquals(0, modifier.getSegmentCount());
modifier.commit();
- modifier.commit();
-
IndexReader reader = IndexReader.open(dir, true);
assertEquals(1, reader.numDocs());
@@ -567,7 +565,7 @@ public class TestIndexWriterDelete exten
+ e);
}
- IndexSearcher searcher = new IndexSearcher(newReader);
+ IndexSearcher searcher = newSearcher(newReader);
ScoreDoc[] hits = null;
try {
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
@@ -684,7 +682,7 @@ public class TestIndexWriterDelete exten
MockDirectoryWrapper dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false));
+ TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false).setMergePolicy(newLogMergePolicy()));
modifier.setInfoStream(VERBOSE ? System.out : null);
LogMergePolicy lmp = (LogMergePolicy) modifier.getConfig().getMergePolicy();
@@ -832,8 +830,8 @@ public class TestIndexWriterDelete exten
}
}
- TestIndexWriter.assertNoUnreferencedFiles(dir, "docsWriter.abort() failed to delete unreferenced files");
modifier.close();
+ TestIndexWriter.assertNoUnreferencedFiles(dir, "docsWriter.abort() failed to delete unreferenced files");
dir.close();
}
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java Wed Feb 9 09:35:27 2011
@@ -288,6 +288,7 @@ public class TestIndexWriterExceptions e
public void testExceptionDocumentsWriterInit() throws IOException {
Directory dir = newDirectory();
MockIndexWriter2 w = new MockIndexWriter2(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+ w.setInfoStream(VERBOSE ? System.out : null);
Document doc = new Document();
doc.add(newField("field", "a field", Field.Store.YES,
Field.Index.ANALYZED));
@@ -359,7 +360,7 @@ public class TestIndexWriterExceptions e
public void testExceptionOnMergeInit() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
- .setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler());
+ .setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()).setMergePolicy(newLogMergePolicy());
((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
MockIndexWriter3 w = new MockIndexWriter3(dir, conf);
w.doFail = true;
@@ -527,7 +528,7 @@ public class TestIndexWriterExceptions e
System.out.println("TEST: cycle i=" + i);
}
MockDirectoryWrapper dir = newDirectory();
- IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer));
+ IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setMergePolicy(newLogMergePolicy()));
writer.setInfoStream(VERBOSE ? System.out : null);
// don't allow a sudden merge to clean up the deleted
@@ -567,24 +568,25 @@ public class TestIndexWriterExceptions e
System.out.println("TEST: open reader");
}
IndexReader reader = IndexReader.open(dir, true);
- int expected = 3+(1-i)*2;
- assertEquals(expected, reader.docFreq(new Term("contents", "here")));
- assertEquals(expected, reader.maxDoc());
- int numDel = 0;
- final Bits delDocs = MultiFields.getDeletedDocs(reader);
- assertNotNull(delDocs);
- for(int j=0;j<reader.maxDoc();j++) {
- if (delDocs.get(j))
- numDel++;
- else {
- reader.document(j);
- reader.getTermFreqVectors(j);
+ if (i == 0) {
+ int expected = 5;
+ assertEquals(expected, reader.docFreq(new Term("contents", "here")));
+ assertEquals(expected, reader.maxDoc());
+ int numDel = 0;
+ final Bits delDocs = MultiFields.getDeletedDocs(reader);
+ assertNotNull(delDocs);
+ for(int j=0;j<reader.maxDoc();j++) {
+ if (delDocs.get(j))
+ numDel++;
+ else {
+ reader.document(j);
+ reader.getTermFreqVectors(j);
+ }
}
+ assertEquals(1, numDel);
}
reader.close();
- assertEquals(1, numDel);
-
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
analyzer).setMaxBufferedDocs(10));
doc = new Document();
@@ -596,10 +598,10 @@ public class TestIndexWriterExceptions e
writer.close();
reader = IndexReader.open(dir, true);
- expected = 19+(1-i)*2;
+ int expected = 19+(1-i)*2;
assertEquals(expected, reader.docFreq(new Term("contents", "here")));
assertEquals(expected, reader.maxDoc());
- numDel = 0;
+ int numDel = 0;
assertNull(MultiFields.getDeletedDocs(reader));
for(int j=0;j<reader.maxDoc();j++) {
reader.document(j);
@@ -843,7 +845,7 @@ public class TestIndexWriterExceptions e
public void testOptimizeExceptions() throws IOException {
Directory startDir = newDirectory();
- IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2);
+ IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(100);
IndexWriter w = new IndexWriter(startDir, conf);
for(int i=0;i<27;i++)
@@ -981,7 +983,8 @@ public class TestIndexWriterExceptions e
// latest segments file and make sure we get an
// IOException trying to open the index:
public void testSimulatedCorruptIndex1() throws IOException {
- Directory dir = newDirectory();
+ MockDirectoryWrapper dir = newDirectory();
+ dir.setCheckIndexOnClose(false); // we are corrupting it!
IndexWriter writer = null;
@@ -1028,8 +1031,8 @@ public class TestIndexWriterExceptions e
// files and make sure we get an IOException trying to
// open the index:
public void testSimulatedCorruptIndex2() throws IOException {
- Directory dir = newDirectory();
-
+ MockDirectoryWrapper dir = newDirectory();
+ dir.setCheckIndexOnClose(false); // we are corrupting it!
IndexWriter writer = null;
writer = new IndexWriter(
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java Wed Feb 9 09:35:27 2011
@@ -104,7 +104,7 @@ public class TestIndexWriterMergePolicy
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
setMaxBufferedDocs(10).
- setMergePolicy(newLogMergePolicy())
+ setMergePolicy(newInOrderLogMergePolicy())
);
for (int i = 0; i < 250; i++) {
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterMerging.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterMerging.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterMerging.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterMerging.java Wed Feb 9 09:35:27 2011
@@ -58,7 +58,7 @@ public class TestIndexWriterMerging exte
IndexWriter writer = new IndexWriter(
merged,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
- setMergePolicy(newLogMergePolicy(2))
+ setMergePolicy(newInOrderLogMergePolicy(2))
);
writer.setInfoStream(VERBOSE ? System.out : null);
writer.addIndexes(indexA, indexB);
@@ -66,7 +66,6 @@ public class TestIndexWriterMerging exte
writer.close();
fail = verifyIndex(merged, 0);
- merged.close();
assertFalse("The merged index is invalid", fail);
indexA.close();
@@ -102,7 +101,7 @@ public class TestIndexWriterMerging exte
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(2).
- setMergePolicy(newLogMergePolicy(2))
+ setMergePolicy(newInOrderLogMergePolicy(2))
);
for (int i = start; i < (start + numDocs); i++)
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java Wed Feb 9 09:35:27 2011
@@ -177,7 +177,7 @@ public class TestIndexWriterOnDiskFull e
IndexReader reader = IndexReader.open(startDir, true);
assertEquals("first docFreq", 57, reader.docFreq(searchTerm));
- IndexSearcher searcher = new IndexSearcher(reader);
+ IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
assertEquals("first number of hits", 57, hits.length);
searcher.close();
@@ -232,7 +232,7 @@ public class TestIndexWriterOnDiskFull e
// Make a new dir that will enforce disk usage:
MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
- writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+ writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
IOException err = null;
writer.setInfoStream(VERBOSE ? System.out : null);
@@ -360,7 +360,7 @@ public class TestIndexWriterOnDiskFull e
}
}
- searcher = new IndexSearcher(reader);
+ searcher = newSearcher(reader);
try {
hits = searcher.search(new TermQuery(searchTerm), null, END_COUNT).scoreDocs;
} catch (IOException e) {
@@ -401,10 +401,10 @@ public class TestIndexWriterOnDiskFull e
// required is at most 2X total input size of
// indices so let's make sure:
assertTrue("max free Directory space required exceeded 1X the total input index sizes during " + methodName +
- ": max temp usage = " + (dir.getMaxUsedSizeInBytes()-startDiskUsage) + " bytes; " +
- "starting disk usage = " + startDiskUsage + " bytes; " +
- "input index disk usage = " + inputDiskUsage + " bytes",
- (dir.getMaxUsedSizeInBytes()-startDiskUsage) < 2*(startDiskUsage + inputDiskUsage));
+ ": max temp usage = " + (dir.getMaxUsedSizeInBytes()-startDiskUsage) + " bytes vs limit=" + (2*(startDiskUsage + inputDiskUsage)) +
+ "; starting disk usage = " + startDiskUsage + " bytes; " +
+ "input index disk usage = " + inputDiskUsage + " bytes",
+ (dir.getMaxUsedSizeInBytes()-startDiskUsage) < 2*(startDiskUsage + inputDiskUsage));
}
// Make sure we don't hit disk full during close below:
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnJRECrash.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnJRECrash.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnJRECrash.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnJRECrash.java Wed Feb 9 09:35:27 2011
@@ -50,7 +50,7 @@ public class TestIndexWriterOnJRECrash e
public void testNRTThreads() throws Exception {
String vendor = Constants.JAVA_VENDOR;
assumeTrue(vendor + " JRE not supported.",
- vendor.startsWith("Sun") || vendor.startsWith("IBM") || vendor.startsWith("Apple"));
+ vendor.startsWith("Sun") || vendor.startsWith("Apple"));
// if we are not the fork
if (System.getProperty("tests.crashmode") == null) {
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java Wed Feb 9 09:35:27 2011
@@ -20,6 +20,7 @@ import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collections;
+import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -365,7 +366,7 @@ public class TestIndexWriterReader exten
int numDirs = 3;
Directory mainDir = newDirectory();
- IndexWriter mainWriter = new IndexWriter(mainDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+ IndexWriter mainWriter = new IndexWriter(mainDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
_TestUtil.reduceOpenFiles(mainWriter);
mainWriter.setInfoStream(infoStream);
@@ -717,8 +718,9 @@ public class TestIndexWriterReader exten
// reader should remain usable even after IndexWriter is closed:
assertEquals(100, r.numDocs());
Query q = new TermQuery(new Term("indexname", "test"));
- assertEquals(100, new IndexSearcher(r).search(q, 10).totalHits);
-
+ IndexSearcher searcher = newSearcher(r);
+ assertEquals(100, searcher.search(q, 10).totalHits);
+ searcher.close();
try {
r.reopen();
fail("failed to hit AlreadyClosedException");
@@ -784,7 +786,9 @@ public class TestIndexWriterReader exten
r = r2;
}
Query q = new TermQuery(new Term("indexname", "test"));
- final int count = new IndexSearcher(r).search(q, 10).totalHits;
+ IndexSearcher searcher = newSearcher(r);
+ final int count = searcher.search(q, 10).totalHits;
+ searcher.close();
assertTrue(count >= lastCount);
lastCount = count;
}
@@ -799,7 +803,9 @@ public class TestIndexWriterReader exten
r = r2;
}
Query q = new TermQuery(new Term("indexname", "test"));
- final int count = new IndexSearcher(r).search(q, 10).totalHits;
+ IndexSearcher searcher = newSearcher(r);
+ final int count = searcher.search(q, 10).totalHits;
+ searcher.close();
assertTrue(count >= lastCount);
assertEquals(0, excs.size());
@@ -872,7 +878,9 @@ public class TestIndexWriterReader exten
r = r2;
}
Query q = new TermQuery(new Term("indexname", "test"));
- sum += new IndexSearcher(r).search(q, 10).totalHits;
+ IndexSearcher searcher = newSearcher(r);
+ sum += searcher.search(q, 10).totalHits;
+ searcher.close();
}
for(int i=0;i<NUM_THREAD;i++) {
@@ -885,8 +893,9 @@ public class TestIndexWriterReader exten
r = r2;
}
Query q = new TermQuery(new Term("indexname", "test"));
- sum += new IndexSearcher(r).search(q, 10).totalHits;
-
+ IndexSearcher searcher = newSearcher(r);
+ sum += searcher.search(q, 10).totalHits;
+ searcher.close();
assertTrue("no documents found at all", sum > 0);
assertEquals(0, excs.size());
@@ -899,7 +908,7 @@ public class TestIndexWriterReader exten
public void testExpungeDeletes() throws Throwable {
Directory dir = newDirectory();
- final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+ final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
Document doc = new Document();
doc.add(newField("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
Field id = newField("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
@@ -970,11 +979,13 @@ public class TestIndexWriterReader exten
setMaxBufferedDocs(2).
setReaderPooling(true).
setMergedSegmentWarmer(new IndexWriter.IndexReaderWarmer() {
+ @Override
public void warm(IndexReader r) throws IOException {
- IndexSearcher s = new IndexSearcher(r);
+ IndexSearcher s = newSearcher(r);
TopDocs hits = s.search(new TermQuery(new Term("foo", "bar")), 10);
assertEquals(20, hits.totalHits);
didWarm.set(true);
+ s.close();
}
}).
setMergePolicy(newLogMergePolicy(10))
@@ -990,4 +1001,35 @@ public class TestIndexWriterReader exten
dir.close();
assertTrue(didWarm.get());
}
+
+ public void testNoTermsIndex() throws Exception {
+ // Some Codecs don't honor the ReaderTermsIndexDivisor, so skip the test if
+ // they're picked.
+ HashSet<String> illegalCodecs = new HashSet<String>();
+ illegalCodecs.add("PreFlex");
+ illegalCodecs.add("SimpleText");
+
+ IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
+ new MockAnalyzer()).setReaderTermsIndexDivisor(-1);
+ // Don't proceed if picked Codec is in the list of illegal ones.
+ if (illegalCodecs.contains(conf.getCodecProvider().getFieldCodec("f"))) return;
+
+ Directory dir = newDirectory();
+ IndexWriter w = new IndexWriter(dir, conf);
+ Document doc = new Document();
+ doc.add(new Field("f", "val", Store.NO, Index.ANALYZED));
+ w.addDocument(doc);
+ IndexReader r = IndexReader.open(w, true).getSequentialSubReaders()[0];
+ try {
+ r.termDocsEnum(null, "f", new BytesRef("val"));
+ fail("should have failed to seek since terms index was not loaded. Codec used " + conf.getCodecProvider().getFieldCodec("f"));
+ } catch (IllegalStateException e) {
+ // expected - we didn't load the term index
+ } finally {
+ r.close();
+ w.close();
+ dir.close();
+ }
+ }
+
}
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestLazyBug.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestLazyBug.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestLazyBug.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestLazyBug.java Wed Feb 9 09:35:27 2011
@@ -63,7 +63,7 @@ public class TestLazyBug extends LuceneT
Directory dir = newDirectory();
try {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
- TEST_VERSION_CURRENT, new MockAnalyzer()));
+ TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
lmp.setUseCompoundFile(false);
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java Wed Feb 9 09:35:27 2011
@@ -98,7 +98,7 @@ public class TestLazyProxSkipping extend
SegmentReader reader = getOnlySegmentReader(IndexReader.open(directory, false));
- this.searcher = new IndexSearcher(reader);
+ this.searcher = newSearcher(reader);
}
private ScoreDoc[] search() throws IOException {
@@ -126,7 +126,9 @@ public class TestLazyProxSkipping extend
// test whether only the minimum amount of seeks()
// are performed
performTest(5);
+ searcher.close();
performTest(10);
+ searcher.close();
}
public void testSeek() throws IOException {
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestMultiFields.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestMultiFields.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestMultiFields.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestMultiFields.java Wed Feb 9 09:35:27 2011
@@ -97,6 +97,9 @@ public class TestMultiFields extends Luc
for(int i=0;i<100;i++) {
BytesRef term = terms.get(random.nextInt(terms.size()));
+ if (VERBOSE) {
+ System.out.println("TEST: seek to term= "+ UnicodeUtil.toHexString(term.utf8ToString()));
+ }
DocsEnum docsEnum = terms2.docs(delDocs, term, null);
assertNotNull(docsEnum);
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java Wed Feb 9 09:35:27 2011
@@ -53,6 +53,7 @@ public class TestMultiLevelSkipList exte
super(random, delegate);
}
+ @Override
public IndexInput openInput(String fileName) throws IOException {
IndexInput in = super.openInput(fileName);
if (fileName.endsWith(".frq"))
@@ -61,6 +62,7 @@ public class TestMultiLevelSkipList exte
}
}
+ @Override
@Before
public void setUp() throws Exception {
super.setUp();
@@ -69,7 +71,7 @@ public class TestMultiLevelSkipList exte
public void testSimpleSkip() throws IOException {
Directory dir = new CountingRAMDirectory(new RAMDirectory());
- IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new PayloadAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
+ IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new PayloadAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")).setMergePolicy(newInOrderLogMergePolicy()));
Term term = new Term("test", "a");
for (int i = 0; i < 5000; i++) {
Document d1 = new Document();
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestNRTThreads.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestNRTThreads.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestNRTThreads.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestNRTThreads.java Wed Feb 9 09:35:27 2011
@@ -23,6 +23,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
+import java.util.HashSet;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.Executors;
@@ -39,6 +40,7 @@ import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.document.Field;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.NamedThreadFactory;
@@ -65,7 +67,7 @@ public class TestNRTThreads extends Luce
CodecProvider.getDefault().setDefaultFieldCodec("Standard");
}
- final LineFileDocs docs = new LineFileDocs(true);
+ final LineFileDocs docs = new LineFileDocs(random);
final File tempDir = _TestUtil.getTempDir("nrtopenfiles");
final MockDirectoryWrapper dir = new MockDirectoryWrapper(random, FSDirectory.open(tempDir));
final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
@@ -86,14 +88,16 @@ public class TestNRTThreads extends Luce
}
}
- sum += new IndexSearcher(reader).search(new TermQuery(new Term("body", "united")), 10).totalHits;
+ IndexSearcher searcher = newSearcher(reader);
+ sum += searcher.search(new TermQuery(new Term("body", "united")), 10).totalHits;
+ searcher.close();
if (VERBOSE) {
System.out.println("TEST: warm visited " + sum + " fields");
}
}
});
-
+
final IndexWriter writer = new IndexWriter(dir, conf);
if (VERBOSE) {
writer.setInfoStream(System.out);
@@ -104,10 +108,12 @@ public class TestNRTThreads extends Luce
((ConcurrentMergeScheduler) ms).setMaxThreadCount(1);
((ConcurrentMergeScheduler) ms).setMaxMergeCount(1);
}
+ /*
LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
if (lmp.getMergeFactor() > 5) {
lmp.setMergeFactor(5);
}
+ */
final int NUM_INDEX_THREADS = 2;
final int NUM_SEARCH_THREADS = 3;
@@ -117,7 +123,7 @@ public class TestNRTThreads extends Luce
final AtomicInteger addCount = new AtomicInteger();
final AtomicInteger delCount = new AtomicInteger();
- final List<String> delIDs = Collections.synchronizedList(new ArrayList<String>());
+ final Set<String> delIDs = Collections.synchronizedSet(new HashSet<String>());
final long stopTime = System.currentTimeMillis() + RUN_TIME_SEC*1000;
Thread[] threads = new Thread[NUM_INDEX_THREADS];
@@ -132,22 +138,29 @@ public class TestNRTThreads extends Luce
if (doc == null) {
break;
}
+ final String addedField;
+ if (random.nextBoolean()) {
+ addedField = "extra" + random.nextInt(10);
+ doc.add(new Field(addedField, "a random field", Field.Store.NO, Field.Index.ANALYZED));
+ } else {
+ addedField = null;
+ }
if (random.nextBoolean()) {
if (VERBOSE) {
- //System.out.println(Thread.currentThread().getName() + ": add doc id:" + doc.get("id"));
+ System.out.println(Thread.currentThread().getName() + ": add doc id:" + doc.get("id"));
}
writer.addDocument(doc);
} else {
// we use update but it never replaces a
// prior doc
if (VERBOSE) {
- //System.out.println(Thread.currentThread().getName() + ": update doc id:" + doc.get("id"));
+ System.out.println(Thread.currentThread().getName() + ": update doc id:" + doc.get("id"));
}
writer.updateDocument(new Term("id", doc.get("id")), doc);
}
if (random.nextInt(5) == 3) {
if (VERBOSE) {
- //System.out.println(Thread.currentThread().getName() + ": buffer del id:" + doc.get("id"));
+ System.out.println(Thread.currentThread().getName() + ": buffer del id:" + doc.get("id"));
}
toDeleteIDs.add(doc.get("id"));
}
@@ -156,6 +169,9 @@ public class TestNRTThreads extends Luce
System.out.println(Thread.currentThread().getName() + ": apply " + toDeleteIDs.size() + " deletes");
}
for(String id : toDeleteIDs) {
+ if (VERBOSE) {
+ System.out.println(Thread.currentThread().getName() + ": del term=id:" + id);
+ }
writer.deleteDocuments(new Term("id", id));
}
final int count = delCount.addAndGet(toDeleteIDs.size());
@@ -166,6 +182,9 @@ public class TestNRTThreads extends Luce
toDeleteIDs.clear();
}
addCount.getAndIncrement();
+ if (addedField != null) {
+ doc.removeField(addedField);
+ }
} catch (Exception exc) {
System.out.println(Thread.currentThread().getName() + ": hit exc");
exc.printStackTrace();
@@ -189,7 +208,7 @@ public class TestNRTThreads extends Luce
// let index build up a bit
Thread.sleep(100);
- IndexReader r = IndexReader.open(writer);
+ IndexReader r = IndexReader.open(writer, true);
boolean any = false;
// silly starting guess:
@@ -222,7 +241,7 @@ public class TestNRTThreads extends Luce
if (VERBOSE) {
System.out.println("TEST: now open");
}
- r = IndexReader.open(writer);
+ r = IndexReader.open(writer, true);
}
if (VERBOSE) {
System.out.println("TEST: got new reader=" + r);
@@ -335,20 +354,39 @@ public class TestNRTThreads extends Luce
}
final IndexReader r2 = writer.getReader();
- final IndexSearcher s = new IndexSearcher(r2);
+ final IndexSearcher s = newSearcher(r2);
+ boolean doFail = false;
for(String id : delIDs) {
final TopDocs hits = s.search(new TermQuery(new Term("id", id)), 1);
if (hits.totalHits != 0) {
- fail("doc id=" + id + " is supposed to be deleted, but got docID=" + hits.scoreDocs[0].doc);
+ System.out.println("doc id=" + id + " is supposed to be deleted, but got docID=" + hits.scoreDocs[0].doc);
+ doFail = true;
}
}
+
+ final int endID = Integer.parseInt(docs.nextDoc().get("id"));
+ for(int id=0;id<endID;id++) {
+ String stringID = ""+id;
+ if (!delIDs.contains(stringID)) {
+ final TopDocs hits = s.search(new TermQuery(new Term("id", stringID)), 1);
+ if (hits.totalHits != 1) {
+ System.out.println("doc id=" + stringID + " is not supposed to be deleted, but got hitCount=" + hits.totalHits);
+ doFail = true;
+ }
+ }
+ }
+ assertFalse(doFail);
+
assertEquals("index=" + writer.segString() + " addCount=" + addCount + " delCount=" + delCount, addCount.get() - delCount.get(), r2.numDocs());
r2.close();
writer.commit();
assertEquals("index=" + writer.segString() + " addCount=" + addCount + " delCount=" + delCount, addCount.get() - delCount.get(), writer.numDocs());
-
+
+ assertFalse(writer.anyNonBulkMerges);
writer.close(false);
+ _TestUtil.checkIndex(dir);
+ s.close();
dir.close();
_TestUtil.rmDir(tempDir);
docs.close();
@@ -363,7 +401,7 @@ public class TestNRTThreads extends Luce
}
private void smokeTestReader(IndexReader r) throws Exception {
- IndexSearcher s = new IndexSearcher(r);
+ IndexSearcher s = newSearcher(r);
runQuery(s, new TermQuery(new Term("body", "united")));
runQuery(s, new TermQuery(new Term("titleTokenized", "states")));
PhraseQuery pq = new PhraseQuery();
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestNorms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestNorms.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestNorms.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestNorms.java Wed Feb 9 09:35:27 2011
@@ -30,6 +30,7 @@ import org.apache.lucene.document.Field.
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.Similarity;
+import org.apache.lucene.search.SimilarityProvider;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
@@ -41,14 +42,15 @@ public class TestNorms extends LuceneTes
private class SimilarityOne extends DefaultSimilarity {
@Override
- public float lengthNorm(String fieldName, int numTerms) {
- return 1;
+ public float computeNorm(String fieldName, FieldInvertState state) {
+ // Disable length norm
+ return state.getBoost();
}
}
private static final int NUM_FIELDS = 10;
- private Similarity similarityOne;
+ private SimilarityProvider similarityOne;
private Analyzer anlzr;
private int numDocNorms;
private ArrayList<Float> norms;
@@ -150,7 +152,7 @@ public class TestNorms extends LuceneTes
private void createIndex(Random random, Directory dir) throws IOException {
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.CREATE)
- .setMaxBufferedDocs(5).setSimilarity(similarityOne));
+ .setMaxBufferedDocs(5).setSimilarityProvider(similarityOne).setMergePolicy(newInOrderLogMergePolicy()));
LogMergePolicy lmp = (LogMergePolicy) iw.getConfig().getMergePolicy();
lmp.setMergeFactor(3);
lmp.setUseCompoundFile(true);
@@ -168,8 +170,9 @@ public class TestNorms extends LuceneTes
//System.out.println(" and: for "+k+" from "+newNorm+" to "+origNorm);
modifiedNorms.set(i, Float.valueOf(newNorm));
modifiedNorms.set(k, Float.valueOf(origNorm));
- ir.setNorm(i, "f"+1, newNorm);
- ir.setNorm(k, "f"+1, origNorm);
+ Similarity sim = new DefaultSimilarity().get("f"+1);
+ ir.setNorm(i, "f"+1, sim.encodeNormValue(newNorm));
+ ir.setNorm(k, "f"+1, sim.encodeNormValue(origNorm));
}
ir.close();
}
@@ -183,7 +186,7 @@ public class TestNorms extends LuceneTes
assertEquals("number of norms mismatches",numDocNorms,b.length);
ArrayList<Float> storedNorms = (i==1 ? modifiedNorms : norms);
for (int j = 0; j < b.length; j++) {
- float norm = similarityOne.decodeNormValue(b[j]);
+ float norm = similarityOne.get(field).decodeNormValue(b[j]);
float norm1 = storedNorms.get(j).floatValue();
assertEquals("stored norm value of "+field+" for doc "+j+" is "+norm+" - a mismatch!", norm, norm1, 0.000001);
}
@@ -194,7 +197,7 @@ public class TestNorms extends LuceneTes
private void addDocs(Random random, Directory dir, int ndocs, boolean compound) throws IOException {
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.APPEND)
- .setMaxBufferedDocs(5).setSimilarity(similarityOne));
+ .setMaxBufferedDocs(5).setSimilarityProvider(similarityOne).setMergePolicy(newInOrderLogMergePolicy()));
LogMergePolicy lmp = (LogMergePolicy) iw.getConfig().getMergePolicy();
lmp.setMergeFactor(3);
lmp.setUseCompoundFile(compound);
@@ -207,7 +210,7 @@ public class TestNorms extends LuceneTes
// create the next document
private Document newDoc() {
Document d = new Document();
- float boost = nextNorm();
+ float boost = nextNorm("anyfield"); // in this test the same similarity is used for all fields so it does not matter what field is passed
for (int i = 0; i < 10; i++) {
Field f = newField("f"+i,"v"+i,Store.NO,Index.NOT_ANALYZED);
f.setBoost(boost);
@@ -217,10 +220,11 @@ public class TestNorms extends LuceneTes
}
// return unique norm values that are unchanged by encoding/decoding
- private float nextNorm() {
+ private float nextNorm(String fname) {
float norm = lastNorm + normDelta;
+ Similarity similarity = similarityOne.get(fname);
do {
- float norm1 = similarityOne.decodeNormValue(similarityOne.encodeNormValue(norm));
+ float norm1 = similarity.decodeNormValue(similarity.encodeNormValue(norm));
if (norm1 > lastNorm) {
//System.out.println(norm1+" > "+lastNorm);
norm = norm1;
@@ -236,4 +240,52 @@ public class TestNorms extends LuceneTes
return norm;
}
+ class CustomNormEncodingSimilarity extends DefaultSimilarity {
+ @Override
+ public byte encodeNormValue(float f) {
+ return (byte) f;
+ }
+
+ @Override
+ public float decodeNormValue(byte b) {
+ return (float) b;
+ }
+
+ @Override
+ public float computeNorm(String field, FieldInvertState state) {
+ return (float) state.getLength();
+ }
+ }
+
+ // LUCENE-1260
+ public void testCustomEncoder() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+ config.setSimilarityProvider(new CustomNormEncodingSimilarity());
+ RandomIndexWriter writer = new RandomIndexWriter(random, dir, config);
+ Document doc = new Document();
+ Field foo = newField("foo", "", Field.Store.NO, Field.Index.ANALYZED);
+ Field bar = newField("bar", "", Field.Store.NO, Field.Index.ANALYZED);
+ doc.add(foo);
+ doc.add(bar);
+
+ for (int i = 0; i < 100; i++) {
+ bar.setValue("singleton");
+ writer.addDocument(doc);
+ }
+
+ IndexReader reader = writer.getReader();
+ writer.close();
+
+ byte fooNorms[] = MultiNorms.norms(reader, "foo");
+ for (int i = 0; i < reader.maxDoc(); i++)
+ assertEquals(0, fooNorms[i]);
+
+ byte barNorms[] = MultiNorms.norms(reader, "bar");
+ for (int i = 0; i < reader.maxDoc(); i++)
+ assertEquals(1, barNorms[i]);
+
+ reader.close();
+ dir.close();
+ }
}
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestOmitTf.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestOmitTf.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestOmitTf.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestOmitTf.java Wed Feb 9 09:35:27 2011
@@ -26,6 +26,7 @@ import org.apache.lucene.analysis.Analyz
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.search.*;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.store.Directory;
@@ -34,13 +35,11 @@ import org.apache.lucene.search.Explanat
public class TestOmitTf extends LuceneTestCase {
- public static class SimpleSimilarity extends Similarity {
- @Override public float lengthNorm(String field, int numTerms) { return 1.0f; }
- @Override public float queryNorm(float sumOfSquaredWeights) { return 1.0f; }
+ public static class SimpleSimilarity extends Similarity implements SimilarityProvider {
+ @Override public float computeNorm(String field, FieldInvertState state) { return state.getBoost(); }
@Override public float tf(float freq) { return freq; }
@Override public float sloppyFreq(int distance) { return 2.0f; }
@Override public float idf(int docFreq, int numDocs) { return 1.0f; }
- @Override public float coord(int overlap, int maxOverlap) { return 1.0f; }
@Override public IDFExplanation idfExplain(Collection<Term> terms, IndexSearcher searcher) throws IOException {
return new IDFExplanation() {
@Override
@@ -53,6 +52,11 @@ public class TestOmitTf extends LuceneTe
}
};
}
+ public float queryNorm(float sumOfSquaredWeights) { return 1.0f; }
+ public float coord(int overlap, int maxOverlap) { return 1.0f; }
+ public Similarity get(String field) {
+ return this;
+ }
}
// Tests whether the DocumentWriter correctly enable the
@@ -215,7 +219,7 @@ public class TestOmitTf extends LuceneTe
Directory ram = newDirectory();
Analyzer analyzer = new MockAnalyzer();
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(
- TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3));
+ TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3).setMergePolicy(newLogMergePolicy()));
LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
lmp.setMergeFactor(2);
lmp.setUseCompoundFile(false);
@@ -250,9 +254,10 @@ public class TestOmitTf extends LuceneTe
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).
setMaxBufferedDocs(2).
- setSimilarity(new SimpleSimilarity()).
- setMergePolicy(newLogMergePolicy(2))
+ setSimilarityProvider(new SimpleSimilarity()).
+ setMergePolicy(newInOrderLogMergePolicy(2))
);
+ writer.setInfoStream(VERBOSE ? System.out : null);
StringBuilder sb = new StringBuilder(265);
String term = "term";
@@ -280,7 +285,7 @@ public class TestOmitTf extends LuceneTe
* Verify the index
*/
IndexSearcher searcher = new IndexSearcher(dir, true);
- searcher.setSimilarity(new SimpleSimilarity());
+ searcher.setSimilarityProvider(new SimpleSimilarity());
Term a = new Term("noTf", term);
Term b = new Term("tf", term);
@@ -330,7 +335,7 @@ public class TestOmitTf extends LuceneTe
public final void collect(int doc) throws IOException {
//System.out.println("Q2: Doc=" + doc + " score=" + score);
float score = scorer.score();
- assertTrue(score==1.0f+doc);
+ assertEquals(1.0f+doc, score, 0.00001f);
super.collect(doc);
}
});
@@ -414,8 +419,8 @@ public class TestOmitTf extends LuceneTe
public static int getSum() { return sum; }
@Override
- public void setNextReader(IndexReader reader, int docBase) {
- this.docBase = docBase;
+ public void setNextReader(AtomicReaderContext context) {
+ docBase = context.docBase;
}
@Override
public boolean acceptsDocsOutOfOrder() {
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestParallelReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestParallelReader.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestParallelReader.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestParallelReader.java Wed Feb 9 09:35:27 2011
@@ -47,7 +47,9 @@ public class TestParallelReader extends
@Override
public void tearDown() throws Exception {
single.getIndexReader().close();
+ single.close();
parallel.getIndexReader().close();
+ parallel.close();
dir.close();
dir1.close();
dir2.close();
@@ -147,7 +149,8 @@ public class TestParallelReader extends
assertTrue(pr.isCurrent());
IndexReader modifier = IndexReader.open(dir1, false);
- modifier.setNorm(0, "f1", 100);
+ SimilarityProvider sim = new DefaultSimilarity();
+ modifier.setNorm(0, "f1", sim.get("f1").encodeNormValue(100f));
modifier.close();
// one of the two IndexReaders which ParallelReader is using
@@ -155,7 +158,7 @@ public class TestParallelReader extends
assertFalse(pr.isCurrent());
modifier = IndexReader.open(dir2, false);
- modifier.setNorm(0, "f3", 100);
+ modifier.setNorm(0, "f3", sim.get("f3").encodeNormValue(100f));
modifier.close();
// now both are not current anymore
@@ -266,7 +269,7 @@ public class TestParallelReader extends
ParallelReader pr = new ParallelReader();
pr.add(IndexReader.open(dir1, false));
pr.add(IndexReader.open(dir2, false));
- return new IndexSearcher(pr);
+ return newSearcher(pr);
}
private Directory getDir1(Random random) throws IOException {
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestPayloads.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestPayloads.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestPayloads.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestPayloads.java Wed Feb 9 09:35:27 2011
@@ -163,7 +163,8 @@ public class TestPayloads extends Lucene
PayloadAnalyzer analyzer = new PayloadAnalyzer();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, analyzer)
- .setOpenMode(OpenMode.CREATE));
+ .setOpenMode(OpenMode.CREATE)
+ .setMergePolicy(newInOrderLogMergePolicy()));
// should be in sync with value in TermInfosWriter
final int skipInterval = 16;
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestPerFieldCodecSupport.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestPerFieldCodecSupport.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestPerFieldCodecSupport.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestPerFieldCodecSupport.java Wed Feb 9 09:35:27 2011
@@ -123,7 +123,7 @@ public class TestPerFieldCodecSupport ex
IndexWriterConfig iwconf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setCodecProvider(provider);
iwconf.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
- ((LogMergePolicy) iwconf.getMergePolicy()).setMergeFactor(10);
+ //((LogMergePolicy) iwconf.getMergePolicy()).setMergeFactor(10);
IndexWriter writer = newWriter(dir, iwconf);
addDocs(writer, 10);
@@ -143,8 +143,8 @@ public class TestPerFieldCodecSupport ex
iwconf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.APPEND).setCodecProvider(provider);
- ((LogMergePolicy) iwconf.getMergePolicy()).setUseCompoundFile(false);
- ((LogMergePolicy) iwconf.getMergePolicy()).setMergeFactor(10);
+ //((LogMergePolicy) iwconf.getMergePolicy()).setUseCompoundFile(false);
+ //((LogMergePolicy) iwconf.getMergePolicy()).setMergeFactor(10);
iwconf.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
provider = new MockCodecProvider2(); // uses standard for field content
@@ -227,7 +227,7 @@ public class TestPerFieldCodecSupport ex
}
IndexReader reader = IndexReader.open(dir, null, true,
IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, codecs);
- IndexSearcher searcher = new IndexSearcher(reader);
+ IndexSearcher searcher = newSearcher(reader);
TopDocs search = searcher.search(new TermQuery(t), num + 10);
assertEquals(num, search.totalHits);
searcher.close();
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java Wed Feb 9 09:35:27 2011
@@ -46,6 +46,7 @@ public class TestPerSegmentDeletes exten
RangeMergePolicy fsmp = new RangeMergePolicy(false);
iwc.setMergePolicy(fsmp);
IndexWriter writer = new IndexWriter(dir, iwc);
+ writer.setInfoStream(VERBOSE ? System.out : null);
for (int x = 0; x < 5; x++) {
writer.addDocument(TestIndexWriterReader.createDocument(x, "1", 2));
//System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
@@ -73,12 +74,12 @@ public class TestPerSegmentDeletes exten
// flushing without applying deletes means
// there will still be deletes in the segment infos
writer.flush(false, false);
- assertTrue(writer.bufferedDeletes.any());
+ assertTrue(writer.bufferedDeletesStream.any());
// get reader flushes pending deletes
// so there should not be anymore
IndexReader r1 = writer.getReader();
- assertFalse(writer.bufferedDeletes.any());
+ assertFalse(writer.bufferedDeletesStream.any());
r1.close();
// delete id:2 from the first segment
@@ -256,6 +257,7 @@ public class TestPerSegmentDeletes exten
@Override
public void close() {}
+ @Override
public MergeSpecification findMerges(SegmentInfos segmentInfos)
throws CorruptIndexException, IOException {
MergeSpecification ms = new MergeSpecification();
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestSegmentReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestSegmentReader.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestSegmentReader.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestSegmentReader.java Wed Feb 9 09:35:27 2011
@@ -27,7 +27,6 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;
-import org.apache.lucene.search.Similarity;
import org.apache.lucene.store.Directory;
public class TestSegmentReader extends LuceneTestCase {
@@ -180,15 +179,9 @@ public class TestSegmentReader extends L
assertEquals(reader.hasNorms(f.name()), !f.getOmitNorms());
assertEquals(reader.hasNorms(f.name()), !DocHelper.noNorms.containsKey(f.name()));
if (!reader.hasNorms(f.name())) {
- // test for fake norms of 1.0 or null depending on the flag
+ // test for norms of null
byte [] norms = MultiNorms.norms(reader, f.name());
- byte norm1 = Similarity.getDefault().encodeNormValue(1.0f);
assertNull(norms);
- norms = new byte[reader.maxDoc()];
- MultiNorms.norms(reader, f.name(),norms, 0);
- for (int j=0; j<reader.maxDoc(); j++) {
- assertEquals(norms[j], norm1);
- }
}
}
}
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java Wed Feb 9 09:35:27 2011
@@ -105,7 +105,7 @@ public class TestSegmentTermDocs extends
public void testSkipTo(int indexDivisor) throws IOException {
Directory dir = newDirectory();
- IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+ IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newInOrderLogMergePolicy()));
Term ta = new Term("content","aaa");
for(int i = 0; i < 10; i++)
Modified: lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestSizeBoundedOptimize.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestSizeBoundedOptimize.java?rev=1068809&r1=1068808&r2=1068809&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestSizeBoundedOptimize.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/test/org/apache/lucene/index/TestSizeBoundedOptimize.java Wed Feb 9 09:35:27 2011
@@ -63,7 +63,7 @@ public class TestSizeBoundedOptimize ext
conf = newWriterConfig();
LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy();
- lmp.setMaxMergeMB((min + 1) / (1 << 20));
+ lmp.setMaxMergeMBForOptimize((min + 1) / (1 << 20));
conf.setMergePolicy(lmp);
writer = new IndexWriter(dir, conf);