You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by mi...@apache.org on 2011/05/05 21:15:41 UTC
svn commit: r1099926 - in /lucene/dev/branches/branch_3x: ./ lucene/
lucene/backwards/ lucene/backwards/src/test-framework/org/apache/lucene/util/
lucene/backwards/src/test/org/apache/lucene/index/
lucene/backwards/src/test/org/apache/lucene/search/ lu...
Author: mikemccand
Date: Thu May 5 19:15:40 2011
New Revision: 1099926
URL: http://svn.apache.org/viewvc?rev=1099926&view=rev
Log:
LUCENE-2918: prune 100% del segments the moment they are created (in applyDeletes) instead of on commit, so that NRT usage doesn't waste time on fully deleted segs (back port from trunk)
Added:
lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestRollingUpdates.java
- copied, changed from r1071569, lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestRollingUpdates.java
Modified:
lucene/dev/branches/branch_3x/ (props changed)
lucene/dev/branches/branch_3x/lucene/ (props changed)
lucene/dev/branches/branch_3x/lucene/backwards/ (props changed)
lucene/dev/branches/branch_3x/lucene/backwards/src/test-framework/org/apache/lucene/util/_TestUtil.java
lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/search/TestCachingSpanFilter.java
lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java
lucene/dev/branches/branch_3x/lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java
lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java
lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/DirectoryReader.java
lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java
lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/IndexWriter.java
lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergePolicy.java
lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/index/RandomIndexWriter.java
lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/search/QueryUtils.java
lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/util/_TestUtil.java
lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIsCurrent.java
lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestThreadedOptimize.java
lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/search/TestCachingSpanFilter.java
lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java
lucene/dev/branches/branch_3x/solr/ (props changed)
Modified: lucene/dev/branches/branch_3x/lucene/backwards/src/test-framework/org/apache/lucene/util/_TestUtil.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/backwards/src/test-framework/org/apache/lucene/util/_TestUtil.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/backwards/src/test-framework/org/apache/lucene/util/_TestUtil.java (original)
+++ lucene/dev/branches/branch_3x/lucene/backwards/src/test-framework/org/apache/lucene/util/_TestUtil.java Thu May 5 19:15:40 2011
@@ -25,6 +25,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintStream;
+import java.lang.reflect.Method;
import java.util.Enumeration;
import java.util.Random;
import java.util.Map;
@@ -330,4 +331,17 @@ public class _TestUtil {
});
Assert.assertEquals("Reflection does not produce same map", reflectedValues, map);
}
+
+ public static void keepFullyDeletedSegments(IndexWriter w) {
+ try {
+ // Carefully invoke what is a package-private (test
+ // only, internal) method on IndexWriter:
+ Method m = IndexWriter.class.getDeclaredMethod("keepFullyDeletedSegments");
+ m.setAccessible(true);
+ m.invoke(w);
+ } catch (Exception e) {
+ // Should not happen?
+ throw new RuntimeException(e);
+ }
+ }
}
Modified: lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java (original)
+++ lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java Thu May 5 19:15:40 2011
@@ -459,6 +459,8 @@ public class TestIndexWriterOnDiskFull e
//IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setReaderPooling(true));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new SerialMergeScheduler()).setReaderPooling(true));
+ _TestUtil.keepFullyDeletedSegments(w);
+
((LogMergePolicy) w.getMergePolicy()).setMergeFactor(2);
Document doc = new Document();
Modified: lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/search/TestCachingSpanFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/search/TestCachingSpanFilter.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/search/TestCachingSpanFilter.java (original)
+++ lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/search/TestCachingSpanFilter.java Thu May 5 19:15:40 2011
@@ -30,6 +30,7 @@ import org.apache.lucene.search.spans.Sp
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
public class TestCachingSpanFilter extends LuceneTestCase {
@@ -71,6 +72,7 @@ public class TestCachingSpanFilter exten
assertEquals("[just filter] Should find a hit...", 1, docs.totalHits);
// now delete the doc, refresh the reader, and see that it's not there
+ _TestUtil.keepFullyDeletedSegments(writer.w);
writer.deleteDocuments(new Term("id", "1"));
reader = refreshReader(reader);
Modified: lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java (original)
+++ lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java Thu May 5 19:15:40 2011
@@ -31,6 +31,7 @@ import org.apache.lucene.store.Directory
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.OpenBitSet;
import org.apache.lucene.util.OpenBitSetDISI;
+import org.apache.lucene.util._TestUtil;
public class TestCachingWrapperFilter extends LuceneTestCase {
@@ -187,6 +188,7 @@ public class TestCachingWrapperFilter ex
assertEquals("[just filter] Should find a hit...", 1, docs.totalHits);
// now delete the doc, refresh the reader, and see that it's not there
+ _TestUtil.keepFullyDeletedSegments(writer.w);
writer.deleteDocuments(new Term("id", "1"));
reader = refreshReader(reader);
Modified: lucene/dev/branches/branch_3x/lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java (original)
+++ lucene/dev/branches/branch_3x/lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java Thu May 5 19:15:40 2011
@@ -185,8 +185,8 @@ public class TestParser extends LuceneTe
}
public void testDuplicateFilterQueryXML() throws ParserException, IOException
{
- Assume.assumeTrue(searcher.getIndexReader().getSequentialSubReaders() == null ||
- searcher.getIndexReader().getSequentialSubReaders().length == 1);
+ Assume.assumeTrue(searcher.getIndexReader().getSequentialSubReaders() == null ||
+ searcher.getIndexReader().getSequentialSubReaders().length == 1);
Query q=parse("DuplicateFilterQuery.xml");
int h = searcher.search(q, null, 1000).totalHits;
assertEquals("DuplicateFilterQuery should produce 1 result ", 1,h);
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java Thu May 5 19:15:40 2011
@@ -120,9 +120,13 @@ class BufferedDeletesStream {
// Current gen, for the merged segment:
public final long gen;
- ApplyDeletesResult(boolean anyDeletes, long gen) {
+ // If non-null, contains segments that are 100% deleted
+ public final SegmentInfos allDeleted;
+
+ ApplyDeletesResult(boolean anyDeletes, long gen, SegmentInfos allDeleted) {
this.anyDeletes = anyDeletes;
this.gen = gen;
+ this.allDeleted = allDeleted;
}
}
@@ -153,14 +157,14 @@ class BufferedDeletesStream {
final long t0 = System.currentTimeMillis();
if (infos.size() == 0) {
- return new ApplyDeletesResult(false, nextGen++);
+ return new ApplyDeletesResult(false, nextGen++, null);
}
assert checkDeleteStats();
if (!any()) {
message("applyDeletes: no deletes; skipping");
- return new ApplyDeletesResult(false, nextGen++);
+ return new ApplyDeletesResult(false, nextGen++, null);
}
if (infoStream != null) {
@@ -177,6 +181,8 @@ class BufferedDeletesStream {
int infosIDX = infos2.size()-1;
int delIDX = deletes.size()-1;
+ SegmentInfos allDeleted = null;
+
while (infosIDX >= 0) {
//System.out.println("BD: cycle delIDX=" + delIDX + " infoIDX=" + infosIDX);
@@ -198,6 +204,7 @@ class BufferedDeletesStream {
assert readerPool.infoIsLive(info);
SegmentReader reader = readerPool.get(info, false);
int delCount = 0;
+ final boolean segAllDeletes;
try {
if (coalescedDeletes != null) {
//System.out.println(" del coalesced");
@@ -208,13 +215,21 @@ class BufferedDeletesStream {
// Don't delete by Term here; DocumentsWriter
// already did that on flush:
delCount += applyQueryDeletes(packet.queriesIterable(), reader);
+ segAllDeletes = reader.numDocs() == 0;
} finally {
readerPool.release(reader);
}
anyNewDeletes |= delCount > 0;
+ if (segAllDeletes) {
+ if (allDeleted == null) {
+ allDeleted = new SegmentInfos();
+ }
+ allDeleted.add(info);
+ }
+
if (infoStream != null) {
- message("seg=" + info + " segGen=" + segGen + " segDeletes=[" + packet + "]; coalesced deletes=[" + (coalescedDeletes == null ? "null" : coalescedDeletes) + "] delCount=" + delCount);
+ message("seg=" + info + " segGen=" + segGen + " segDeletes=[" + packet + "]; coalesced deletes=[" + (coalescedDeletes == null ? "null" : coalescedDeletes) + "] delCount=" + delCount + (segAllDeletes ? " 100% deleted" : ""));
}
if (coalescedDeletes == null) {
@@ -233,16 +248,25 @@ class BufferedDeletesStream {
assert readerPool.infoIsLive(info);
SegmentReader reader = readerPool.get(info, false);
int delCount = 0;
+ final boolean segAllDeletes;
try {
delCount += applyTermDeletes(coalescedDeletes.termsIterable(), reader);
delCount += applyQueryDeletes(coalescedDeletes.queriesIterable(), reader);
+ segAllDeletes = reader.numDocs() == 0;
} finally {
readerPool.release(reader);
}
anyNewDeletes |= delCount > 0;
+ if (segAllDeletes) {
+ if (allDeleted == null) {
+ allDeleted = new SegmentInfos();
+ }
+ allDeleted.add(info);
+ }
+
if (infoStream != null) {
- message("seg=" + info + " segGen=" + segGen + " coalesced deletes=[" + (coalescedDeletes == null ? "null" : coalescedDeletes) + "] delCount=" + delCount);
+ message("seg=" + info + " segGen=" + segGen + " coalesced deletes=[" + (coalescedDeletes == null ? "null" : coalescedDeletes) + "] delCount=" + delCount + (segAllDeletes ? " 100% deleted" : ""));
}
}
info.setBufferedDeletesGen(nextGen);
@@ -257,7 +281,7 @@ class BufferedDeletesStream {
}
// assert infos != segmentInfos || !any() : "infos=" + infos + " segmentInfos=" + segmentInfos + " any=" + any;
- return new ApplyDeletesResult(anyNewDeletes, nextGen++);
+ return new ApplyDeletesResult(anyNewDeletes, nextGen++, allDeleted);
}
public synchronized long getNextGen() {
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/DirectoryReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/DirectoryReader.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/DirectoryReader.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/DirectoryReader.java Thu May 5 19:15:40 2011
@@ -136,7 +136,6 @@ class DirectoryReader extends IndexReade
this.readOnly = true;
this.applyAllDeletes = applyAllDeletes; // saved for reopen
- segmentInfos = (SegmentInfos) infos.clone();// make sure we clone otherwise we share mutable state with IW
this.termInfosIndexDivisor = termInfosIndexDivisor;
readerFinishedListeners = writer.getReaderFinishedListeners();
@@ -144,23 +143,33 @@ class DirectoryReader extends IndexReade
// us, which ensures infos will not change; so there's
// no need to process segments in reverse order
final int numSegments = infos.size();
- SegmentReader[] readers = new SegmentReader[numSegments];
+
+ List<SegmentReader> readers = new ArrayList<SegmentReader>();
final Directory dir = writer.getDirectory();
+ segmentInfos = (SegmentInfos) infos.clone();
+ int infosUpto = 0;
for (int i=0;i<numSegments;i++) {
boolean success = false;
try {
final SegmentInfo info = infos.info(i);
assert info.dir == dir;
- readers[i] = writer.readerPool.getReadOnlyClone(info, true, termInfosIndexDivisor);
- readers[i].readerFinishedListeners = readerFinishedListeners;
+ final SegmentReader reader = writer.readerPool.getReadOnlyClone(info, true, termInfosIndexDivisor);
+ if (reader.numDocs() > 0 || writer.getKeepFullyDeletedSegments()) {
+ reader.readerFinishedListeners = readerFinishedListeners;
+ readers.add(reader);
+ infosUpto++;
+ } else {
+ reader.close();
+ segmentInfos.remove(infosUpto);
+ }
success = true;
} finally {
if (!success) {
// Close all readers we had opened:
- for(i--;i>=0;i--) {
+ for(SegmentReader reader : readers) {
try {
- readers[i].close();
+ reader.close();
} catch (Throwable ignore) {
// keep going - we want to clean up as much as possible
}
@@ -171,7 +180,7 @@ class DirectoryReader extends IndexReade
this.writer = writer;
- initialize(readers);
+ initialize(readers.toArray(new SegmentReader[readers.size()]));
}
/** This constructor is only used for {@link #reopen()} */
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java Thu May 5 19:15:40 2011
@@ -607,8 +607,16 @@ final class DocumentsWriter {
newSegment.setDelCount(delCount);
newSegment.advanceDelGen();
final String delFileName = newSegment.getDelFileName();
+ if (infoStream != null) {
+ message("flush: write " + delCount + " deletes to " + delFileName);
+ }
boolean success2 = false;
try {
+ // TODO: in the NRT case it'd be better to hand
+ // this del vector over to the
+ // shortly-to-be-opened SegmentReader and let it
+ // carry the changes; there's no reason to use
+ // filesystem as intermediary here.
flushState.deletedDocs.write(directory, delFileName);
success2 = true;
} finally {
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/IndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/IndexWriter.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/IndexWriter.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/IndexWriter.java Thu May 5 19:15:40 2011
@@ -484,8 +484,7 @@ public class IndexWriter implements Clos
private final Map<SegmentInfo,SegmentReader> readerMap = new HashMap<SegmentInfo,SegmentReader>();
- /** Forcefully clear changes for the specified segments,
- * and remove from the pool. This is called on successful merge. */
+ /** Forcefully clear changes for the specified segments. This is called on successful merge. */
synchronized void clear(SegmentInfos infos) throws IOException {
if (infos == null) {
for (Map.Entry<SegmentInfo,SegmentReader> ent: readerMap.entrySet()) {
@@ -493,8 +492,9 @@ public class IndexWriter implements Clos
}
} else {
for (final SegmentInfo info: infos) {
- if (readerMap.containsKey(info)) {
- readerMap.get(info).hasChanges = false;
+ final SegmentReader r = readerMap.get(info);
+ if (r != null) {
+ r.hasChanges = false;
}
}
}
@@ -503,8 +503,8 @@ public class IndexWriter implements Clos
// used only by asserts
public synchronized boolean infoIsLive(SegmentInfo info) {
int idx = segmentInfos.indexOf(info);
- assert idx != -1;
- assert segmentInfos.get(idx) == info;
+ assert idx != -1: "info=" + info + " isn't in pool";
+ assert segmentInfos.get(idx) == info: "info=" + info + " doesn't match live info in segmentInfos";
return true;
}
@@ -574,6 +574,21 @@ public class IndexWriter implements Clos
return false;
}
+
+ public synchronized void drop(SegmentInfos infos) throws IOException {
+ for(SegmentInfo info : infos) {
+ drop(info);
+ }
+ }
+
+ public synchronized void drop(SegmentInfo info) throws IOException {
+ final SegmentReader sr = readerMap.get(info);
+ if (sr != null) {
+ sr.hasChanges = false;
+ readerMap.remove(info);
+ sr.close();
+ }
+ }
/** Remove all our references to readers, and commits
* any pending changes. */
@@ -612,19 +627,18 @@ public class IndexWriter implements Clos
* Commit all segment reader in the pool.
* @throws IOException
*/
- synchronized void commit() throws IOException {
+ synchronized void commit(SegmentInfos infos) throws IOException {
// We invoke deleter.checkpoint below, so we must be
// sync'd on IW:
assert Thread.holdsLock(IndexWriter.this);
- for (Map.Entry<SegmentInfo,SegmentReader> ent : readerMap.entrySet()) {
+ for (SegmentInfo info : infos) {
- SegmentReader sr = ent.getValue();
- if (sr.hasChanges) {
- assert infoIsLive(sr.getSegmentInfo());
+ final SegmentReader sr = readerMap.get(info);
+ if (sr != null && sr.hasChanges) {
+ assert infoIsLive(info);
sr.doCommit(null);
-
// Must checkpoint w/ deleter, because this
// segment reader will have created new _X_N.del
// file.
@@ -3378,6 +3392,24 @@ public class IndexWriter implements Clos
if (result.anyDeletes) {
checkpoint();
}
+ if (!keepFullyDeletedSegments && result.allDeleted != null) {
+ if (infoStream != null) {
+ message("drop 100% deleted segments: " + result.allDeleted);
+ }
+ for(SegmentInfo info : result.allDeleted) {
+ // If a merge has already registered for this
+ // segment, we leave it in the readerPool; the
+ // merge will skip merging it and will then drop
+ // it once it's done:
+ if (!mergingSegments.contains(info)) {
+ segmentInfos.remove(info);
+ if (readerPool != null) {
+ readerPool.drop(info);
+ }
+ }
+ }
+ checkpoint();
+ }
bufferedDeletesStream.prune(segmentInfos);
assert !bufferedDeletesStream.any();
flushControl.clearDeletes();
@@ -3454,8 +3486,12 @@ public class IndexWriter implements Clos
SegmentInfo info = sourceSegments.info(i);
minGen = Math.min(info.getBufferedDeletesGen(), minGen);
int docCount = info.docCount;
- SegmentReader previousReader = merge.readersClone[i];
- SegmentReader currentReader = merge.readers[i];
+ final SegmentReader previousReader = merge.readerClones.get(i);
+ if (previousReader == null) {
+ // Reader was skipped because it was 100% deletions
+ continue;
+ }
+ final SegmentReader currentReader = merge.readers.get(i);
if (previousReader.hasDeletions()) {
// There were deletes on this segment when the merge
@@ -3537,18 +3573,21 @@ public class IndexWriter implements Clos
return false;
}
- ensureValidMerge(merge);
-
commitMergedDeletes(merge, mergedReader);
// If the doc store we are using has been closed and
// is in now compound format (but wasn't when we
// started), then we will switch to the compound
// format as well:
- setMergeDocStoreIsCompoundFile(merge);
assert !segmentInfos.contains(merge.info);
+ final boolean allDeleted = mergedReader.numDocs() == 0;
+
+ if (infoStream != null && allDeleted) {
+ message("merged segment " + merge.info + " is 100% deleted" + (keepFullyDeletedSegments ? "" : "; skipping insert"));
+ }
+
final Set mergedAway = new HashSet<SegmentInfo>(merge.segments);
int segIdx = 0;
int newSegIdx = 0;
@@ -3557,7 +3596,7 @@ public class IndexWriter implements Clos
while(segIdx < curSegCount) {
final SegmentInfo info = segmentInfos.info(segIdx++);
if (mergedAway.contains(info)) {
- if (!inserted) {
+ if (!inserted && (!allDeleted || keepFullyDeletedSegments)) {
segmentInfos.set(segIdx-1, merge.info);
inserted = true;
newSegIdx++;
@@ -3566,7 +3605,20 @@ public class IndexWriter implements Clos
segmentInfos.set(newSegIdx++, info);
}
}
- assert newSegIdx == curSegCount - merge.segments.size() + 1;
+
+ // Either we found place to insert segment, or, we did
+ // not, but only because all segments we merged became
+ // deleted while we are merging, in which case it should
+ // be the case that the new segment is also all deleted:
+ if (!inserted) {
+ assert allDeleted;
+ if (keepFullyDeletedSegments) {
+ segmentInfos.add(0, merge.info);
+ } else {
+ readerPool.drop(merge.info);
+ }
+ }
+
segmentInfos.subList(newSegIdx, segmentInfos.size()).clear();
if (infoStream != null) {
@@ -3588,7 +3640,6 @@ public class IndexWriter implements Clos
// cascade the optimize:
segmentsToOptimize.add(merge.info);
}
-
return true;
}
@@ -3732,8 +3783,9 @@ public class IndexWriter implements Clos
// is running (while synchronized) to avoid race
// condition where two conflicting merges from different
// threads, start
- for(int i=0;i<count;i++)
+ for(int i=0;i<count;i++) {
mergingSegments.add(merge.segments.info(i));
+ }
// Merge is now registered
merge.registerDone = true;
@@ -3792,10 +3844,28 @@ public class IndexWriter implements Clos
// Lock order: IW -> BD
final BufferedDeletesStream.ApplyDeletesResult result = bufferedDeletesStream.applyDeletes(readerPool, merge.segments);
+
if (result.anyDeletes) {
checkpoint();
}
+ if (!keepFullyDeletedSegments && result.allDeleted != null) {
+ if (infoStream != null) {
+ message("drop 100% deleted segments: " + result.allDeleted);
+ }
+ for(SegmentInfo info : result.allDeleted) {
+ segmentInfos.remove(info);
+ if (merge.segments.contains(info)) {
+ mergingSegments.remove(info);
+ merge.segments.remove(info);
+ }
+ }
+ if (readerPool != null) {
+ readerPool.drop(result.allDeleted);
+ }
+ checkpoint();
+ }
+
merge.info.setBufferedDeletesGen(result.gen);
// Lock order: IW -> BD
@@ -3849,8 +3919,9 @@ public class IndexWriter implements Clos
if (merge.registerDone) {
final SegmentInfos sourceSegments = merge.segments;
final int end = sourceSegments.size();
- for(int i=0;i<end;i++)
+ for(int i=0;i<end;i++) {
mergingSegments.remove(sourceSegments.info(i));
+ }
mergingSegments.remove(merge.info);
merge.registerDone = false;
}
@@ -3858,47 +3929,30 @@ public class IndexWriter implements Clos
runningMerges.remove(merge);
}
- private synchronized void setMergeDocStoreIsCompoundFile(MergePolicy.OneMerge merge) {
- final String mergeDocStoreSegment = merge.info.getDocStoreSegment();
- if (mergeDocStoreSegment != null && !merge.info.getDocStoreIsCompoundFile()) {
- final int size = segmentInfos.size();
- for(int i=0;i<size;i++) {
- final SegmentInfo info = segmentInfos.info(i);
- final String docStoreSegment = info.getDocStoreSegment();
- if (docStoreSegment != null &&
- docStoreSegment.equals(mergeDocStoreSegment) &&
- info.getDocStoreIsCompoundFile()) {
- merge.info.setDocStoreIsCompoundFile(true);
- break;
- }
- }
- }
- }
-
private final synchronized void closeMergeReaders(MergePolicy.OneMerge merge, boolean suppressExceptions) throws IOException {
- final int numSegments = merge.segments.size();
+ final int numSegments = merge.readers.size();
if (suppressExceptions) {
// Suppress any new exceptions so we throw the
// original cause
boolean anyChanges = false;
for (int i=0;i<numSegments;i++) {
- if (merge.readers[i] != null) {
+ if (merge.readers.get(i) != null) {
try {
- anyChanges |= readerPool.release(merge.readers[i], false);
+ anyChanges |= readerPool.release(merge.readers.get(i), false);
} catch (Throwable t) {
}
- merge.readers[i] = null;
+ merge.readers.set(i, null);
}
- if (merge.readersClone[i] != null) {
+ if (i < merge.readerClones.size() && merge.readerClones.get(i) != null) {
try {
- merge.readersClone[i].close();
+ merge.readerClones.get(i).close();
} catch (Throwable t) {
}
// This was a private clone and we had the
// only reference
- assert merge.readersClone[i].getRefCount() == 0: "refCount should be 0 but is " + merge.readersClone[i].getRefCount();
- merge.readersClone[i] = null;
+ assert merge.readerClones.get(i).getRefCount() == 0: "refCount should be 0 but is " + merge.readerClones.get(i).getRefCount();
+ merge.readerClones.set(i, null);
}
}
if (anyChanges) {
@@ -3906,16 +3960,16 @@ public class IndexWriter implements Clos
}
} else {
for (int i=0;i<numSegments;i++) {
- if (merge.readers[i] != null) {
- readerPool.release(merge.readers[i], true);
- merge.readers[i] = null;
+ if (merge.readers.get(i) != null) {
+ readerPool.release(merge.readers.get(i), true);
+ merge.readers.set(i, null);
}
- if (merge.readersClone[i] != null) {
- merge.readersClone[i].close();
+ if (i < merge.readerClones.size() && merge.readerClones.get(i) != null) {
+ merge.readerClones.get(i).close();
// This was a private clone and we had the only reference
- assert merge.readersClone[i].getRefCount() == 0;
- merge.readersClone[i] = null;
+ assert merge.readerClones.get(i).getRefCount() == 0;
+ merge.readerClones.set(i, null);
}
}
}
@@ -3934,7 +3988,6 @@ public class IndexWriter implements Clos
int mergedDocCount = 0;
SegmentInfos sourceSegments = merge.segments;
- final int numSegments = sourceSegments.size();
SegmentMerger merger = new SegmentMerger(directory, config.getTermIndexInterval(), mergedName, merge,
payloadProcessorProvider,
@@ -3944,36 +3997,43 @@ public class IndexWriter implements Clos
message("merging " + merge.segString(directory) + " mergeVectors=" + merge.info.getHasVectors());
}
+ merge.readers = new ArrayList<SegmentReader>();
+ merge.readerClones = new ArrayList<SegmentReader>();
+
merge.info.setHasVectors(merger.fieldInfos().hasVectors());
- merge.readers = new SegmentReader[numSegments];
- merge.readersClone = new SegmentReader[numSegments];
// This is try/finally to make sure merger's readers are
// closed:
boolean success = false;
try {
int totDocCount = 0;
+ int segUpto = 0;
+ while(segUpto < sourceSegments.size()) {
- for (int i = 0; i < numSegments; i++) {
- final SegmentInfo info = sourceSegments.info(i);
+ final SegmentInfo info = sourceSegments.info(segUpto);
// Hold onto the "live" reader; we will use this to
// commit merged deletes
- SegmentReader reader = merge.readers[i] = readerPool.get(info, true,
- MERGE_READ_BUFFER_SIZE,
- -1);
+ final SegmentReader reader = readerPool.get(info, true,
+ MERGE_READ_BUFFER_SIZE,
+ -1);
+ merge.readers.add(reader);
// We clone the segment readers because other
// deletes may come in while we're merging so we
// need readers that will not change
- SegmentReader clone = merge.readersClone[i] = (SegmentReader) reader.clone(true);
- merger.add(clone);
+ final SegmentReader clone = (SegmentReader) reader.clone(true);
+ merge.readerClones.add(clone);
+ if (reader.numDocs() > 0) {
+ merger.add(clone);
+ }
totDocCount += clone.numDocs();
+ segUpto++;
}
if (infoStream != null) {
- message("merge: total "+totDocCount+" docs");
+ message("merge: total " + totDocCount + " docs");
}
merge.checkAborted(directory);
@@ -3984,11 +4044,13 @@ public class IndexWriter implements Clos
assert mergedDocCount == totDocCount;
if (infoStream != null) {
- message("merge store matchedCount=" + merger.getMatchedSubReaderCount() + " vs " + numSegments);
+ message("merge store matchedCount=" + merger.getMatchedSubReaderCount() + " vs " + merge.readers.size());
}
- anyNonBulkMerges |= merger.getMatchedSubReaderCount() != numSegments;
+ anyNonBulkMerges |= merger.getMatchedSubReaderCount() != merge.readers.size();
+ assert mergedDocCount == totDocCount: "mergedDocCount=" + mergedDocCount + " vs " + totDocCount;
+
// Very important to do this before opening the reader
// because SegmentReader must know if prox was written for
// this segment:
@@ -4172,6 +4234,10 @@ public class IndexWriter implements Clos
keepFullyDeletedSegments = true;
}
+ boolean getKeepFullyDeletedSegments() {
+ return keepFullyDeletedSegments;
+ }
+
// called only from assert
private boolean filesExist(SegmentInfos toSync) throws IOException {
Collection<String> files = toSync.files(directory, false);
@@ -4227,12 +4293,8 @@ public class IndexWriter implements Clos
if (infoStream != null)
message("startCommit index=" + segString(segmentInfos) + " changeCount=" + changeCount);
- readerPool.commit();
-
+ readerPool.commit(segmentInfos);
toSync = (SegmentInfos) segmentInfos.clone();
- if (!keepFullyDeletedSegments) {
- toSync.pruneDeletedSegments();
- }
assert filesExist(toSync);
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergePolicy.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergePolicy.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergePolicy.java Thu May 5 19:15:40 2011
@@ -72,8 +72,8 @@ public abstract class MergePolicy implem
long mergeGen; // used by IndexWriter
boolean isExternal; // used by IndexWriter
int maxNumSegmentsOptimize; // used by IndexWriter
- SegmentReader[] readers; // used by IndexWriter
- SegmentReader[] readersClone; // used by IndexWriter
+ List<SegmentReader> readers; // used by IndexWriter
+ List<SegmentReader> readerClones; // used by IndexWriter
public final SegmentInfos segments;
boolean aborted;
Throwable error;
Modified: lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/index/RandomIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/index/RandomIndexWriter.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/index/RandomIndexWriter.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/index/RandomIndexWriter.java Thu May 5 19:15:40 2011
@@ -110,6 +110,17 @@ public class RandomIndexWriter implement
}
}
+ public void updateDocument(Term t, Document doc) throws IOException {
+ w.updateDocument(t, doc);
+ if (docCount++ == flushAt) {
+ if (LuceneTestCase.VERBOSE) {
+ System.out.println("RIW.updateDocument: now doing a commit");
+ }
+ w.commit();
+ flushAt += _TestUtil.nextInt(r, 10, 1000);
+ }
+ }
+
public void addIndexes(Directory... dirs) throws CorruptIndexException, IOException {
w.addIndexes(dirs);
}
@@ -148,6 +159,10 @@ public class RandomIndexWriter implement
}
public IndexReader getReader() throws IOException {
+ return getReader(true);
+ }
+
+ public IndexReader getReader(boolean applyDeletions) throws IOException {
getReaderCalled = true;
if (r.nextInt(4) == 2) {
doRandomOptimize();
@@ -156,7 +171,7 @@ public class RandomIndexWriter implement
if (LuceneTestCase.VERBOSE) {
System.out.println("RIW.getReader: use NRT reader");
}
- return w.getReader();
+ return w.getReader(applyDeletions);
} else {
if (LuceneTestCase.VERBOSE) {
System.out.println("RIW.getReader: open new reader");
Modified: lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/search/QueryUtils.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/search/QueryUtils.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/search/QueryUtils.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/search/QueryUtils.java Thu May 5 19:15:40 2011
@@ -6,12 +6,10 @@ import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Random;
-import java.lang.reflect.Method;
import junit.framework.Assert;
import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
@@ -20,6 +18,7 @@ import org.apache.lucene.index.MultiRead
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util._TestUtil;
import static org.apache.lucene.util.LuceneTestCase.TEST_VERSION_CURRENT;
@@ -217,16 +216,7 @@ public class QueryUtils {
}
w.commit();
w.deleteDocuments( new MatchAllDocsQuery() );
- try {
- // Carefully invoke what is a package-private (test
- // only, internal) method on IndexWriter:
- Method m = IndexWriter.class.getDeclaredMethod("keepFullyDeletedSegments");
- m.setAccessible(true);
- m.invoke(w);
- } catch (Exception e) {
- // Should not happen?
- throw new RuntimeException(e);
- }
+ _TestUtil.keepFullyDeletedSegments(w);
w.commit();
if (0 < numDeletedDocs)
Modified: lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/util/_TestUtil.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/util/_TestUtil.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/util/_TestUtil.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/util/_TestUtil.java Thu May 5 19:15:40 2011
@@ -25,6 +25,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintStream;
+import java.lang.reflect.Method;
import java.util.Enumeration;
import java.util.Random;
import java.util.Map;
@@ -342,4 +343,17 @@ public class _TestUtil {
});
Assert.assertEquals("Reflection does not produce same map", reflectedValues, map);
}
+
+ public static void keepFullyDeletedSegments(IndexWriter w) {
+ try {
+ // Carefully invoke what is a package-private (test
+ // only, internal) method on IndexWriter:
+ Method m = IndexWriter.class.getDeclaredMethod("keepFullyDeletedSegments");
+ m.setAccessible(true);
+ m.invoke(w);
+ } catch (Exception e) {
+ // Should not happen?
+ throw new RuntimeException(e);
+ }
+ }
}
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java Thu May 5 19:15:40 2011
@@ -81,7 +81,7 @@ public class TestIndexWriterDelete exten
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockAnalyzer.WHITESPACE, false)).setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
-
+ modifier.setInfoStream(VERBOSE ? System.out : null);
int id = 0;
int value = 100;
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java Thu May 5 19:15:40 2011
@@ -464,14 +464,14 @@ public class TestIndexWriterOnDiskFull e
setReaderPooling(true).
setMergePolicy(newLogMergePolicy(2))
);
- //_TestUtil.keepFullyDeletedSegments(w);
+
+ _TestUtil.keepFullyDeletedSegments(w);
((LogMergePolicy) w.getMergePolicy()).setMergeFactor(2);
Document doc = new Document();
doc.add(newField("f", "doctor who", Field.Store.YES, Field.Index.ANALYZED));
w.addDocument(doc);
-
w.commit();
w.deleteDocuments(new Term("f", "who"));
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIsCurrent.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIsCurrent.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIsCurrent.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestIsCurrent.java Thu May 5 19:15:40 2011
@@ -68,7 +68,7 @@ public class TestIsCurrent extends Lucen
// assert index has a document and reader is up2date
assertEquals("One document should be in the index", 1, writer.numDocs());
- assertTrue("Document added, reader should be stale ", reader.isCurrent());
+ assertTrue("One document added, reader should be current", reader.isCurrent());
// remove document
Term idTerm = new Term("UUID", "1");
Copied: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestRollingUpdates.java (from r1071569, lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestRollingUpdates.java)
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestRollingUpdates.java?p2=lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestRollingUpdates.java&p1=lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestRollingUpdates.java&r1=1071569&r2=1099926&rev=1099926&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestRollingUpdates.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestRollingUpdates.java Thu May 5 19:15:40 2011
@@ -34,7 +34,7 @@ public class TestRollingUpdates extends
final LineFileDocs docs = new LineFileDocs(random);
- final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+ final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
final int SIZE = 200 * RANDOM_MULTIPLIER;
int id = 0;
IndexReader r = null;
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestThreadedOptimize.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestThreadedOptimize.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestThreadedOptimize.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/index/TestThreadedOptimize.java Thu May 5 19:15:40 2011
@@ -66,7 +66,7 @@ public class TestThreadedOptimize extend
}
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(4);
- //writer.setInfoStream(System.out);
+ writer.setInfoStream(VERBOSE ? System.out : null);
Thread[] threads = new Thread[NUM_THREADS];
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/search/TestCachingSpanFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/search/TestCachingSpanFilter.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/search/TestCachingSpanFilter.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/search/TestCachingSpanFilter.java Thu May 5 19:15:40 2011
@@ -22,15 +22,14 @@ import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SerialMergeScheduler;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
public class TestCachingSpanFilter extends LuceneTestCase {
@@ -76,7 +75,9 @@ public class TestCachingSpanFilter exten
docs = searcher.search(constantScore, 1);
assertEquals("[just filter] Should find a hit...", 1, docs.totalHits);
- // now delete the doc, refresh the reader, and see that it's not there
+ // now delete the doc, refresh the reader, and see that
+ // it's not there
+ _TestUtil.keepFullyDeletedSegments(writer.w);
writer.deleteDocuments(new Term("id", "1"));
reader = refreshReader(reader);
Modified: lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java?rev=1099926&r1=1099925&r2=1099926&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java Thu May 5 19:15:40 2011
@@ -22,9 +22,7 @@ import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SerialMergeScheduler;
import org.apache.lucene.index.Term;
@@ -32,6 +30,7 @@ import org.apache.lucene.store.Directory
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.OpenBitSet;
import org.apache.lucene.util.OpenBitSetDISI;
+import org.apache.lucene.util._TestUtil;
public class TestCachingWrapperFilter extends LuceneTestCase {
@@ -193,6 +192,7 @@ public class TestCachingWrapperFilter ex
assertEquals("[just filter] Should find a hit...", 1, docs.totalHits);
// now delete the doc, refresh the reader, and see that it's not there
+ _TestUtil.keepFullyDeletedSegments(writer.w);
writer.deleteDocuments(new Term("id", "1"));
reader = refreshReader(reader);