You are viewing a plain text version of this content. The canonical link for it is here.
Posted to java-commits@lucene.apache.org by mi...@apache.org on 2009/06/19 17:37:05 UTC
svn commit: r786551 - in /lucene/java/trunk/src:
java/org/apache/lucene/index/IndexWriter.java
java/org/apache/lucene/index/LogMergePolicy.java
test/org/apache/lucene/index/TestIndexWriterReader.java
Author: mikemccand
Date: Fri Jun 19 15:37:05 2009
New Revision: 786551
URL: http://svn.apache.org/viewvc?rev=786551&view=rev
Log:
LUCENE-1700: make sure expungeDeletes does its job even when a near real-time reader is in use
Modified:
lucene/java/trunk/src/java/org/apache/lucene/index/IndexWriter.java
lucene/java/trunk/src/java/org/apache/lucene/index/LogMergePolicy.java
lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterReader.java
Modified: lucene/java/trunk/src/java/org/apache/lucene/index/IndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/IndexWriter.java?rev=786551&r1=786550&r2=786551&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/IndexWriter.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/IndexWriter.java Fri Jun 19 15:37:05 2009
@@ -625,6 +625,15 @@
sr.incRef();
return sr;
}
+
+ // Returns a ref
+ public synchronized SegmentReader getIfExists(SegmentInfo info) throws IOException {
+ SegmentReader sr = (SegmentReader) readerMap.get(info);
+ if (sr != null) {
+ sr.incRef();
+ }
+ return sr;
+ }
}
synchronized void acquireWrite() {
@@ -4142,18 +4151,10 @@
docWriter.pushDeletes();
- if (flushDocs)
+ if (flushDocs) {
segmentInfos.add(newSegment);
-
- if (flushDeletes) {
- flushDeletesCount++;
- applyDeletes();
- }
-
- doAfterFlush();
-
- if (flushDocs)
checkpoint();
+ }
if (flushDocs && mergePolicy.useCompoundFile(segmentInfos, newSegment)) {
// Now build compound file
@@ -4173,6 +4174,16 @@
checkpoint();
}
+ if (flushDeletes) {
+ flushDeletesCount++;
+ applyDeletes();
+ }
+
+ if (flushDocs)
+ checkpoint();
+
+ doAfterFlush();
+
return flushDocs;
} catch (OutOfMemoryError oom) {
Modified: lucene/java/trunk/src/java/org/apache/lucene/index/LogMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/LogMergePolicy.java?rev=786551&r1=786550&r2=786551&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/LogMergePolicy.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/LogMergePolicy.java Fri Jun 19 15:37:05 2009
@@ -303,7 +303,16 @@
int firstSegmentWithDeletions = -1;
for(int i=0;i<numSegments;i++) {
final SegmentInfo info = segmentInfos.info(i);
- if (info.hasDeletions()) {
+ boolean deletionsInRAM = false;
+ SegmentReader sr = writer.readerPool.getIfExists(info);
+ try {
+ deletionsInRAM = sr != null && sr.hasDeletions();
+ } finally {
+ if (sr != null) {
+ writer.readerPool.release(sr);
+ }
+ }
+ if (info.hasDeletions() || deletionsInRAM) {
if (verbose())
message(" segment " + info.name + " has deletions");
if (firstSegmentWithDeletions == -1)
Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterReader.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterReader.java?rev=786551&r1=786550&r2=786551&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterReader.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterReader.java Fri Jun 19 15:37:05 2009
@@ -805,4 +805,29 @@
r.close();
dir1.close();
}
+
+ public void testExpungeDeletes() throws Throwable {
+ Directory dir = new MockRAMDirectory();
+ final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(),
+ IndexWriter.MaxFieldLength.LIMITED);
+ Document doc = new Document();
+ doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
+ Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
+ doc.add(id);
+ id.setValue("0");
+ w.addDocument(doc);
+ id.setValue("1");
+ w.addDocument(doc);
+ w.deleteDocuments(new Term("id", "0"));
+
+ IndexReader r = w.getReader();
+ w.expungeDeletes();
+ w.close();
+ r.close();
+ r = IndexReader.open(dir);
+ assertEquals(1, r.numDocs());
+ assertFalse(r.hasDeletions());
+ r.close();
+ dir.close();
+ }
}