You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by ab...@apache.org on 2018/04/10 14:11:42 UTC

[03/50] lucene-solr:jira/solr-12181: LUCENE-8233: Add support for soft deletes to IndexWriter

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ecc17f90/lucene/core/src/test/org/apache/lucene/index/TestSoftDeletesRetentionMergePolicy.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSoftDeletesRetentionMergePolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestSoftDeletesRetentionMergePolicy.java
new file mode 100644
index 0000000..3f4f405
--- /dev/null
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSoftDeletesRetentionMergePolicy.java
@@ -0,0 +1,312 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.index;
+
+import java.io.IOException;
+import java.time.Duration;
+import java.time.Instant;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Supplier;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.search.DocValuesFieldExistsQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.LuceneTestCase;
+
+public class TestSoftDeletesRetentionMergePolicy extends LuceneTestCase {
+
+  public void testKeepFullyDeletedSegments() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
+    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
+
+    Document doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new NumericDocValuesField("soft_delete", 1));
+    writer.addDocument(doc);
+    DirectoryReader reader = writer.getReader();
+    assertEquals(1, reader.leaves().size());
+    SegmentReader segmentReader = (SegmentReader) reader.leaves().get(0).reader();
+    MergePolicy policy = new SoftDeletesRetentionMergePolicy("soft_delete",
+        () -> new DocValuesFieldExistsQuery("keep_around"), NoMergePolicy.INSTANCE);
+    assertFalse(policy.keepFullyDeletedSegment(segmentReader));
+    reader.close();
+
+    doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new NumericDocValuesField("keep_around", 1));
+    doc.add(new NumericDocValuesField("soft_delete", 1));
+    writer.addDocument(doc);
+
+    reader = writer.getReader();
+    assertEquals(2, reader.leaves().size());
+    segmentReader = (SegmentReader) reader.leaves().get(0).reader();
+    assertFalse(policy.keepFullyDeletedSegment(segmentReader));
+
+    segmentReader = (SegmentReader) reader.leaves().get(1).reader();
+    assertTrue(policy.keepFullyDeletedSegment(segmentReader));
+
+    IOUtils.close(reader, writer, dir);
+  }
+
+  public void testFieldBasedRetention() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
+    Instant now = Instant.now();
+    Instant time24HoursAgo = now.minus(Duration.ofDays(1));
+    String softDeletesField = "soft_delete";
+    Supplier<Query> docsOfLast24Hours = () -> LongPoint.newRangeQuery("creation_date", time24HoursAgo.toEpochMilli(), now.toEpochMilli());
+    indexWriterConfig.setMergePolicy(new SoftDeletesRetentionMergePolicy(softDeletesField, docsOfLast24Hours,
+        new LogDocMergePolicy()));
+    indexWriterConfig.setSoftDeletesField(softDeletesField);
+    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
+
+    long time28HoursAgo = now.minus(Duration.ofHours(28)).toEpochMilli();
+    Document doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new StringField("version", "1", Field.Store.YES));
+    doc.add(new LongPoint("creation_date", time28HoursAgo));
+    writer.addDocument(doc);
+
+    writer.flush();
+    long time26HoursAgo = now.minus(Duration.ofHours(26)).toEpochMilli();
+    doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new StringField("version", "2", Field.Store.YES));
+    doc.add(new LongPoint("creation_date", time26HoursAgo));
+    writer.softUpdateDocument(new Term("id", "1"), doc, new NumericDocValuesField("soft_delete", 1));
+
+    if (random().nextBoolean()) {
+      writer.flush();
+    }
+    long time23HoursAgo = now.minus(Duration.ofHours(23)).toEpochMilli();
+    doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new StringField("version", "3", Field.Store.YES));
+    doc.add(new LongPoint("creation_date", time23HoursAgo));
+    writer.softUpdateDocument(new Term("id", "1"), doc, new NumericDocValuesField("soft_delete", 1));
+
+    if (random().nextBoolean()) {
+      writer.flush();
+    }
+    long time12HoursAgo = now.minus(Duration.ofHours(12)).toEpochMilli();
+    doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new StringField("version", "4", Field.Store.YES));
+    doc.add(new LongPoint("creation_date", time12HoursAgo));
+    writer.softUpdateDocument(new Term("id", "1"), doc, new NumericDocValuesField("soft_delete", 1));
+
+    if (random().nextBoolean()) {
+      writer.flush();
+    }
+    doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new StringField("version", "5", Field.Store.YES));
+    doc.add(new LongPoint("creation_date", now.toEpochMilli()));
+    writer.softUpdateDocument(new Term("id", "1"), doc, new NumericDocValuesField("soft_delete", 1));
+
+    if (random().nextBoolean()) {
+      writer.flush();
+    }
+    writer.forceMerge(1);
+    DirectoryReader reader = writer.getReader();
+    assertEquals(1, reader.numDocs());
+    assertEquals(3, reader.maxDoc());
+    Set<String> versions = new HashSet<>();
+    versions.add(reader.document(0, Collections.singleton("version")).get("version"));
+    versions.add(reader.document(1, Collections.singleton("version")).get("version"));
+    versions.add(reader.document(2, Collections.singleton("version")).get("version"));
+    assertTrue(versions.contains("5"));
+    assertTrue(versions.contains("4"));
+    assertTrue(versions.contains("3"));
+    IOUtils.close(reader, writer, dir);
+  }
+
+  public void testKeepAllDocsAcrossMerges() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
+    indexWriterConfig.setMergePolicy(new SoftDeletesRetentionMergePolicy("soft_delete",
+        () -> new MatchAllDocsQuery(),
+        indexWriterConfig.getMergePolicy()));
+    indexWriterConfig.setSoftDeletesField("soft_delete");
+    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
+
+    Document doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    writer.softUpdateDocument(new Term("id", "1"), doc,
+        new NumericDocValuesField("soft_delete", 1));
+
+    writer.commit();
+    doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    writer.softUpdateDocument(new Term("id", "1"), doc,
+        new NumericDocValuesField("soft_delete", 1));
+
+    writer.commit();
+    doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new NumericDocValuesField("soft_delete", 1)); // already deleted
+    writer.softUpdateDocument(new Term("id", "1"), doc,
+        new NumericDocValuesField("soft_delete", 1));
+    writer.commit();
+    DirectoryReader reader = writer.getReader();
+    assertEquals(0, reader.numDocs());
+    assertEquals(3, reader.maxDoc());
+    assertEquals(0, writer.numDocs());
+    assertEquals(3, writer.maxDoc());
+    assertEquals(3, reader.leaves().size());
+    reader.close();
+    writer.forceMerge(1);
+    reader = writer.getReader();
+    assertEquals(0, reader.numDocs());
+    assertEquals(3, reader.maxDoc());
+    assertEquals(0, writer.numDocs());
+    assertEquals(3, writer.maxDoc());
+    assertEquals(1, reader.leaves().size());
+    IOUtils.close(reader, writer, dir);
+  }
+
+  /**
+   * Tests soft deletes that carry over deleted documents on merge for history retention.
+   */
+  public void testSoftDeleteWithRetention() throws IOException, InterruptedException {
+    AtomicInteger seqIds = new AtomicInteger(0);
+    Directory dir = newDirectory();
+    IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
+    indexWriterConfig.setMergePolicy(new SoftDeletesRetentionMergePolicy("soft_delete",
+        () -> IntPoint.newRangeQuery("seq_id", seqIds.intValue() - 50, Integer.MAX_VALUE),
+        indexWriterConfig.getMergePolicy()));
+    indexWriterConfig.setSoftDeletesField("soft_delete");
+    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
+    Thread[] threads = new Thread[2 + random().nextInt(3)];
+    CountDownLatch startLatch = new CountDownLatch(1);
+    CountDownLatch started = new CountDownLatch(threads.length);
+    boolean updateSeveralDocs = random().nextBoolean();
+    Set<String> ids = Collections.synchronizedSet(new HashSet<>());
+    for (int i = 0; i < threads.length; i++) {
+      threads[i] = new Thread(() -> {
+        try {
+          started.countDown();
+          startLatch.await();
+          for (int d = 0;  d < 100; d++) {
+            String id = String.valueOf(random().nextInt(10));
+            int seqId = seqIds.incrementAndGet();
+            if (updateSeveralDocs) {
+              Document doc = new Document();
+              doc.add(new StringField("id", id, Field.Store.YES));
+              doc.add(new IntPoint("seq_id", seqId));
+              writer.softUpdateDocuments(new Term("id", id), Arrays.asList(doc, doc),
+                  new NumericDocValuesField("soft_delete", 1));
+            } else {
+              Document doc = new Document();
+              doc.add(new StringField("id", id, Field.Store.YES));
+              doc.add(new IntPoint("seq_id", seqId));
+              writer.softUpdateDocument(new Term("id", id), doc,
+                  new NumericDocValuesField("soft_delete", 1));
+            }
+            ids.add(id);
+          }
+        } catch (IOException | InterruptedException e) {
+          throw new AssertionError(e);
+        }
+      });
+      threads[i].start();
+    }
+    started.await();
+    startLatch.countDown();
+
+    for (int i = 0; i < threads.length; i++) {
+      threads[i].join();
+    }
+    DirectoryReader reader = DirectoryReader.open(writer);
+    IndexSearcher searcher = new IndexSearcher(reader);
+    for (String id : ids) {
+      TopDocs topDocs = searcher.search(new TermQuery(new Term("id", id)), 10);
+      if (updateSeveralDocs) {
+        assertEquals(2, topDocs.totalHits);
+        assertEquals(Math.abs(topDocs.scoreDocs[0].doc - topDocs.scoreDocs[1].doc), 1);
+      } else {
+        assertEquals(1, topDocs.totalHits);
+      }
+    }
+    writer.addDocument(new Document()); // add a dummy doc to trigger a segment here
+    writer.flush();
+    writer.forceMerge(1);
+    DirectoryReader oldReader = reader;
+    reader = DirectoryReader.openIfChanged(reader, writer);
+    if (reader != null) {
+      oldReader.close();
+      assertNotSame(oldReader, reader);
+    } else {
+      reader = oldReader;
+    }
+    assertEquals(1, reader.leaves().size());
+    LeafReaderContext leafReaderContext = reader.leaves().get(0);
+    LeafReader leafReader = leafReaderContext.reader();
+    searcher = new IndexSearcher(new FilterLeafReader(leafReader) {
+      @Override
+      public CacheHelper getCoreCacheHelper() {
+        return leafReader.getCoreCacheHelper();
+      }
+
+      @Override
+      public CacheHelper getReaderCacheHelper() {
+        return leafReader.getReaderCacheHelper();
+      }
+
+      @Override
+      public Bits getLiveDocs() {
+        return null;
+      }
+
+      @Override
+      public int numDocs() {
+        return maxDoc();
+      }
+    });
+    TopDocs seq_id = searcher.search(IntPoint.newRangeQuery("seq_id", seqIds.intValue() - 50, Integer.MAX_VALUE), 10);
+    assertTrue(seq_id.totalHits + " hits", seq_id.totalHits >= 50);
+    searcher = new IndexSearcher(reader);
+    for (String id : ids) {
+      if (updateSeveralDocs) {
+        assertEquals(2, searcher.search(new TermQuery(new Term("id", id)), 10).totalHits);
+      } else {
+        assertEquals(1, searcher.search(new TermQuery(new Term("id", id)), 10).totalHits);
+      }
+    }
+    IOUtils.close(reader, writer, dir);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ecc17f90/lucene/core/src/test/org/apache/lucene/index/TestStressNRT.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestStressNRT.java b/lucene/core/src/test/org/apache/lucene/index/TestStressNRT.java
index b08a85d..e6c91b8 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestStressNRT.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestStressNRT.java
@@ -73,6 +73,7 @@ public class TestStressNRT extends LuceneTestCase {
     final int ndocs = atLeast(50);
     final int nWriteThreads = TestUtil.nextInt(random(), 1, TEST_NIGHTLY ? 10 : 5);
     final int maxConcurrentCommits = TestUtil.nextInt(random(), 1, TEST_NIGHTLY ? 10 : 5);   // number of committers at a time... needed if we want to avoid commit errors due to exceeding the max
+    final boolean useSoftDeletes = random().nextInt(10) < 3;
     
     final boolean tombstones = random().nextBoolean();
 
@@ -106,10 +107,10 @@ public class TestStressNRT extends LuceneTestCase {
 
     Directory dir = newMaybeVirusCheckingDirectory();
 
-    final RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random())));
+    final RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random())), useSoftDeletes);
     writer.setDoRandomForceMergeAssert(false);
     writer.commit();
-    reader = DirectoryReader.open(dir);
+    reader = useSoftDeletes ? writer.getReader() : DirectoryReader.open(dir);
 
     for (int i=0; i<nWriteThreads; i++) {
       Thread thread = new Thread("WRITER"+i) {
@@ -135,7 +136,7 @@ public class TestStressNRT extends LuceneTestCase {
                   }
 
                   DirectoryReader newReader;
-                  if (rand.nextInt(100) < softCommitPercent) {
+                  if (rand.nextInt(100) < softCommitPercent || useSoftDeletes) {
                     // assertU(h.commit("softCommit","true"));
                     if (random().nextBoolean()) {
                       if (VERBOSE) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ecc17f90/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java b/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java
index 0574e70..0318109 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java
@@ -67,7 +67,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat()));
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc, false);
     Document doc = new Document();
     doc.add(makeIDField("id0", 100));
     w.addDocument(doc);
@@ -192,7 +192,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase {
     int minItemsInBlock = TestUtil.nextInt(random(), 2, 50);
     int maxItemsInBlock = 2*(minItemsInBlock-1) + random().nextInt(50);
     iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat(minItemsInBlock, maxItemsInBlock)));
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc, false);
     //IndexWriter w = new IndexWriter(dir, iwc);
     int numDocs = atLeast(1000);
     Map<String,Long> idValues = new HashMap<String,Long>();
@@ -359,7 +359,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat()));
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc, false);
     Document doc = new Document();
     doc.add(makeIDField("id", 17));
     w.addDocument(doc);
@@ -415,7 +415,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat()));
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc, false);
     Document doc = new Document();
     doc.add(makeIDField("id", 17));
     w.addDocument(doc);
@@ -432,7 +432,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat()));
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc, false);
     Document doc = new Document();
     doc.add(makeIDField("id", 17));
     w.addDocument(doc);
@@ -460,7 +460,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase {
       };
     IndexWriterConfig iwc = newIndexWriterConfig(a);
     iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat()));
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc, false);
     Document doc = new Document();
     doc.add(newTextField("id", "id", Field.Store.NO));
     expectThrows(IllegalArgumentException.class, () -> {
@@ -476,7 +476,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat()));
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc, false);
     Document doc = new Document();
     doc.add(newStringField("id", "id", Field.Store.NO));
     expectThrows(IllegalArgumentException.class, () -> {
@@ -493,7 +493,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat()));
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc, false);
     Document doc = new Document();
     doc.add(new StringAndPayloadField("id", "id", new BytesRef("foo")));
     expectThrows(IllegalArgumentException.class, () -> {
@@ -509,7 +509,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat()));
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc, false);
     Document doc = new Document();
     doc.add(makeIDField("id", 17));
     w.addDocument(doc);
@@ -529,7 +529,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat()));
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc, false);
     Document doc = new Document();
 
     FieldType ft = new FieldType(StringAndPayloadField.TYPE);
@@ -555,7 +555,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat()));
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc, false);
     Document doc = new Document();
     doc.add(makeIDField("id", 17));
     doc.add(makeIDField("id", 17));
@@ -572,7 +572,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat()));
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc, false);
     Document doc = new Document();
     // -1
     doc.add(new StringAndPayloadField("id", "id", new BytesRef(new byte[] {(byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff})));
@@ -590,7 +590,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat()));
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc, false);
     Document doc = new Document();
     // Long.MAX_VALUE:
     doc.add(new StringAndPayloadField("id", "id", new BytesRef(new byte[] {(byte)0x7f, (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xff})));
@@ -610,7 +610,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat()));
-    final RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+    final RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc, false);
 
     IDSource idsSource = getRandomIDs();
     int numIDs = atLeast(100);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ecc17f90/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingLiveDocsFormat.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingLiveDocsFormat.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingLiveDocsFormat.java
index f4abb54..e02164b 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingLiveDocsFormat.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingLiveDocsFormat.java
@@ -82,7 +82,7 @@ public class AssertingLiveDocsFormat extends LiveDocsFormat {
         deletedCount++;
       }
     }
-    assert deletedCount == expectedDeleteCount;
+    assert deletedCount == expectedDeleteCount : "deleted: " + deletedCount + " != expected: " + expectedDeleteCount;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ecc17f90/lucene/test-framework/src/java/org/apache/lucene/index/RandomIndexWriter.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/RandomIndexWriter.java b/lucene/test-framework/src/java/org/apache/lucene/index/RandomIndexWriter.java
index aa4da54..b82df68 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/RandomIndexWriter.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/RandomIndexWriter.java
@@ -18,12 +18,14 @@ package org.apache.lucene.index;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Iterator;
 import java.util.Random;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
@@ -41,13 +43,14 @@ import org.apache.lucene.util.TestUtil;
 
 public class RandomIndexWriter implements Closeable {
 
-  public IndexWriter w;
+  public final IndexWriter w;
   private final Random r;
   int docCount;
   int flushAt;
   private double flushAtFactor = 1.0;
   private boolean getReaderCalled;
   private final Analyzer analyzer; // only if WE created it (then we close it)
+  private final double softDeletesRatio;
 
   /** Returns an indexwriter that randomly mixes up thread scheduling (by yielding at test points) */
   public static IndexWriter mockIndexWriter(Directory dir, IndexWriterConfig conf, Random r) throws IOException {
@@ -94,7 +97,7 @@ public class RandomIndexWriter implements Closeable {
 
   /** create a RandomIndexWriter with a random config: Uses MockAnalyzer */
   public RandomIndexWriter(Random r, Directory dir) throws IOException {
-    this(r, dir, LuceneTestCase.newIndexWriterConfig(r, new MockAnalyzer(r)), true);
+    this(r, dir, LuceneTestCase.newIndexWriterConfig(r, new MockAnalyzer(r)), true, r.nextBoolean());
   }
   
   /** create a RandomIndexWriter with a random config */
@@ -104,12 +107,23 @@ public class RandomIndexWriter implements Closeable {
   
   /** create a RandomIndexWriter with the provided config */
   public RandomIndexWriter(Random r, Directory dir, IndexWriterConfig c) throws IOException {
-    this(r, dir, c, false);
+    this(r, dir, c, false, r.nextBoolean());
+  }
+
+  /** create a RandomIndexWriter with the provided config */
+  public RandomIndexWriter(Random r, Directory dir, IndexWriterConfig c, boolean useSoftDeletes) throws IOException {
+    this(r, dir, c, false, useSoftDeletes);
   }
       
-  private RandomIndexWriter(Random r, Directory dir, IndexWriterConfig c, boolean closeAnalyzer) throws IOException {
+  private RandomIndexWriter(Random r, Directory dir, IndexWriterConfig c, boolean closeAnalyzer, boolean useSoftDeletes) throws IOException {
     // TODO: this should be solved in a different way; Random should not be shared (!).
     this.r = new Random(r.nextLong());
+    if (useSoftDeletes) {
+      c.setSoftDeletesField("___soft_deletes");
+      softDeletesRatio = 1.d / (double)1 + r.nextInt(10);
+    } else {
+      softDeletesRatio = 0d;
+    }
     w = mockIndexWriter(dir, c, r);
     flushAt = TestUtil.nextInt(r, 10, 1000);
     if (closeAnalyzer) {
@@ -218,49 +232,39 @@ public class RandomIndexWriter implements Closeable {
 
   public long updateDocuments(Term delTerm, Iterable<? extends Iterable<? extends IndexableField>> docs) throws IOException {
     LuceneTestCase.maybeChangeLiveIndexWriterConfig(r, w.getConfig());
-    long seqNo = w.updateDocuments(delTerm, docs);
+    long seqNo;
+    if (useSoftDeletes()) {
+      seqNo = w.softUpdateDocuments(delTerm, docs, new NumericDocValuesField(w.getConfig().getSoftDeletesField(), 1));
+    } else {
+      seqNo = w.updateDocuments(delTerm, docs);
+    }
     maybeFlushOrCommit();
     return seqNo;
   }
 
+  private boolean useSoftDeletes() {
+    return r.nextDouble() < softDeletesRatio;
+  }
+
   /**
    * Updates a document.
    * @see IndexWriter#updateDocument(Term, Iterable)
    */
   public <T extends IndexableField> long updateDocument(Term t, final Iterable<T> doc) throws IOException {
     LuceneTestCase.maybeChangeLiveIndexWriterConfig(r, w.getConfig());
-    long seqNo;
-    if (r.nextInt(5) == 3) {
-      seqNo = w.updateDocuments(t, new Iterable<Iterable<T>>() {
-
-        @Override
-        public Iterator<Iterable<T>> iterator() {
-          return new Iterator<Iterable<T>>() {
-            boolean done;
-            
-            @Override
-            public boolean hasNext() {
-              return !done;
-            }
-
-            @Override
-            public void remove() {
-              throw new UnsupportedOperationException();
-            }
-
-            @Override
-            public Iterable<T> next() {
-              if (done) {
-                throw new IllegalStateException();
-              }
-              done = true;
-              return doc;
-            }
-          };
-        }
-        });
+    final long seqNo;
+    if (useSoftDeletes()) {
+      if (r.nextInt(5) == 3) {
+        seqNo = w.softUpdateDocuments(t, Arrays.asList(doc), new NumericDocValuesField(w.getConfig().getSoftDeletesField(), 1));
+      } else {
+        seqNo = w.softUpdateDocument(t, doc, new NumericDocValuesField(w.getConfig().getSoftDeletesField(), 1));
+      }
     } else {
-      seqNo = w.updateDocument(t, doc);
+      if (r.nextInt(5) == 3) {
+        seqNo = w.updateDocuments(t, Arrays.asList(doc));
+      } else {
+        seqNo = w.updateDocument(t, doc);
+      }
     }
     maybeFlushOrCommit();
 
@@ -377,7 +381,8 @@ public class RandomIndexWriter implements Closeable {
     if (r.nextInt(20) == 2) {
       doRandomForceMerge();
     }
-    if (!applyDeletions || r.nextBoolean()) {
+    if (!applyDeletions || r.nextBoolean() || w.getConfig().getSoftDeletesField() != null) {
+      // if we have soft deletes we can't open from a directory
       if (LuceneTestCase.VERBOSE) {
         System.out.println("RIW.getReader: use NRT reader");
       }