You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by sh...@apache.org on 2017/06/25 02:06:29 UTC

[07/47] lucene-solr:feature/autoscaling: LUCENE-7868: use multiple threads to concurrently resolve deletes and DV updates

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58105a20/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
index c0907a5..ad35f32 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
@@ -1758,259 +1758,6 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     dir.close();
   }
 
-  // Make sure if we hit a transient IOException (e.g., disk
-  // full), and then the exception stops (e.g., disk frees
-  // up), so we successfully close IW or open an NRT
-  // reader, we don't lose any deletes or updates:
-  public void testNoLostDeletesOrUpdates() throws Throwable {
-    int deleteCount = 0;
-    int docBase = 0;
-    int docCount = 0;
-
-    MockDirectoryWrapper dir = newMockDirectory();
-    final AtomicBoolean shouldFail = new AtomicBoolean();
-    dir.failOn(new MockDirectoryWrapper.Failure() {
-      
-      @Override
-      public void eval(MockDirectoryWrapper dir) throws IOException {
-        if (shouldFail.get() == false) {
-          // Only sometimes throw the exc, so we get
-          // it sometimes on creating the file, on
-          // flushing buffer, on closing the file:
-          return;
-        }
-        
-        if (random().nextInt(3) != 2) {
-          return;
-        }
-
-        StackTraceElement[] trace = Thread.currentThread().getStackTrace();
-
-        boolean sawSeal = false;
-        boolean sawWrite = false;
-        for (int i = 0; i < trace.length; i++) {
-          if ("sealFlushedSegment".equals(trace[i].getMethodName())) {
-            sawSeal = true;
-            break;
-          }
-          if ("writeLiveDocs".equals(trace[i].getMethodName()) || "writeFieldUpdates".equals(trace[i].getMethodName())) {
-            sawWrite = true;
-          }
-        }
-        
-        // Don't throw exc if we are "flushing", else
-        // the segment is aborted and docs are lost:
-        if (sawWrite && sawSeal == false) {
-          if (VERBOSE) {
-            System.out.println("TEST: now fail; thread=" + Thread.currentThread().getName() + " exc:");
-            new Throwable().printStackTrace(System.out);
-          }
-          shouldFail.set(false);
-          throw new FakeIOException();
-        }
-      }
-    });
-    
-    RandomIndexWriter w = null;
-
-    boolean tragic = false;
-
-    for(int iter=0;iter<10*RANDOM_MULTIPLIER;iter++) {
-      int numDocs = atLeast(100);
-      if (VERBOSE) {
-        System.out.println("\nTEST: iter=" + iter + " numDocs=" + numDocs + " docBase=" + docBase + " delCount=" + deleteCount);
-      }
-      if (w == null) {
-        IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
-        w = new RandomIndexWriter(random(), dir, iwc);
-        // Since we hit exc during merging, a partial
-        // forceMerge can easily return when there are still
-        // too many segments in the index:
-        w.setDoRandomForceMergeAssert(false);
-      }
-      for(int i=0;i<numDocs;i++) {
-        Document doc = new Document();
-        doc.add(new StringField("id", ""+(docBase+i), Field.Store.NO));
-        doc.add(new NumericDocValuesField("f", 1L));
-        doc.add(new NumericDocValuesField("cf", 2L));
-        doc.add(new BinaryDocValuesField("bf", TestBinaryDocValuesUpdates.toBytes(1L)));
-        doc.add(new BinaryDocValuesField("bcf", TestBinaryDocValuesUpdates.toBytes(2L)));
-        w.addDocument(doc);
-      }
-      docCount += numDocs;
-
-      // TODO: we could make the test more evil, by letting
-      // it throw more than one exc, randomly, before "recovering"
-
-      // TODO: we could also install an infoStream and try
-      // to fail in "more evil" places inside BDS
-
-      shouldFail.set(true);
-      boolean doClose = false;
-      try {
-        for(int i=0;i<numDocs;i++) {
-          if (random().nextInt(10) == 7) {
-            boolean fieldUpdate = random().nextBoolean();
-            int docid = docBase + i;
-            if (fieldUpdate) {
-              long value = iter;
-              if (VERBOSE) {
-                System.out.println("  update id=" + docid + " to value " + value);
-              }
-              Term idTerm = new Term("id", Integer.toString(docid));
-              if (random().nextBoolean()) { // update only numeric field
-                w.updateDocValues(idTerm, new NumericDocValuesField("f", value), new NumericDocValuesField("cf", value*2));
-              } else if (random().nextBoolean()) {
-                w.updateDocValues(idTerm, new BinaryDocValuesField("bf", TestBinaryDocValuesUpdates.toBytes(value)),
-                    new BinaryDocValuesField("bcf", TestBinaryDocValuesUpdates.toBytes(value*2)));
-              } else {
-                w.updateDocValues(idTerm, 
-                    new NumericDocValuesField("f", value), 
-                    new NumericDocValuesField("cf", value*2),
-                    new BinaryDocValuesField("bf", TestBinaryDocValuesUpdates.toBytes(value)),
-                    new BinaryDocValuesField("bcf", TestBinaryDocValuesUpdates.toBytes(value*2)));
-              }
-            }
-            
-            // sometimes do both deletes and updates
-            if (!fieldUpdate || random().nextBoolean()) {
-              if (VERBOSE) {
-                System.out.println("  delete id=" + docid);
-              }
-              deleteCount++;
-              w.deleteDocuments(new Term("id", ""+docid));
-            }
-          }
-        }
-
-        // Trigger writeLiveDocs + writeFieldUpdates so we hit fake exc:
-        IndexReader r = w.getReader();
-
-        // Sometimes we will make it here (we only randomly
-        // throw the exc):
-        assertEquals(docCount-deleteCount, r.numDocs());
-        r.close();
-        
-        // Sometimes close, so the disk full happens on close:
-        if (random().nextBoolean()) {
-          if (VERBOSE) {
-            System.out.println("  now close writer");
-          }
-          doClose = true;
-          w.commit();
-          w.close();
-          w = null;
-        }
-
-      } catch (Throwable t) {
-        // FakeIOException can be thrown from mergeMiddle, in which case IW
-        // registers it before our CMS gets to suppress it. IW.forceMerge later
-        // throws it as a wrapped IOE, so don't fail in this case.
-        if (t instanceof FakeIOException || (t.getCause() instanceof FakeIOException)) {
-          // expected
-          if (VERBOSE) {
-            System.out.println("TEST: hit expected IOE");
-          }
-          if (t instanceof AlreadyClosedException) {
-            // FakeIOExc struck during merge and writer is now closed:
-            w = null;
-            tragic = true;
-          }
-        } else {
-          throw t;
-        }
-      }
-      shouldFail.set(false);
-
-      if (w != null) {
-        MergeScheduler ms = w.w.getConfig().getMergeScheduler();
-        if (ms instanceof ConcurrentMergeScheduler) {
-          ((ConcurrentMergeScheduler) ms).sync();
-        }
-
-        if (w.w.getTragicException() != null) {
-          // Tragic exc in CMS closed the writer
-          w = null;
-        }
-      }
-
-      IndexReader r;
-
-      if (doClose && w != null) {
-        if (VERBOSE) {
-          System.out.println("  now 2nd close writer");
-        }
-        w.close();
-        w = null;
-      }
-
-      if (w == null || random().nextBoolean()) {
-        // Open non-NRT reader, to make sure the "on
-        // disk" bits are good:
-        if (VERBOSE) {
-          System.out.println("TEST: verify against non-NRT reader");
-        }
-        if (w != null) {
-          w.commit();
-        }
-        r = DirectoryReader.open(dir);
-      } else {
-        if (VERBOSE) {
-          System.out.println("TEST: verify against NRT reader");
-        }
-        r = w.getReader();
-      }
-      if (tragic == false) {
-        assertEquals(docCount-deleteCount, r.numDocs());
-      }
-      BytesRef scratch = new BytesRef();
-      for (LeafReaderContext context : r.leaves()) {
-        LeafReader reader = context.reader();
-        Bits liveDocs = reader.getLiveDocs();
-        NumericDocValues f = reader.getNumericDocValues("f");
-        NumericDocValues cf = reader.getNumericDocValues("cf");
-        BinaryDocValues bf = reader.getBinaryDocValues("bf");
-        BinaryDocValues bcf = reader.getBinaryDocValues("bcf");
-        for (int i = 0; i < reader.maxDoc(); i++) {
-          if (liveDocs == null || liveDocs.get(i)) {
-            assertEquals(i, f.advance(i));
-            assertEquals(i, cf.advance(i));
-            assertEquals(i, bf.advance(i));
-            assertEquals(i, bcf.advance(i));
-            assertEquals("doc=" + (docBase + i), cf.longValue(), f.longValue() * 2);
-            assertEquals("doc=" + (docBase + i), TestBinaryDocValuesUpdates.getValue(bcf), TestBinaryDocValuesUpdates.getValue(bf) * 2);
-          }
-        }
-      }
-
-      r.close();
-
-      // Sometimes re-use RIW, other times open new one:
-      if (w != null && random().nextBoolean()) {
-        if (VERBOSE) {
-          System.out.println("TEST: close writer");
-        }
-        w.close();
-        w = null;
-      }
-
-      docBase += numDocs;
-    }
-
-    if (w != null) {
-      w.close();
-    }
-
-    // Final verify:
-    if (tragic == false) {
-      IndexReader r = DirectoryReader.open(dir);
-      assertEquals(docCount-deleteCount, r.numDocs());
-      r.close();
-    }
-
-    dir.close();
-  }
-  
   // kind of slow, but omits positions, so just CPU
   @Nightly
   public void testTooManyTokens() throws Exception {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58105a20/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java
index f8abc82..db95f5f 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java
@@ -158,6 +158,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     writer.updateDocument(new Term("id", id10), newDoc);
     assertFalse(r1.isCurrent());
 
+    System.out.println("TEST: now get reader");
     DirectoryReader r2 = writer.getReader();
     assertTrue(r2.isCurrent());
     assertEquals(0, count(new Term("id", id10), r2));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58105a20/lucene/core/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java b/lucene/core/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java
index 871715f..d7d6262 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java
@@ -53,16 +53,16 @@ public class TestNRTReaderWithThreads extends LuceneTestCase {
     while ((System.currentTimeMillis() - startTime) < duration) {
       Thread.sleep(100);
     }
-    int delCount = 0;
-    int addCount = 0;
     for (int x=0; x < indexThreads.length; x++) {
       indexThreads[x].run = false;
       assertNull("Exception thrown: "+indexThreads[x].ex, indexThreads[x].ex);
-      addCount += indexThreads[x].addCount;
-      delCount += indexThreads[x].delCount;
     }
+    int delCount = 0;
+    int addCount = 0;
     for (int x=0; x < indexThreads.length; x++) {
       indexThreads[x].join();
+      addCount += indexThreads[x].addCount;
+      delCount += indexThreads[x].delCount;
     }
     for (int x=0; x < indexThreads.length; x++) {
       assertNull("Exception thrown: "+indexThreads[x].ex, indexThreads[x].ex);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58105a20/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java b/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java
index 94da587..f6a328a 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java
@@ -17,8 +17,10 @@
 package org.apache.lucene.index;
 
 import java.io.IOException;
-import java.util.HashSet;
+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Random;
 import java.util.Set;
@@ -33,6 +35,7 @@ import org.apache.lucene.codecs.asserting.AssertingDocValuesFormat;
 import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.Field;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField;
@@ -50,7 +53,6 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
-import org.junit.Test;
 
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 
@@ -83,9 +85,14 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     writer.updateDocument       (new Term("id","doc-2"), doc(2, 2000000000L ));
     writer.updateDocument       (new Term("id","doc-2"), doc(2, 2222222222L ));
     writer.updateNumericDocValue(new Term("id","doc-1"), "val", 1111111111L );
-    writer.commit();
-    
-    final DirectoryReader reader = DirectoryReader.open(dir);
+
+    final DirectoryReader reader;
+    if (random().nextBoolean()) {
+      writer.commit();
+      reader = DirectoryReader.open(dir);
+    } else {
+      reader = DirectoryReader.open(writer);
+    }
     final IndexSearcher searcher = new IndexSearcher(reader);
     TopFieldDocs td;
     
@@ -128,14 +135,11 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     int numDocUpdates = 0;
     int numValueUpdates = 0;
 
-    //System.out.println("TEST: numOperations=" + numOperations + " ADD_CUTOFF=" + ADD_CUTOFF + " UPD_CUTOFF=" + UPD_CUTOFF);
-
     for (int i = 0; i < numOperations; i++) {
       final int op = TestUtil.nextInt(random(), 1, 100);
       final long val = random().nextLong();
       if (op <= ADD_CUTOFF) {
         final int id = expected.size();
-        //System.out.println("TEST i=" + i + ": addDocument id=" + id + " val=" + val);
         expected.put(id, val);
         writer.addDocument(doc(id, val));
       } else {
@@ -143,11 +147,9 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
         expected.put(id, val);
         if (op <= UPD_CUTOFF) {
           numDocUpdates++;
-          //System.out.println("TEST i=" + i + ": updateDocument id=" + id + " val=" + val);
           writer.updateDocument(new Term("id","doc-" + id), doc(id, val));
         } else {
           numValueUpdates++;
-          //System.out.println("TEST i=" + i + ": updateDV id=" + id + " val=" + val);
           writer.updateNumericDocValue(new Term("id","doc-" + id), "val", val);
         }
       }
@@ -171,7 +173,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
   }
 
   
-  @Test
   public void testUpdatesAreFlushed() throws IOException {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
@@ -194,7 +195,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
   
-  @Test
   public void testSimple() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -230,7 +230,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
   
-  @Test
   public void testUpdateFewSegments() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -279,7 +278,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
   
-  @Test
   public void testReopen() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -295,6 +293,9 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
       writer.commit();
       reader1 = DirectoryReader.open(dir);
     }
+    if (VERBOSE) {
+      System.out.println("TEST: isNRT=" + isNRT);
+    }
 
     // update doc
     writer.updateNumericDocValue(new Term("id", "doc-0"), "val", 10L); // update doc-0's value to 10
@@ -303,6 +304,9 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     }
     
     // reopen reader and assert only it sees the update
+    if (VERBOSE) {
+      System.out.println("TEST: openIfChanged");
+    }
     final DirectoryReader reader2 = DirectoryReader.openIfChanged(reader1);
     assertNotNull(reader2);
     assertTrue(reader1 != reader2);
@@ -318,7 +322,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     IOUtils.close(reader1, reader2, dir);
   }
   
-  @Test
   public void testUpdatesAndDeletes() throws Exception {
     // create an index with a segment with only deletes, a segment with both
     // deletes and updates and a segment with only updates
@@ -368,7 +371,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
   
-  @Test
   public void testUpdatesWithDeletes() throws Exception {
     // update and delete different documents in the same commit session
     Directory dir = newDirectory();
@@ -405,7 +407,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
 
-  @Test
   public void testMultipleDocValuesTypes() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -460,7 +461,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
   
-  @Test
   public void testMultipleNumericDocValues() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -495,7 +495,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
   
-  @Test
   public void testDocumentWithNoValue() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -510,24 +509,35 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
       writer.addDocument(doc);
     }
     writer.commit();
+    if (VERBOSE) {
+      System.out.println("TEST: first commit");
+    }
     
     // update all docs' ndv field
     writer.updateNumericDocValue(new Term("dvUpdateKey", "dv"), "ndv", 17L);
+    if (VERBOSE) {
+      System.out.println("TEST: first close");
+    }
     writer.close();
+    if (VERBOSE) {
+      System.out.println("TEST: done close");
+    }
     
     final DirectoryReader reader = DirectoryReader.open(dir);
+    if (VERBOSE) {
+      System.out.println("TEST: got reader=reader");
+    }
     LeafReader r = reader.leaves().get(0).reader();
     NumericDocValues ndv = r.getNumericDocValues("ndv");
     for (int i = 0; i < r.maxDoc(); i++) {
       assertEquals(i, ndv.nextDoc());
-      assertEquals(17, ndv.longValue());
+      assertEquals("doc=" + i + " has wrong numeric doc value", 17, ndv.longValue());
     }
     
     reader.close();
     dir.close();
   }
   
-  @Test
   public void testUpdateNonNumericDocValuesField() throws Exception {
     // we don't support adding new fields or updating existing non-numeric-dv
     // fields through numeric updates
@@ -554,7 +564,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
   
-  @Test
   public void testDifferentDVFormatPerField() throws Exception {
     // test relies on separate instances of the "same thing"
     assert TestUtil.getDefaultDocValuesFormat() != TestUtil.getDefaultDocValuesFormat();
@@ -595,7 +604,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
   
-  @Test
   public void testUpdateSameDocMultipleTimes() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -622,7 +630,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
   
-  @Test
   public void testSegmentMerges() throws Exception {
     Directory dir = newDirectory();
     Random random = random();
@@ -631,28 +638,54 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     
     int docid = 0;
     int numRounds = atLeast(10);
+    if (VERBOSE) {
+      System.out.println("TEST: " + numRounds + " rounds");
+    }
     for (int rnd = 0; rnd < numRounds; rnd++) {
+      if (VERBOSE) {
+        System.out.println("\nTEST: round=" + rnd);
+      }
       Document doc = new Document();
       doc.add(new StringField("key", "doc", Store.NO));
       doc.add(new NumericDocValuesField("ndv", -1));
       int numDocs = atLeast(30);
+      if (VERBOSE) {
+        System.out.println("TEST: " + numDocs + " docs");
+      }
       for (int i = 0; i < numDocs; i++) {
         doc.removeField("id");
-        doc.add(new StringField("id", Integer.toString(docid++), Store.NO));
+        doc.add(new StringField("id", Integer.toString(docid), Store.YES));
+        if (VERBOSE) {
+          System.out.println("TEST: add doc id=" + docid);
+        }
         writer.addDocument(doc);
+        docid++;
       }
       
       long value = rnd + 1;
+      if (VERBOSE) {
+        System.out.println("TEST: update all ndv values to " + value);
+      }
       writer.updateNumericDocValue(new Term("key", "doc"), "ndv", value);
       
-      if (random.nextDouble() < 0.2) { // randomly delete some docs
-        writer.deleteDocuments(new Term("id", Integer.toString(random.nextInt(docid))));
+      if (random.nextDouble() < 0.2) { // randomly delete one doc
+        int delID = random.nextInt(docid);
+        if (VERBOSE) {
+          System.out.println("TEST: delete random doc id=" + delID);
+        }
+        writer.deleteDocuments(new Term("id", Integer.toString(delID)));
       }
       
       // randomly commit or reopen-IW (or nothing), before forceMerge
       if (random.nextDouble() < 0.4) {
+        if (VERBOSE) {
+          System.out.println("\nTEST: commit writer");
+        }
         writer.commit();
       } else if (random.nextDouble() < 0.1) {
+        if (VERBOSE) {
+          System.out.println("\nTEST: close writer");
+        }
         writer.close();
         conf = newIndexWriterConfig(new MockAnalyzer(random));
         writer = new IndexWriter(dir, conf);
@@ -665,28 +698,49 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
       // and some MPs might now merge it, thereby invalidating test's
       // assumption that the reader has no deletes).
       doc = new Document();
-      doc.add(new StringField("id", Integer.toString(docid++), Store.NO));
+      doc.add(new StringField("id", Integer.toString(docid), Store.YES));
       doc.add(new StringField("key", "doc", Store.NO));
       doc.add(new NumericDocValuesField("ndv", value));
+      if (VERBOSE) {
+        System.out.println("\nTEST: add one more doc id=" + docid);
+      }
       writer.addDocument(doc);
+      docid++;
 
+      if (VERBOSE) {
+        System.out.println("\nTEST: force merge");
+      }
       writer.forceMerge(1, true);
+      
       final DirectoryReader reader;
       if (random.nextBoolean()) {
+        if (VERBOSE) {
+          System.out.println("\nTEST: commit and open non-NRT reader");
+        }
         writer.commit();
         reader = DirectoryReader.open(dir);
       } else {
+        if (VERBOSE) {
+          System.out.println("\nTEST: open NRT reader");
+        }
         reader = DirectoryReader.open(writer);
       }
+      if (VERBOSE) {
+        System.out.println("TEST: got reader=" + reader);
+      }
       
       assertEquals(1, reader.leaves().size());
       final LeafReader r = reader.leaves().get(0).reader();
       assertNull("index should have no deletes after forceMerge", r.getLiveDocs());
       NumericDocValues ndv = r.getNumericDocValues("ndv");
       assertNotNull(ndv);
+      if (VERBOSE) {
+        System.out.println("TEST: maxDoc=" + r.maxDoc());
+      }
       for (int i = 0; i < r.maxDoc(); i++) {
+        Document rdoc = r.document(i);
         assertEquals(i, ndv.nextDoc());
-        assertEquals(value, ndv.longValue());
+        assertEquals("docid=" + i + " has wrong ndv value; doc=" + rdoc, value, ndv.longValue());
       }
       reader.close();
     }
@@ -695,7 +749,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
   
-  @Test
   public void testUpdateDocumentByMultipleTerms() throws Exception {
     // make sure the order of updates is respected, even when multiple terms affect same document
     Directory dir = newDirectory();
@@ -723,8 +776,141 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     reader.close();
     dir.close();
   }
+
+  static class OneSortDoc implements Comparable<OneSortDoc> {
+    public long value;
+    public final long sortValue;
+    public final int id;
+    public boolean deleted;
+
+    public OneSortDoc(int id, long value, long sortValue) {
+      this.value = value;
+      this.sortValue = sortValue;
+      this.id = id;
+    }
+
+    @Override
+    public int compareTo(OneSortDoc other) {
+      int cmp = Long.compare(sortValue, other.sortValue);
+      if (cmp == 0) {
+        cmp = Integer.compare(id, other.id);
+        assert cmp != 0;
+      }
+      return cmp;
+    }
+  }
+
+  public void testSortedIndex() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig iwc = newIndexWriterConfig();
+    iwc.setIndexSort(new Sort(new SortField("sort", SortField.Type.LONG)));
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+
+    int valueRange = TestUtil.nextInt(random(), 1, 1000);
+    int sortValueRange = TestUtil.nextInt(random(), 1, 1000);
+
+    int refreshChance = TestUtil.nextInt(random(), 5, 200);
+    int deleteChance = TestUtil.nextInt(random(), 2, 100);
+
+    int idUpto = 0;
+    int deletedCount = 0;
+    
+    List<OneSortDoc> docs = new ArrayList<>();
+    DirectoryReader r = w.getReader();
+
+    int numIters = atLeast(1000);
+    for(int iter=0;iter<numIters;iter++) {
+      int value = random().nextInt(valueRange);
+      if (docs.isEmpty() || random().nextInt(3) == 1) {
+        int id = docs.size();
+        // add new doc
+        Document doc = new Document();
+        doc.add(newStringField("id", Integer.toString(id), Field.Store.YES));
+        doc.add(new NumericDocValuesField("number", value));
+        int sortValue = random().nextInt(sortValueRange);
+        doc.add(new NumericDocValuesField("sort", sortValue));
+        if (VERBOSE) {
+          System.out.println("TEST: iter=" + iter + " add doc id=" + id + " sortValue=" + sortValue + " value=" + value);
+        }
+        w.addDocument(doc);
+
+        docs.add(new OneSortDoc(id, value, sortValue));
+      } else {
+        // update existing doc value
+        int idToUpdate = random().nextInt(docs.size());
+        if (VERBOSE) {
+          System.out.println("TEST: iter=" + iter + " update doc id=" + idToUpdate + " new value=" + value);
+        }
+        w.updateNumericDocValue(new Term("id", Integer.toString(idToUpdate)), "number", (long) value);
+
+        docs.get(idToUpdate).value = value;
+      }
+
+      if (random().nextInt(deleteChance) == 0) {
+        int idToDelete = random().nextInt(docs.size());
+        if (VERBOSE) {
+          System.out.println("TEST: delete doc id=" + idToDelete);
+        }
+        w.deleteDocuments(new Term("id", Integer.toString(idToDelete)));
+        if (docs.get(idToDelete).deleted == false) {
+          docs.get(idToDelete).deleted = true;
+          deletedCount++;
+        }
+      }
+
+      if (random().nextInt(refreshChance) == 0) {
+        if (VERBOSE) {
+          System.out.println("TEST: now get reader; old reader=" + r);
+        }
+        DirectoryReader r2 = w.getReader();
+        r.close();
+        r = r2;
+
+        if (VERBOSE) {
+          System.out.println("TEST: got reader=" + r);
+        }
+
+        int liveCount = 0;
+
+        for (LeafReaderContext ctx : r.leaves()) {
+          LeafReader leafReader = ctx.reader();
+          NumericDocValues values = leafReader.getNumericDocValues("number");
+          NumericDocValues sortValues = leafReader.getNumericDocValues("sort");
+          Bits liveDocs = leafReader.getLiveDocs();
+
+          long lastSortValue = Long.MIN_VALUE;
+          for (int i=0;i<leafReader.maxDoc();i++) {
+
+            Document doc = leafReader.document(i);
+            OneSortDoc sortDoc = docs.get(Integer.parseInt(doc.get("id")));
+
+            assertEquals(i, values.nextDoc());
+            assertEquals(i, sortValues.nextDoc());
+
+            if (liveDocs != null && liveDocs.get(i) == false) {
+              assertTrue(sortDoc.deleted);
+              continue;
+            }
+            assertFalse(sortDoc.deleted);
+        
+            assertEquals(sortDoc.value, values.longValue());
+
+            long sortValue = sortValues.longValue();
+            assertEquals(sortDoc.sortValue, sortValue);
+            
+            assertTrue(sortValue >= lastSortValue);
+            lastSortValue = sortValue;
+            liveCount++;
+          }
+        }
+
+        assertEquals(docs.size() - deletedCount, liveCount);
+      }
+    }
+
+    IOUtils.close(r, w, dir);
+  }
   
-  @Test
   public void testManyReopensAndFields() throws Exception {
     Directory dir = newDirectory();
     final Random random = random();
@@ -735,6 +921,9 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     IndexWriter writer = new IndexWriter(dir, conf);
     
     final boolean isNRT = random.nextBoolean();
+    if (VERBOSE) {
+      System.out.println("TEST: isNRT=" + isNRT);
+    }
     DirectoryReader reader;
     if (isNRT) {
       reader = DirectoryReader.open(writer);
@@ -753,45 +942,58 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     int docID = 0;
     for (int i = 0; i < numRounds; i++) {
       int numDocs = atLeast(5);
-//      System.out.println("[" + Thread.currentThread().getName() + "]: round=" + i + ", numDocs=" + numDocs);
+      if (VERBOSE) {
+        System.out.println("TEST: round=" + i + ", numDocs=" + numDocs);
+      }
       for (int j = 0; j < numDocs; j++) {
         Document doc = new Document();
-        doc.add(new StringField("id", "doc-" + docID, Store.NO));
+        doc.add(new StringField("id", "doc-" + docID, Store.YES));
         doc.add(new StringField("key", "all", Store.NO)); // update key
         // add all fields with their current value
         for (int f = 0; f < fieldValues.length; f++) {
           doc.add(new NumericDocValuesField("f" + f, fieldValues[f]));
         }
         writer.addDocument(doc);
+        if (VERBOSE) {
+          System.out.println("TEST add doc id=" + docID);
+        }
         ++docID;
       }
       
       int fieldIdx = random.nextInt(fieldValues.length);
+
       String updateField = "f" + fieldIdx;
+      if (VERBOSE) {
+        System.out.println("TEST: update field=" + updateField + " for all docs to value=" + (fieldValues[fieldIdx]+1));
+      }
       writer.updateNumericDocValue(new Term("key", "all"), updateField, ++fieldValues[fieldIdx]);
-//      System.out.println("[" + Thread.currentThread().getName() + "]: updated field '" + updateField + "' to value " + fieldValues[fieldIdx]);
       
       if (random.nextDouble() < 0.2) {
-        int deleteDoc = random.nextInt(docID); // might also delete an already deleted document, ok!
+        int deleteDoc = random.nextInt(numDocs); // might also delete an already deleted document, ok!
+        if (VERBOSE) {
+          System.out.println("TEST: delete doc id=" + deleteDoc);
+        }
         writer.deleteDocuments(new Term("id", "doc-" + deleteDoc));
-//        System.out.println("[" + Thread.currentThread().getName() + "]: deleted document: doc-" + deleteDoc);
       }
       
       // verify reader
-      if (!isNRT) {
+      if (isNRT == false) {
+        if (VERBOSE) {
+          System.out.println("TEST: now commit");
+        }
         writer.commit();
       }
       
-//      System.out.println("[" + Thread.currentThread().getName() + "]: reopen reader: " + reader);
       DirectoryReader newReader = DirectoryReader.openIfChanged(reader);
       assertNotNull(newReader);
       reader.close();
       reader = newReader;
-//      System.out.println("[" + Thread.currentThread().getName() + "]: reopened reader: " + reader);
+      if (VERBOSE) {
+        System.out.println("TEST: got reader maxDoc=" + reader.maxDoc() + " " + reader);
+      }
       assertTrue(reader.numDocs() > 0); // we delete at most one document per round
       for (LeafReaderContext context : reader.leaves()) {
         LeafReader r = context.reader();
-//        System.out.println(((SegmentReader) r).getSegmentName());
         Bits liveDocs = r.getLiveDocs();
         for (int field = 0; field < fieldValues.length; field++) {
           String f = "f" + field;
@@ -800,21 +1002,18 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
           int maxDoc = r.maxDoc();
           for (int doc = 0; doc < maxDoc; doc++) {
             if (liveDocs == null || liveDocs.get(doc)) {
-//              System.out.println("doc=" + (doc + context.docBase) + " f='" + f + "' vslue=" + ndv.get(doc));
-              assertEquals(doc, ndv.advance(doc));
-              assertEquals("invalid value for doc=" + doc + ", field=" + f + ", reader=" + r, fieldValues[field], ndv.longValue());
+              assertEquals("advanced to wrong doc in seg=" + r, doc, ndv.advance(doc));
+              assertEquals("invalid value for docID=" + doc + " id=" + r.document(doc).get("id") + ", field=" + f + ", reader=" + r + " doc=" + r.document(doc), fieldValues[field], ndv.longValue());
             }
           }
         }
       }
-//      System.out.println();
     }
 
     writer.close();
     IOUtils.close(reader, dir);
   }
   
-  @Test
   public void testUpdateSegmentWithNoDocValues() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -866,7 +1065,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
   
-  @Test
   public void testUpdateSegmentWithNoDocValues2() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -949,7 +1147,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
   
-  @Test
   public void testUpdateSegmentWithPostingButNoDocValues() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -992,7 +1189,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
   
-  @Test
   public void testUpdateNumericDVFieldWithSameNameAsPostingField() throws Exception {
     // this used to fail because FieldInfos.Builder neglected to update
     // globalFieldMaps.docValuesTypes map
@@ -1017,7 +1213,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
   
-  @Test
   public void testStressMultiThreading() throws Exception {
     final Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -1069,28 +1264,23 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
               final int field = random().nextInt(numFields);
               final String f = "f" + field;
               final String cf = "cf" + field;
-//              System.out.println("[" + Thread.currentThread().getName() + "] numUpdates=" + numUpdates + " updateTerm=" + t + " field=" + field);
               long updValue = random.nextInt();
               writer.updateDocValues(t, new NumericDocValuesField(f, updValue), new NumericDocValuesField(cf, updValue*2));
               
               if (random.nextDouble() < 0.2) {
                 // delete a random document
                 int doc = random.nextInt(numDocs);
-//                System.out.println("[" + Thread.currentThread().getName() + "] deleteDoc=doc" + doc);
                 writer.deleteDocuments(new Term("id", "doc" + doc));
               }
   
               if (random.nextDouble() < 0.05) { // commit every 20 updates on average
-//                  System.out.println("[" + Thread.currentThread().getName() + "] commit");
                 writer.commit();
               }
               
               if (random.nextDouble() < 0.1) { // reopen NRT reader (apply updates), on average once every 10 updates
                 if (reader == null) {
-//                  System.out.println("[" + Thread.currentThread().getName() + "] open NRT");
                   reader = DirectoryReader.open(writer);
                 } else {
-//                  System.out.println("[" + Thread.currentThread().getName() + "] reopen NRT");
                   DirectoryReader r2 = DirectoryReader.openIfChanged(reader, writer);
                   if (r2 != null) {
                     reader.close();
@@ -1099,7 +1289,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
                 }
               }
             }
-//            System.out.println("[" + Thread.currentThread().getName() + "] DONE");
             success = true;
           } catch (IOException e) {
             throw new RuntimeException(e);
@@ -1144,7 +1333,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
 
-  @Test
   public void testUpdateDifferentDocsInDifferentGens() throws Exception {
     // update same document multiple times across generations
     Directory dir = newDirectory();
@@ -1184,7 +1372,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
 
-  @Test
   public void testChangeCodec() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -1236,7 +1423,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
 
-  @Test
   public void testAddIndexes() throws Exception {
     Directory dir1 = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -1296,7 +1482,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     IOUtils.close(dir1, dir2);
   }
 
-  @Test
   public void testDeleteUnusedUpdatesFiles() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -1326,7 +1511,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
 
-  @Test @Nightly
   public void testTonsOfUpdates() throws Exception {
     // LUCENE-5248: make sure that when there are many updates, we don't use too much RAM
     Directory dir = newDirectory();
@@ -1345,8 +1529,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
       updateTerms.add(TestUtil.randomSimpleString(random));
     }
 
-//    System.out.println("numDocs=" + numDocs + " numNumericFields=" + numNumericFields + " numTerms=" + numTerms);
-    
     // build a large index with many NDV fields and update terms
     for (int i = 0; i < numDocs; i++) {
       Document doc = new Document();
@@ -1368,7 +1550,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     // many flushes during numeric updates
     writer.getConfig().setRAMBufferSizeMB(2048.0 / 1024 / 1024);
     final int numUpdates = atLeast(100);
-//    System.out.println("numUpdates=" + numUpdates);
     for (int i = 0; i < numUpdates; i++) {
       int field = random.nextInt(numNumericFields);
       Term updateTerm = new Term("upd", RandomPicks.randomFrom(random, updateTerms));
@@ -1396,7 +1577,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
   
-  @Test
   public void testUpdatesOrder() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -1413,6 +1593,9 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     writer.updateNumericDocValue(new Term("upd", "t2"), "f1", 3L); // update f1 to 3
     writer.updateNumericDocValue(new Term("upd", "t2"), "f2", 3L); // update f2 to 3
     writer.updateNumericDocValue(new Term("upd", "t1"), "f1", 4L); // update f1 to 4 (but not f2)
+    if (VERBOSE) {
+      System.out.println("TEST: now close");
+    }
     writer.close();
     
     DirectoryReader reader = DirectoryReader.open(dir);
@@ -1427,7 +1610,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
   
-  @Test
   public void testUpdateAllDeletedSegment() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -1455,7 +1637,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
 
-  @Test
   public void testUpdateTwoNonexistingTerms() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -1480,7 +1661,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     dir.close();
   }
 
-  @Test
   public void testIOContext() throws Exception {
     // LUCENE-5591: make sure we pass an IOContext with an approximate
     // segmentSize in FlushInfo

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58105a20/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java b/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java
index 112a108..fc56614 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java
@@ -68,10 +68,11 @@ public class TestPerSegmentDeletes extends LuceneTestCase {
 
     writer.deleteDocuments(new Term("id", "11"));
 
-    // flushing without applying deletes means
-    // there will still be deletes in the segment infos
     writer.flush(false, false);
-    assertTrue(writer.bufferedUpdatesStream.any());
+
+    // deletes are now resolved on flush, so there shouldn't be
+    // any deletes after flush
+    assertFalse(writer.bufferedUpdatesStream.any());
 
     // get reader flushes pending deletes
     // so there should not be anymore

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58105a20/lucene/core/src/test/org/apache/lucene/index/TestPrefixCodedTerms.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPrefixCodedTerms.java b/lucene/core/src/test/org/apache/lucene/index/TestPrefixCodedTerms.java
index 89d4ad1..3ec0b56 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPrefixCodedTerms.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPrefixCodedTerms.java
@@ -16,14 +16,11 @@
  */
 package org.apache.lucene.index;
 
-
-import java.util.Arrays;
 import java.util.Iterator;
 import java.util.Set;
 import java.util.TreeSet;
 
 import org.apache.lucene.index.PrefixCodedTerms.TermIterator;
-import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
 
@@ -73,77 +70,4 @@ public class TestPrefixCodedTerms extends LuceneTestCase {
 
     assertFalse(expected.hasNext());
   }
-
-  @SuppressWarnings("unchecked")
-  public void testMergeOne() {
-    Term t1 = new Term("foo", "a");
-    PrefixCodedTerms.Builder b1 = new PrefixCodedTerms.Builder();
-    b1.add(t1);
-    PrefixCodedTerms pb1 = b1.finish();
-    
-    Term t2 = new Term("foo", "b");
-    PrefixCodedTerms.Builder b2 = new PrefixCodedTerms.Builder();
-    b2.add(t2);
-    PrefixCodedTerms pb2 = b2.finish();
-
-    MergedPrefixCodedTermsIterator merged = new MergedPrefixCodedTermsIterator(Arrays.asList(new PrefixCodedTerms[] {pb1, pb2}));
-    BytesRef term = merged.next();
-    assertNotNull(term);
-    assertEquals("foo", merged.field());
-    assertEquals("a", term.utf8ToString());
-    term = merged.next();
-    assertNotNull(term);
-    assertEquals("b", term.utf8ToString());
-    assertNull(merged.next());
-  }
-
-  @SuppressWarnings({"unchecked","rawtypes"})
-  public void testMergeRandom() {
-    PrefixCodedTerms pb[] = new PrefixCodedTerms[TestUtil.nextInt(random(), 2, 10)];
-    Set<Term> superSet = new TreeSet<>();
-    
-    for (int i = 0; i < pb.length; i++) {
-      Set<Term> terms = new TreeSet<>();
-      int nterms = TestUtil.nextInt(random(), 0, 10000);
-      for (int j = 0; j < nterms; j++) {
-        String field = TestUtil.randomUnicodeString(random(), 2);
-        //String field = TestUtil.randomSimpleString(random(), 2);
-        Term term = new Term(field, TestUtil.randomUnicodeString(random(), 4));
-        terms.add(term);
-      }
-      superSet.addAll(terms);
-    
-      PrefixCodedTerms.Builder b = new PrefixCodedTerms.Builder();
-      //System.out.println("TEST: sub " + i + " has " + terms.size() + " terms");
-      for (Term ref: terms) {
-        //System.out.println("  add " + ref.field() + " " + ref.bytes());
-        b.add(ref);
-      }
-      pb[i] = b.finish();
-    }
-    
-    Iterator<Term> expected = superSet.iterator();
-
-    MergedPrefixCodedTermsIterator actual = new MergedPrefixCodedTermsIterator(Arrays.asList(pb));
-    String field = "";
-
-    BytesRef lastTerm = null;
-    BytesRef term;
-    while ((term = actual.next()) != null) {
-      if (field != actual.field()) {
-        field = actual.field();
-        lastTerm = null;
-      }
-      if (lastTerm != null && lastTerm.equals(term)) {
-        continue;
-      }
-      lastTerm = BytesRef.deepCopyOf(term);
-      assertTrue(expected.hasNext());
-
-      Term expectedTerm = expected.next();
-      assertEquals(expectedTerm, new Term(field, term));
-    }
-
-    assertFalse(expected.hasNext());
-  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58105a20/lucene/core/src/test/org/apache/lucene/search/TestControlledRealTimeReopenThread.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestControlledRealTimeReopenThread.java b/lucene/core/src/test/org/apache/lucene/search/TestControlledRealTimeReopenThread.java
index a1b2a5c..7a7b0ac 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestControlledRealTimeReopenThread.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestControlledRealTimeReopenThread.java
@@ -333,9 +333,11 @@ public class TestControlledRealTimeReopenThread extends ThreadedIndexingAndSearc
     };
     t.start();
     writer.waitAfterUpdate = true; // wait in addDocument to let some reopens go through
+
     final long lastGen = writer.updateDocument(new Term("foo", "bar"), doc); // once this returns the doc is already reflected in the last reopen
 
-    assertFalse(manager.isSearcherCurrent()); // false since there is a delete in the queue
+    // We now eagerly resolve deletes so the manager should see it after update:
+    assertTrue(manager.isSearcherCurrent());
     
     IndexSearcher searcher = manager.acquire();
     try {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58105a20/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
----------------------------------------------------------------------
diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
index 1503de8..0bd4784 100644
--- a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
+++ b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
@@ -1188,13 +1188,16 @@ public class TestJoinUtil extends LuceneTestCase {
   private void executeRandomJoin(boolean multipleValuesPerDocument, int maxIndexIter, int maxSearchIter, int numberOfDocumentsToIndex) throws Exception {
     for (int indexIter = 1; indexIter <= maxIndexIter; indexIter++) {
       if (VERBOSE) {
-        System.out.println("indexIter=" + indexIter);
+        System.out.println("TEST: indexIter=" + indexIter + " numDocs=" + numberOfDocumentsToIndex);
       }
       IndexIterationContext context = createContext(numberOfDocumentsToIndex, multipleValuesPerDocument, false);
       IndexSearcher indexSearcher = context.searcher;
+      if (VERBOSE) {
+        System.out.println("TEST: got searcher=" + indexSearcher);
+      }
       for (int searchIter = 1; searchIter <= maxSearchIter; searchIter++) {
         if (VERBOSE) {
-          System.out.println("searchIter=" + searchIter);
+          System.out.println("TEST: searchIter=" + searchIter);
         }
 
         int r = random().nextInt(context.randomUniqueValues.length);
@@ -1360,9 +1363,9 @@ public class TestJoinUtil extends LuceneTestCase {
       }
       final List<String> subValues;
       {
-      int start = randomUniqueValuesReplica.size()==numberOfLinkValues? 0 : random.nextInt(randomUniqueValuesReplica.size()-numberOfLinkValues);
-      subValues = randomUniqueValuesReplica.subList(start, start+numberOfLinkValues);
-      Collections.shuffle(subValues, random);
+        int start = randomUniqueValuesReplica.size()==numberOfLinkValues? 0 : random.nextInt(randomUniqueValuesReplica.size()-numberOfLinkValues);
+        subValues = randomUniqueValuesReplica.subList(start, start+numberOfLinkValues);
+        Collections.shuffle(subValues, random);
       }
       for (String linkValue : subValues) {
 
@@ -1404,6 +1407,9 @@ public class TestJoinUtil extends LuceneTestCase {
     }
 
     if (random.nextBoolean()) {
+      if (VERBOSE) {
+        System.out.println("TEST: now force merge");
+      }
       w.forceMerge(1);
     }
     w.close();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58105a20/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/IDVersionPostingsWriter.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/IDVersionPostingsWriter.java b/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/IDVersionPostingsWriter.java
index 334f784..fc643d2 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/IDVersionPostingsWriter.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/IDVersionPostingsWriter.java
@@ -45,6 +45,7 @@ final class IDVersionPostingsWriter extends PushPostingsWriterBase {
   private long lastVersion;
 
   private final Bits liveDocs;
+  private String segment;
 
   public IDVersionPostingsWriter(Bits liveDocs) {
     this.liveDocs = liveDocs;
@@ -58,6 +59,7 @@ final class IDVersionPostingsWriter extends PushPostingsWriterBase {
   @Override
   public void init(IndexOutput termsOut, SegmentWriteState state) throws IOException {
     CodecUtil.writeIndexHeader(termsOut, TERMS_CODEC, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+    segment = state.segmentInfo.name;
   }
 
   @Override
@@ -87,7 +89,7 @@ final class IDVersionPostingsWriter extends PushPostingsWriterBase {
       return;
     }
     if (lastDocID != -1) {
-      throw new IllegalArgumentException("term appears in more than one document");
+      throw new IllegalArgumentException("term appears in more than one document: " + lastDocID + " and " + docID);
     }
     if (termDocFreq != 1) {
       throw new IllegalArgumentException("term appears more than once in the document");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58105a20/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionBlockTreeTermsWriter.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionBlockTreeTermsWriter.java b/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionBlockTreeTermsWriter.java
index d83b915..e9187af 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionBlockTreeTermsWriter.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/VersionBlockTreeTermsWriter.java
@@ -161,7 +161,7 @@ public final class VersionBlockTreeTermsWriter extends FieldsConsumer {
   }
 
   private final List<FieldMetaData> fields = new ArrayList<>();
-  // private final String segment;
+  private final String segment;
 
   /** Create a new writer.  The number of items (terms or
    *  sub-blocks) per block will aim to be between
@@ -175,6 +175,7 @@ public final class VersionBlockTreeTermsWriter extends FieldsConsumer {
     throws IOException
   {
     BlockTreeTermsWriter.validateSettings(minItemsInBlock, maxItemsInBlock);
+    segment = state.segmentInfo.name;
 
     maxDoc = state.segmentInfo.maxDoc();
 
@@ -729,7 +730,6 @@ public final class VersionBlockTreeTermsWriter extends FieldsConsumer {
     
     /** Writes one term's worth of postings. */
     public void write(BytesRef text, TermsEnum termsEnum) throws IOException {
-
       BlockTermState state = postingsWriter.writeTerm(text, termsEnum, docsSeen);
       // TODO: LUCENE-5693: we don't need this check if we fix IW to not send deleted docs to us on flush:
       if (state != null && ((IDVersionPostingsWriter) postingsWriter).lastDocID != -1) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58105a20/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
index 8cb6665..28ab3b6 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
@@ -2106,6 +2106,9 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     String[] uniqueValues = valueSet.toArray(new String[0]);
 
     // index some docs
+    if (VERBOSE) {
+      System.out.println("\nTEST: now add numDocs=" + numDocs);
+    }
     for (int i = 0; i < numDocs; i++) {
       Document doc = new Document();
       Field idField = new StringField("id", Integer.toString(i), Field.Store.NO);
@@ -2137,12 +2140,18 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     
     // delete some docs
     int numDeletions = random().nextInt(numDocs/10);
+    if (VERBOSE) {
+      System.out.println("\nTEST: now delete " + numDeletions + " docs");
+    }
     for (int i = 0; i < numDeletions; i++) {
       int id = random().nextInt(numDocs);
       writer.deleteDocuments(new Term("id", Integer.toString(id)));
     }
     
     // compare
+    if (VERBOSE) {
+      System.out.println("\nTEST: now get reader");
+    }
     DirectoryReader ir = writer.getReader();
     TestUtil.checkReader(ir);
     for (LeafReaderContext context : ir.leaves()) {
@@ -2168,7 +2177,13 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
         }
       }
     }
+    if (VERBOSE) {
+      System.out.println("\nTEST: now close reader");
+    }
     ir.close();
+    if (VERBOSE) {
+      System.out.println("TEST: force merge");
+    }
     writer.forceMerge(1);
     
     // compare again
@@ -2195,8 +2210,17 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
         }
       }
     }
+    if (VERBOSE) {
+      System.out.println("TEST: close reader");
+    }
     ir.close();
+    if (VERBOSE) {
+      System.out.println("TEST: close writer");
+    }
     writer.close();
+    if (VERBOSE) {
+      System.out.println("TEST: close dir");
+    }
     dir.close();
   }
   

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58105a20/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
index 959466a..ab92946 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
@@ -564,7 +564,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
           handleFakeIOException(e, exceptionStream);
           allowAlreadyClosed = true;
         }
-        
+
         if (random().nextInt(10) == 0) {
           // trigger flush:
           try {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/58105a20/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
index 0243a56..3a87c1e 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
@@ -1183,17 +1183,6 @@ public abstract class LuceneTestCase extends Assert {
     }
     
     if (rarely(r)) {
-      // change buffered deletes parameters
-      boolean limitBufferedDeletes = r.nextBoolean();
-      if (limitBufferedDeletes) {
-        c.setMaxBufferedDeleteTerms(TestUtil.nextInt(r, 1, 1000));
-      } else {
-        c.setMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH);
-      }
-      didChange = true;
-    }
-    
-    if (rarely(r)) {
       IndexWriter.IndexReaderWarmer curWarmer = c.getMergedSegmentWarmer();
       if (curWarmer == null || curWarmer instanceof SimpleMergedSegmentWarmer) {
         // change warmer parameters