Posted to commits@lucene.apache.org by rm...@apache.org on 2011/05/14 15:51:59 UTC

svn commit: r1103112 [13/24] - in /lucene/dev/branches/flexscoring: ./ dev-tools/eclipse/ dev-tools/idea/.idea/ dev-tools/idea/lucene/contrib/ant/ dev-tools/idea/lucene/contrib/db/bdb-je/ dev-tools/idea/lucene/contrib/db/bdb/ dev-tools/idea/lucene/cont...

Modified: lucene/dev/branches/flexscoring/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/flexscoring/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java?rev=1103112&r1=1103111&r2=1103112&view=diff
==============================================================================
--- lucene/dev/branches/flexscoring/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java (original)
+++ lucene/dev/branches/flexscoring/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java Sat May 14 13:51:35 2011
@@ -18,7 +18,6 @@ package org.apache.lucene.index;
  */
 
 import java.io.ByteArrayOutputStream;
-import java.io.File;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.io.Reader;
@@ -37,6 +36,7 @@ import java.util.concurrent.atomic.Atomi
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockFixedLengthPayloadFilter;
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
@@ -86,7 +86,7 @@ public class TestIndexWriter extends Luc
         try {
           IndexWriterConfig.setDefaultWriteLockTimeout(2000);
           assertEquals(2000, IndexWriterConfig.getDefaultWriteLockTimeout());
-          writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+          writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
         } finally {
           IndexWriterConfig.setDefaultWriteLockTimeout(savedWriteLockTimeout);
         }
@@ -110,7 +110,7 @@ public class TestIndexWriter extends Luc
         reader.close();
 
         // optimize the index and check that the new doc count is correct
-        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
         assertEquals(60, writer.numDocs());
         writer.optimize();
         assertEquals(60, writer.maxDoc());
@@ -125,7 +125,7 @@ public class TestIndexWriter extends Luc
 
         // make sure opening a new index for create over
         // this existing one works correctly:
-        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
         assertEquals(0, writer.maxDoc());
         assertEquals(0, writer.numDocs());
         writer.close();
@@ -147,13 +147,13 @@ public class TestIndexWriter extends Luc
         writer.addDocument(doc);
     }
 
-                              
+
 
     public static void assertNoUnreferencedFiles(Directory dir, String message) throws IOException {
       String[] startFiles = dir.listAll();
       SegmentInfos infos = new SegmentInfos();
       infos.read(dir);
-      new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())).rollback();
+      new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))).rollback();
       String[] endFiles = dir.listAll();
 
       Arrays.sort(startFiles);
@@ -170,13 +170,13 @@ public class TestIndexWriter extends Luc
 
       final Document doc = new Document();
       doc.add(newField("content", "aaa", Field.Store.YES, Field.Index.ANALYZED));
-
-      for(int numDocs=38;numDocs<500;numDocs += 38) {
+      final int incrMin = TEST_NIGHTLY ? 15 : 40;
+      for(int numDocs=10;numDocs<500;numDocs += _TestUtil.nextInt(random, incrMin, 5*incrMin)) {
         LogDocMergePolicy ldmp = new LogDocMergePolicy();
         ldmp.setMinMergeDocs(1);
         ldmp.setMergeFactor(5);
         IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer())
+          TEST_VERSION_CURRENT, new MockAnalyzer(random))
           .setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(2).setMergePolicy(
               ldmp));
         for(int j=0;j<numDocs;j++)
@@ -190,7 +190,7 @@ public class TestIndexWriter extends Luc
         ldmp = new LogDocMergePolicy();
         ldmp.setMergeFactor(5);
         writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
-          new MockAnalyzer()).setMergePolicy(ldmp));
+          new MockAnalyzer(random)).setMergePolicy(ldmp));
         writer.optimize(3);
         writer.close();
 
@@ -216,7 +216,7 @@ public class TestIndexWriter extends Luc
       ldmp.setMinMergeDocs(1);
       ldmp.setMergeFactor(4);
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(2).setMergePolicy(ldmp).setMergeScheduler(new ConcurrentMergeScheduler()));
 
       for(int iter=0;iter<10;iter++) {
@@ -257,11 +257,11 @@ public class TestIndexWriter extends Luc
     public void testOptimizeTempSpaceUsage() throws IOException {
 
       MockDirectoryWrapper dir = newDirectory();
-      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10).setMergePolicy(newLogMergePolicy()));
+      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10).setMergePolicy(newLogMergePolicy()));
       if (VERBOSE) {
         System.out.println("TEST: config1=" + writer.getConfig());
       }
-      
+
       for(int j=0;j<500;j++) {
         addDocWithIndex(writer, j);
       }
@@ -290,7 +290,7 @@ public class TestIndexWriter extends Luc
       // Important to use same term index interval else a
       // smaller one here could increase the disk usage and
       // cause a false failure:
-      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setTermIndexInterval(termIndexInterval).setMergePolicy(newLogMergePolicy()));
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setTermIndexInterval(termIndexInterval).setMergePolicy(newLogMergePolicy()));
       writer.setInfoStream(VERBOSE ? System.out : null);
       writer.optimize();
       writer.close();
@@ -318,7 +318,7 @@ public class TestIndexWriter extends Luc
       Directory dir = newDirectory();
 
       // add one document & close writer
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       addDoc(writer);
       writer.close();
 
@@ -327,7 +327,7 @@ public class TestIndexWriter extends Luc
       assertEquals("should be one document", reader.numDocs(), 1);
 
       // now open index for create:
-      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
       assertEquals("should be zero documents", writer.maxDoc(), 0);
       addDoc(writer);
       writer.close();
@@ -337,7 +337,7 @@ public class TestIndexWriter extends Luc
       assertEquals("should be one document", reader2.numDocs(), 1);
       reader.close();
       reader2.close();
-      
+
       dir.close();
     }
 
@@ -346,7 +346,7 @@ public class TestIndexWriter extends Luc
 
         IndexWriter writer = null;
 
-        writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+        writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
         addDoc(writer);
 
         // close
@@ -366,14 +366,14 @@ public class TestIndexWriter extends Luc
      * these docs until writer is closed.
      */
     public void testCommitOnClose() throws IOException {
-        Directory dir = newDirectory();      
-        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+        Directory dir = newDirectory();
+        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
         for (int i = 0; i < 14; i++) {
           addDoc(writer);
         }
         writer.close();
 
-        Term searchTerm = new Term("content", "aaa");        
+        Term searchTerm = new Term("content", "aaa");
         IndexSearcher searcher = new IndexSearcher(dir, false);
         ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
         assertEquals("first number of hits", 14, hits.length);
@@ -381,7 +381,7 @@ public class TestIndexWriter extends Luc
 
         IndexReader reader = IndexReader.open(dir, true);
 
-        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
         for(int i=0;i<3;i++) {
           for(int j=0;j<11;j++) {
             addDoc(writer);
@@ -414,20 +414,20 @@ public class TestIndexWriter extends Luc
      * and add docs to it.
      */
     public void testCommitOnCloseAbort() throws IOException {
-      MockDirectoryWrapper dir = newDirectory();      
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
+      MockDirectoryWrapper dir = newDirectory();
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10));
       for (int i = 0; i < 14; i++) {
         addDoc(writer);
       }
       writer.close();
 
-      Term searchTerm = new Term("content", "aaa");        
+      Term searchTerm = new Term("content", "aaa");
       IndexSearcher searcher = new IndexSearcher(dir, false);
       ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
       assertEquals("first number of hits", 14, hits.length);
       searcher.close();
 
-      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
       for(int j=0;j<17;j++) {
         addDoc(writer);
@@ -449,10 +449,10 @@ public class TestIndexWriter extends Luc
       hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
       assertEquals("saw changes after writer.abort", 14, hits.length);
       searcher.close();
-          
+
       // Now make sure we can re-open the index, add docs,
       // and all is good:
-      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
 
       // On abort, writer in fact may write to the same
@@ -486,10 +486,32 @@ public class TestIndexWriter extends Luc
      * measure max temp disk space used.
      */
     public void testCommitOnCloseDiskUsage() throws IOException {
-      MockDirectoryWrapper dir = newDirectory();      
+      MockDirectoryWrapper dir = newDirectory();
+      Analyzer analyzer;
+      if (random.nextBoolean()) {
+        // no payloads
+       analyzer = new Analyzer() {
+          @Override
+          public TokenStream tokenStream(String fieldName, Reader reader) {
+            return new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+          }
+        };
+      } else {
+        // fixed length payloads
+        final int length = random.nextInt(200);
+        analyzer = new Analyzer() {
+          @Override
+          public TokenStream tokenStream(String fieldName, Reader reader) {
+            return new MockFixedLengthPayloadFilter(random,
+                new MockTokenizer(reader, MockTokenizer.WHITESPACE, true),
+                length);
+          }
+        };
+      }
+      
       IndexWriter writer  = new IndexWriter(
           dir,
-          newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).
+          newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).
               setMaxBufferedDocs(10).
               setReaderPooling(false).
               setMergePolicy(newLogMergePolicy(10))
@@ -504,7 +526,7 @@ public class TestIndexWriter extends Luc
       long startDiskUsage = dir.getMaxUsedSizeInBytes();
       writer = new IndexWriter(
           dir,
-          newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
+          newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
               .setOpenMode(OpenMode.APPEND).
               setMaxBufferedDocs(10).
               setMergeScheduler(new SerialMergeScheduler()).
@@ -544,14 +566,14 @@ public class TestIndexWriter extends Luc
      * and close().
      */
     public void testCommitOnCloseOptimize() throws IOException {
-      MockDirectoryWrapper dir = newDirectory();  
+      MockDirectoryWrapper dir = newDirectory();
       // Must disable throwing exc on double-write: this
       // test uses IW.rollback which easily results in
       // writing to same file more than once
       dir.setPreventDoubleWrite(false);
       IndexWriter writer = new IndexWriter(
           dir,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
               setMaxBufferedDocs(10).
               setMergePolicy(newLogMergePolicy(10))
       );
@@ -560,7 +582,7 @@ public class TestIndexWriter extends Luc
       }
       writer.close();
 
-      writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+      writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
       writer.optimize();
 
       if (VERBOSE) {
@@ -589,7 +611,7 @@ public class TestIndexWriter extends Luc
       if (VERBOSE) {
         System.out.println("TEST: do real optimize");
       }
-      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
       if (VERBOSE) {
         writer.setInfoStream(System.out);
       }
@@ -611,8 +633,8 @@ public class TestIndexWriter extends Luc
     }
 
     public void testIndexNoDocuments() throws IOException {
-      MockDirectoryWrapper dir = newDirectory();      
-      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+      MockDirectoryWrapper dir = newDirectory();
+      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       writer.commit();
       writer.close();
 
@@ -621,7 +643,7 @@ public class TestIndexWriter extends Luc
       assertEquals(0, reader.numDocs());
       reader.close();
 
-      writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+      writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
       writer.commit();
       writer.close();
 
@@ -633,8 +655,8 @@ public class TestIndexWriter extends Luc
     }
 
     public void testManyFields() throws IOException {
-      MockDirectoryWrapper dir = newDirectory();      
-      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
+      MockDirectoryWrapper dir = newDirectory();
+      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10));
       for(int j=0;j<100;j++) {
         Document doc = new Document();
         doc.add(newField("a"+j, "aaa" + j, Field.Store.YES, Field.Index.ANALYZED));
@@ -663,10 +685,10 @@ public class TestIndexWriter extends Luc
     }
 
     public void testSmallRAMBuffer() throws IOException {
-      MockDirectoryWrapper dir = newDirectory();      
+      MockDirectoryWrapper dir = newDirectory();
       IndexWriter writer  = new IndexWriter(
           dir,
-          newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).
+          newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).
               setRAMBufferSizeMB(0.000001).
               setMergePolicy(newLogMergePolicy(10))
       );
@@ -689,7 +711,7 @@ public class TestIndexWriter extends Luc
     // maxBufferedDocs in a write session
     public void testChangingRAMBuffer() throws IOException {
       Directory dir = newDirectory();      
-      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       writer.getConfig().setMaxBufferedDocs(10);
       writer.getConfig().setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
 
@@ -743,7 +765,7 @@ public class TestIndexWriter extends Luc
 
     public void testChangingRAMBuffer2() throws IOException {
       Directory dir = newDirectory();      
-      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       writer.getConfig().setMaxBufferedDocs(10);
       writer.getConfig().setMaxBufferedDeleteTerms(10);
       writer.getConfig().setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
@@ -759,13 +781,14 @@ public class TestIndexWriter extends Luc
         writer.deleteDocuments(new Term("field", "aaa" + j));
         _TestUtil.syncConcurrentMerges(writer);
         int flushCount = writer.getFlushCount();
+       
         if (j == 1)
           lastFlushCount = flushCount;
         else if (j < 10) {
           // No new files should be created
           assertEquals(flushCount, lastFlushCount);
         } else if (10 == j) {
-          assertTrue(flushCount > lastFlushCount);
+          assertTrue("" + j, flushCount > lastFlushCount);
           lastFlushCount = flushCount;
           writer.getConfig().setRAMBufferSizeMB(0.000001);
           writer.getConfig().setMaxBufferedDeleteTerms(1);
@@ -802,8 +825,8 @@ public class TestIndexWriter extends Luc
     }
 
     public void testDiverseDocs() throws IOException {
-      MockDirectoryWrapper dir = newDirectory();      
-      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setRAMBufferSizeMB(0.5));
+      MockDirectoryWrapper dir = newDirectory();
+      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setRAMBufferSizeMB(0.5));
       for(int i=0;i<3;i++) {
         // First, docs where every term is unique (heavy on
         // Posting instances)
@@ -849,12 +872,12 @@ public class TestIndexWriter extends Luc
     }
 
     public void testEnablingNorms() throws IOException {
-      MockDirectoryWrapper dir = newDirectory();      
-      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
+      MockDirectoryWrapper dir = newDirectory();
+      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10));
       // Enable norms for only 1 doc, pre flush
       for(int j=0;j<10;j++) {
         Document doc = new Document();
-        Field f = newField("field", "aaa", Field.Store.YES, Field.Index.ANALYZED); 
+        Field f = newField("field", "aaa", Field.Store.YES, Field.Index.ANALYZED);
         if (j != 8) {
           f.setOmitNorms(true);
         }
@@ -870,12 +893,12 @@ public class TestIndexWriter extends Luc
       assertEquals(10, hits.length);
       searcher.close();
 
-      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(10));
       // Enable norms for only 1 doc, post flush
       for(int j=0;j<27;j++) {
         Document doc = new Document();
-        Field f = newField("field", "aaa", Field.Store.YES, Field.Index.ANALYZED); 
+        Field f = newField("field", "aaa", Field.Store.YES, Field.Index.ANALYZED);
         if (j != 26) {
           f.setOmitNorms(true);
         }
@@ -895,9 +918,9 @@ public class TestIndexWriter extends Luc
     }
 
     public void testHighFreqTerm() throws IOException {
-      MockDirectoryWrapper dir = newDirectory();      
+      MockDirectoryWrapper dir = newDirectory();
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()).setRAMBufferSizeMB(0.01));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setRAMBufferSizeMB(0.01));
       // Massive doc that has 128 K a's
       StringBuilder b = new StringBuilder(1024*1024);
       for(int i=0;i<4096;i++) {
@@ -945,21 +968,21 @@ public class TestIndexWriter extends Luc
           return myLockFactory.makeLock(name);
         }
       }
-      
+
       Directory dir = new MyRAMDirectory(new RAMDirectory());
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       for (int i = 0; i < 100; i++) {
         addDoc(writer);
       }
       writer.close();
-      Term searchTerm = new Term("content", "aaa");        
+      Term searchTerm = new Term("content", "aaa");
       IndexSearcher searcher = new IndexSearcher(dir, false);
       ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
       assertEquals("did not get right number of hits", 100, hits.length);
       searcher.close();
 
-      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setOpenMode(OpenMode.CREATE));
       writer.close();
       searcher.close();
@@ -970,7 +993,7 @@ public class TestIndexWriter extends Luc
       Directory dir = newDirectory();
       IndexWriter writer = new IndexWriter(
           dir,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
               setMaxBufferedDocs(2).
               setMergePolicy(newLogMergePolicy(10))
       );
@@ -992,7 +1015,7 @@ public class TestIndexWriter extends Luc
     // empty doc (no norms) and flush
     public void testEmptyDocAfterFlushingRealDoc() throws IOException {
       Directory dir = newDirectory();
-      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       writer.setInfoStream(VERBOSE ? System.out : null);
       Document doc = new Document();
       doc.add(newField("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
@@ -1019,7 +1042,7 @@ public class TestIndexWriter extends Luc
       for(int pass=0;pass<2;pass++) {
         IndexWriter writer = new IndexWriter(
             dir,
-            newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+            newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
                 setOpenMode(OpenMode.CREATE).
                 setMaxBufferedDocs(2).
                 setMergePolicy(newLogMergePolicy(101))
@@ -1050,7 +1073,7 @@ public class TestIndexWriter extends Luc
           infos.read(dir);
           assertEquals(2, infos.size());
         }
-      }      
+      }
 
       dir.close();
     }
@@ -1065,8 +1088,8 @@ public class TestIndexWriter extends Luc
   public void testBadSegment() throws IOException {
     Directory dir = newDirectory();
     IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
-    
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+
     Document document = new Document();
     document.add(newField("tvtest", "", Store.NO, Index.ANALYZED, TermVector.YES));
     iw.addDocument(document);
@@ -1080,7 +1103,7 @@ public class TestIndexWriter extends Luc
     try {
       Directory dir = newDirectory();
       IndexWriterConfig conf = newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer())
+          TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
       ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
       IndexWriter iw = new IndexWriter(dir, conf);
@@ -1106,10 +1129,12 @@ public class TestIndexWriter extends Luc
 
       while(true) {
         MergePolicy.OneMerge merge = writer.getNextMerge();
-        if (merge == null)
+        if (merge == null) {
           break;
-        for(int i=0;i<merge.segments.size();i++)
-          assert merge.segments.info(i).docCount < 20;
+        }
+        for(int i=0;i<merge.segments.size();i++) {
+          assert merge.segments.get(i).docCount < 20;
+        }
         writer.merge(merge);
       }
     }
@@ -1122,12 +1147,13 @@ public class TestIndexWriter extends Luc
   public void testSetMaxMergeDocs() throws IOException {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
       .setMergeScheduler(new MyMergeScheduler()).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
     LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
     lmp.setMaxMergeDocs(20);
     lmp.setMergeFactor(2);
     IndexWriter iw = new IndexWriter(dir, conf);
+    iw.setInfoStream(VERBOSE ? System.out : null);
     Document document = new Document();
     document.add(newField("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED,
                            Field.TermVector.YES));
@@ -1146,7 +1172,7 @@ public class TestIndexWriter extends Luc
       if (VERBOSE) {
         System.out.println("TEST: iter=" + i);
       }
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
       writer.setInfoStream(VERBOSE ? System.out : null);
       //LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
       //lmp.setMergeFactor(2);
@@ -1183,7 +1209,7 @@ public class TestIndexWriter extends Luc
       reader.close();
 
       if (0 == i % 4) {
-        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
         //LogMergePolicy lmp2 = (LogMergePolicy) writer.getConfig().getMergePolicy();
         //lmp2.setUseCompoundFile(false);
         writer.optimize();
@@ -1207,7 +1233,7 @@ public class TestIndexWriter extends Luc
 
       IndexWriter writer = new IndexWriter(
           directory,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
               setOpenMode(OpenMode.CREATE).
               setMaxBufferedDocs(2).
               setMergePolicy(newLogMergePolicy())
@@ -1276,7 +1302,7 @@ public class TestIndexWriter extends Luc
         reader.close();
 
         // Reopen
-        writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
+        writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
         writer.setInfoStream(VERBOSE ? System.out : null);
       }
       writer.close();
@@ -1290,7 +1316,7 @@ public class TestIndexWriter extends Luc
   public void testUnlimitedMaxFieldLength() throws IOException {
     Directory dir = newDirectory();
 
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 
     Document doc = new Document();
     StringBuilder b = new StringBuilder();
@@ -1314,12 +1340,12 @@ public class TestIndexWriter extends Luc
 
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMaxBufferedDocs(2).
             setMergePolicy(newLogMergePolicy(5))
     );
     writer.commit();
-    
+
     for (int i = 0; i < 23; i++)
       addDoc(writer);
 
@@ -1346,13 +1372,13 @@ public class TestIndexWriter extends Luc
     writer.close();
     dir.close();
   }
-  
+
   // LUCENE-325: test expungeDeletes, when 2 singular merges
   // are required
   public void testExpungeDeletes() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( 
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(2).setRAMBufferSizeMB(
                                                   IndexWriterConfig.DISABLE_AUTO_FLUSH));
     writer.setInfoStream(VERBOSE ? System.out : null);
@@ -1378,7 +1404,7 @@ public class TestIndexWriter extends Luc
     assertEquals(8, ir.numDocs());
     ir.close();
 
-    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     assertEquals(8, writer.numDocs());
     assertEquals(10, writer.maxDoc());
     writer.expungeDeletes();
@@ -1396,7 +1422,7 @@ public class TestIndexWriter extends Luc
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMaxBufferedDocs(2).
             setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).
             setMergePolicy(newLogMergePolicy(50))
@@ -1426,7 +1452,7 @@ public class TestIndexWriter extends Luc
 
     writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMergePolicy(newLogMergePolicy(3))
     );
     assertEquals(49, writer.numDocs());
@@ -1445,7 +1471,7 @@ public class TestIndexWriter extends Luc
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMaxBufferedDocs(2).
             setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).
             setMergePolicy(newLogMergePolicy(50))
@@ -1475,7 +1501,7 @@ public class TestIndexWriter extends Luc
 
     writer = new IndexWriter(
         dir,
-        newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMergePolicy(newLogMergePolicy(3))
     );
     writer.expungeDeletes(false);
@@ -1490,7 +1516,7 @@ public class TestIndexWriter extends Luc
   // LUCENE-1179
   public void testEmptyFieldName() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     doc.add(newField("", "a b c", Field.Store.NO, Field.Index.ANALYZED));
     writer.addDocument(doc);
@@ -1513,18 +1539,18 @@ public class TestIndexWriter extends Luc
     public void doAfterFlush() {
       afterWasCalled = true;
     }
-    
+
     @Override
     protected void doBeforeFlush() throws IOException {
       beforeWasCalled = true;
     }
   }
-  
+
 
   // LUCENE-1222
   public void testDoBeforeAfterFlush() throws IOException {
     Directory dir = newDirectory();
-    MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     doc.add(newField("field", "a field", Field.Store.YES,
                       Field.Index.ANALYZED));
@@ -1548,7 +1574,7 @@ public class TestIndexWriter extends Luc
   }
 
 
-  
+
   final String[] utf8Data = new String[] {
     // unpaired low surrogate
     "ab\udc17cd", "ab\ufffdcd",
@@ -1578,7 +1604,7 @@ public class TestIndexWriter extends Luc
   // LUCENE-510
   public void testInvalidUTF16() throws Throwable {
     Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new StringSplitAnalyzer()));
     Document doc = new Document();
 
     final int count = utf8Data.length/2;
@@ -1618,7 +1644,7 @@ public class TestIndexWriter extends Luc
       }
 
       UnicodeUtil.UTF16toUTF8(chars, 0, len, utf8);
-      
+
       String s1 = new String(chars, 0, len);
       String s2 = new String(utf8.bytes, 0, utf8.length, "UTF-8");
       assertEquals("codepoint " + ch, s1, s2);
@@ -1675,7 +1701,7 @@ public class TestIndexWriter extends Luc
           expected[i++] = 0xfffd;
           expected[i] = buffer[i] = (char) nextInt(0x800, 0xd800);
           hasIllegal = true;
-        } else 
+        } else
           expected[i] = buffer[i] = (char) nextInt(0x800, 0xd800);
       } else {
         expected[i] = buffer[i] = ' ';
@@ -1772,10 +1798,10 @@ public class TestIndexWriter extends Luc
     final TokenStream tokens = new TokenStream() {
       final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
       final PositionIncrementAttribute posIncrAtt = addAttribute(PositionIncrementAttribute.class);
-      
+
       final Iterator<String> terms = Arrays.asList("a","b","c").iterator();
       boolean first = true;
-      
+
       @Override
       public boolean incrementToken() {
         if (!terms.hasNext()) return false;
@@ -1788,7 +1814,7 @@ public class TestIndexWriter extends Luc
     };
 
     Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     doc.add(new Field("field", tokens));
     w.addDocument(doc);
@@ -1827,12 +1853,12 @@ public class TestIndexWriter extends Luc
 
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMaxBufferedDocs(2).
             setMergePolicy(newLogMergePolicy(5))
     );
     writer.commit();
-    
+
     for (int i = 0; i < 23; i++)
       addDoc(writer);
 
@@ -1883,12 +1909,12 @@ public class TestIndexWriter extends Luc
 
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMaxBufferedDocs(2).
             setMergePolicy(newLogMergePolicy(5))
     );
     writer.commit();
-    
+
     for (int i = 0; i < 23; i++)
       addDoc(writer);
 
@@ -1909,7 +1935,7 @@ public class TestIndexWriter extends Luc
     reader.close();
     reader2.close();
 
-    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     for (int i = 0; i < 17; i++)
       addDoc(writer);
 
@@ -1937,7 +1963,7 @@ public class TestIndexWriter extends Luc
   public void testPrepareCommitNoChanges() throws IOException {
     Directory dir = newDirectory();
 
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer.prepareCommit();
     writer.commit();
     writer.close();
@@ -1951,11 +1977,11 @@ public class TestIndexWriter extends Luc
   // LUCENE-1219
   public void testBinaryFieldOffsetLength() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     byte[] b = new byte[50];
     for(int i=0;i<50;i++)
       b[i] = (byte) (i+77);
-    
+
     Document doc = new Document();
     Field f = new Field("binary", b, 10, 17);
     byte[] bx = f.getBinaryValue();
@@ -1981,7 +2007,7 @@ public class TestIndexWriter extends Luc
   // LUCENE-1382
   public void testCommitUserData() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
     for(int j=0;j<17;j++)
       addDoc(w);
     w.close();
@@ -1992,27 +2018,27 @@ public class TestIndexWriter extends Luc
     // commit(Map) never called for this index
     assertEquals(0, r.getCommitUserData().size());
     r.close();
-      
-    w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+
+    w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
     for(int j=0;j<17;j++)
       addDoc(w);
     Map<String,String> data = new HashMap<String,String>();
     data.put("label", "test1");
     w.commit(data);
     w.close();
-      
+
     assertEquals("test1", IndexReader.getCommitUserData(dir).get("label"));
 
     r = IndexReader.open(dir, true);
     assertEquals("test1", r.getCommitUserData().get("label"));
     r.close();
 
-    w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     w.optimize();
     w.close();
 
     assertEquals("test1", IndexReader.getCommitUserData(dir).get("label"));
-      
+
     dir.close();
   }
 
@@ -2020,9 +2046,9 @@ public class TestIndexWriter extends Luc
   // LUCENE-2529
   public void testPositionIncrementGapEmptyField() throws Exception {
     Directory dir = newDirectory();
-    MockAnalyzer analyzer = new MockAnalyzer();
+    MockAnalyzer analyzer = new MockAnalyzer(random);
     analyzer.setPositionIncrementGap( 100 );
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( 
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, analyzer));
     Document doc = new Document();
     Field f = newField("field", "", Field.Store.NO,
@@ -2049,7 +2075,7 @@ public class TestIndexWriter extends Luc
 
   // LUCENE-1468 -- make sure opening an IndexWriter with
   // create=true does not remove non-index files
-  
+
   public void testOtherFiles() throws Throwable {
     Directory dir = newDirectory();
     try {
@@ -2058,7 +2084,7 @@ public class TestIndexWriter extends Luc
       out.writeByte((byte) 42);
       out.close();
 
-      new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())).close();
+      new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
 
       assertTrue(dir.fileExists("myrandomfile"));
     } finally {
@@ -2068,7 +2094,7 @@ public class TestIndexWriter extends Luc
 
   public void testDeadlock() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
     Document doc = new Document();
     doc.add(newField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
                       Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
@@ -2079,7 +2105,7 @@ public class TestIndexWriter extends Luc
     // index has 2 segments
 
     Directory dir2 = newDirectory();
-    IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer2.addDocument(doc);
     writer2.close();
 
@@ -2108,7 +2134,7 @@ public class TestIndexWriter extends Luc
     @Override
     public void run() {
       // LUCENE-2239: won't work with NIOFS/MMAP
-      Directory dir = new MockDirectoryWrapper(random, new RAMDirectory()); 
+      Directory dir = new MockDirectoryWrapper(random, new RAMDirectory());
       IndexWriter w = null;
       while(!finish) {
         try {
@@ -2116,9 +2142,10 @@ public class TestIndexWriter extends Luc
           while(true) {
             if (w != null) {
               w.close();
+              w = null;
             }
-            IndexWriterConfig conf = newIndexWriterConfig( 
-                                                          TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2);
+            IndexWriterConfig conf = newIndexWriterConfig(
+                                                          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2);
             w = new IndexWriter(dir, conf);
 
             Document doc = new Document();
@@ -2161,10 +2188,12 @@ public class TestIndexWriter extends Luc
       if (!failed) {
         // clear interrupt state:
         Thread.interrupted();
-        try {
-          w.rollback();
-        } catch (IOException ioe) {
-          throw new RuntimeException(ioe);
+        if (w != null) {
+          try {
+            w.rollback();
+          } catch (IOException ioe) {
+            throw new RuntimeException(ioe);
+          }
         }
 
         try {
@@ -2184,10 +2213,10 @@ public class TestIndexWriter extends Luc
           e.printStackTrace(System.out);
         }
       }
-      try { 
+      try {
         dir.close();
-      } catch (IOException e) { 
-        throw new RuntimeException(e); 
+      } catch (IOException e) {
+        throw new RuntimeException(e);
       }
     }
   }
@@ -2202,7 +2231,7 @@ public class TestIndexWriter extends Luc
     // interrupt arrives while class loader is trying to
     // init this class (in servicing a first interrupt):
     assertTrue(new ThreadInterruptedException(new InterruptedException()).getCause() instanceof InterruptedException);
-    
+
     // issue 100 interrupts to child thread
     int i = 0;
     while(i < 100) {
@@ -2223,7 +2252,7 @@ public class TestIndexWriter extends Luc
 
   public void testIndexStoreCombos() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     byte[] b = new byte[50];
     for(int i=0;i<50;i++)
       b[i] = (byte) (i+77);
@@ -2236,12 +2265,12 @@ public class TestIndexWriter extends Luc
     doc.add(f);
     doc.add(f2);
     w.addDocument(doc);
-    
+
     // add 2 docs to test in-memory merging
     f.setTokenStream(new MockTokenizer(new StringReader("doc2field1"), MockTokenizer.WHITESPACE, false));
     f2.setTokenStream(new MockTokenizer(new StringReader("doc2field2"), MockTokenizer.WHITESPACE, false));
     w.addDocument(doc);
-  
+
     // force segment flush so we can force a segment merge with doc3 later.
     w.commit();
 
@@ -2264,7 +2293,7 @@ public class TestIndexWriter extends Luc
     assertTrue(ir.document(0).getFieldable("binary").isBinary());
     assertTrue(ir.document(1).getFieldable("binary").isBinary());
     assertTrue(ir.document(2).getFieldable("binary").isBinary());
-    
+
     assertEquals("value", ir.document(0).get("string"));
     assertEquals("value", ir.document(1).get("string"));
     assertEquals("value", ir.document(2).get("string"));
@@ -2286,7 +2315,7 @@ public class TestIndexWriter extends Luc
   // LUCENE-1727: make sure doc fields are stored in order
   public void testStoredFieldsOrder() throws Throwable {
     Directory d = newDirectory();
-    IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     doc.add(newField("zzz", "a b c", Field.Store.YES, Field.Index.NO));
     doc.add(newField("aaa", "a b c", Field.Store.YES, Field.Index.NO));
@@ -2318,7 +2347,7 @@ public class TestIndexWriter extends Luc
   public void testEmbeddedFFFF() throws Throwable {
 
     Directory d = newDirectory();
-    IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     doc.add(newField("field", "a a\uffffb", Field.Store.NO, Field.Index.ANALYZED));
     w.addDocument(doc);
@@ -2335,8 +2364,8 @@ public class TestIndexWriter extends Luc
 
   public void testNoDocsIndex() throws Throwable {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( 
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
     writer.setInfoStream(new PrintStream(bos));
     writer.addDocument(new Document());
@@ -2345,7 +2374,7 @@ public class TestIndexWriter extends Luc
     _TestUtil.checkIndex(dir);
     dir.close();
   }
-  
+
   // LUCENE-2095: make sure with multiple threads commit
   // doesn't return until all changes are in fact in the
   // index
@@ -2353,8 +2382,8 @@ public class TestIndexWriter extends Luc
     final int NUM_THREADS = 5;
     final double RUN_SEC = 0.5;
     final Directory dir = newDirectory();
-    final RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig( 
-                                                                                        TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    final RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(
+                                                                                        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     _TestUtil.reduceOpenFiles(w.w);
     w.commit();
     final AtomicBoolean failed = new AtomicBoolean();
@@ -2526,7 +2555,7 @@ public class TestIndexWriter extends Luc
 
   public void testIndexDivisor() throws Exception {
     Directory dir = newDirectory();
-    IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
     config.setTermIndexInterval(2);
     IndexWriter w = new IndexWriter(dir, config);
     StringBuilder s = new StringBuilder();
@@ -2538,7 +2567,7 @@ public class TestIndexWriter extends Luc
     Field f = newField("field", s.toString(), Field.Store.NO, Field.Index.ANALYZED);
     d.add(f);
     w.addDocument(d);
-    
+
     IndexReader r = w.getReader().getSequentialSubReaders()[0];
     TermsEnum t = r.fields().terms("field").iterator();
     int count = 0;
@@ -2563,7 +2592,7 @@ public class TestIndexWriter extends Luc
 
       IndexWriter w = new IndexWriter(
           dir,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
               setMergePolicy(mergePolicy)
       );
       Document doc = new Document();
@@ -2624,10 +2653,10 @@ public class TestIndexWriter extends Luc
     // in case a deletion policy which holds onto commits is used.
     Directory dir = newDirectory();
     SnapshotDeletionPolicy sdp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( 
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setIndexDeletionPolicy(sdp));
-    
+
     // First commit
     Document doc = new Document();
     doc.add(newField("c", "val", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
@@ -2637,7 +2666,7 @@ public class TestIndexWriter extends Luc
 
     // Keep that commit
     sdp.snapshot("id");
-    
+
     // Second commit - now KeepOnlyLastCommit cannot delete the prev commit.
     doc = new Document();
     doc.add(newField("c", "val", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
@@ -2649,31 +2678,26 @@ public class TestIndexWriter extends Luc
     sdp.release("id");
     writer.deleteUnusedFiles();
     assertEquals(1, IndexReader.listCommits(dir).size());
-    
+
     writer.close();
     dir.close();
   }
-  
-  private static class FlushCountingIndexWriter extends IndexWriter {
-    int flushCount;
-    public FlushCountingIndexWriter(Directory dir, IndexWriterConfig iwc) throws IOException {
-      super(dir, iwc);
-    }
-    @Override
-    public void doAfterFlush() {
-      flushCount++;
-    }
-  }
 
   public void testIndexingThenDeleting() throws Exception {
     final Random r = random;
-
     Directory dir = newDirectory();
-    FlushCountingIndexWriter w = new FlushCountingIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, true, false)).setRAMBufferSizeMB(1.0).setMaxBufferedDocs(-1).setMaxBufferedDeleteTerms(-1));
+    // note this test explicitly disables payloads
+    final Analyzer analyzer = new Analyzer() {
+      @Override
+      public TokenStream tokenStream(String fieldName, Reader reader) {
+        return new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+      }
+    };
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setRAMBufferSizeMB(1.0).setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH).setMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH));
     w.setInfoStream(VERBOSE ? System.out : null);
     Document doc = new Document();
     doc.add(newField("field", "go 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20", Field.Store.NO, Field.Index.ANALYZED));
-    int num = 6 * RANDOM_MULTIPLIER;
+    int num = TEST_NIGHTLY ? 6 * RANDOM_MULTIPLIER : 3 * RANDOM_MULTIPLIER;
     for (int iter = 0; iter < num; iter++) {
       int count = 0;
 
@@ -2683,15 +2707,15 @@ public class TestIndexWriter extends Luc
       }
       if (doIndexing) {
         // Add docs until a flush is triggered
-        final int startFlushCount = w.flushCount;
-        while(w.flushCount == startFlushCount) {
+        final int startFlushCount = w.getFlushCount();
+        while(w.getFlushCount() == startFlushCount) {
           w.addDocument(doc);
           count++;
         }
       } else {
         // Delete docs until a flush is triggered
-        final int startFlushCount = w.flushCount;
-        while(w.flushCount == startFlushCount) {
+        final int startFlushCount = w.getFlushCount();
+        while(w.getFlushCount() == startFlushCount) {
           w.deleteDocuments(new Term("foo", ""+count));
           count++;
         }
@@ -2701,13 +2725,13 @@ public class TestIndexWriter extends Luc
     w.close();
     dir.close();
   }
-  
+
   public void testNoCommits() throws Exception {
     // Tests that if we don't call commit(), the directory has 0 commits. This has
     // changed since LUCENE-2386, where before IW would always commit on a fresh
     // new index.
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     try {
       IndexReader.listCommits(dir);
       fail("listCommits should have thrown an exception over empty index");
@@ -2722,20 +2746,20 @@ public class TestIndexWriter extends Luc
 
   public void testEmptyFSDirWithNoLock() throws Exception {
     // Tests that if FSDir is opened w/ a NoLockFactory (or SingleInstanceLF),
-    // then IndexWriter ctor succeeds. Previously (LUCENE-2386) it failed 
+    // then IndexWriter ctor succeeds. Previously (LUCENE-2386) it failed
     // when listAll() was called in IndexFileDeleter.
-    Directory dir = newFSDirectory(new File(TEMP_DIR, "emptyFSDirNoLock"), NoLockFactory.getNoLockFactory());
-    new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())).close();
+    Directory dir = newFSDirectory(_TestUtil.getTempDir("emptyFSDirNoLock"), NoLockFactory.getNoLockFactory());
+    new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
     dir.close();
   }
 
   public void testEmptyDirRollback() throws Exception {
     // Tests that if IW is created over an empty Directory, some documents are
-    // indexed, flushed (but not committed) and then IW rolls back, then no 
+    // indexed, flushed (but not committed) and then IW rolls back, then no
     // files are left in the Directory.
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( 
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
                                          .setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
     String[] files = dir.listAll();
 
@@ -2758,7 +2782,7 @@ public class TestIndexWriter extends Luc
     writer.addDocument(doc);
     // Adding just one document does not call flush yet.
     assertEquals("only the stored and term vector files should exist in the directory", 5 + extraFileCount, dir.listAll().length);
-    
+
     doc = new Document();
     doc.add(newField("c", "val", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
     writer.addDocument(doc);
@@ -2779,17 +2803,17 @@ public class TestIndexWriter extends Luc
   public void testNoSegmentFile() throws IOException {
     Directory dir = newDirectory();
     dir.setLockFactory(NoLockFactory.getNoLockFactory());
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( 
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
-    
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
+
     Document doc = new Document();
     doc.add(newField("c", "val", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
     w.addDocument(doc);
     w.addDocument(doc);
-    IndexWriter w2 = new IndexWriter(dir, newIndexWriterConfig( 
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2)
+    IndexWriter w2 = new IndexWriter(dir, newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2)
         .setOpenMode(OpenMode.CREATE));
-    
+
     w2.close();
     // If we don't do that, the test fails on Windows
     w.rollback();
@@ -2799,7 +2823,7 @@ public class TestIndexWriter extends Luc
   public void testFutureCommit() throws Exception {
     Directory dir = newDirectory();
 
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
     Document doc = new Document();
     w.addDocument(doc);
 
@@ -2825,10 +2849,10 @@ public class TestIndexWriter extends Luc
 
     assertNotNull(commit);
 
-    w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE).setIndexCommit(commit));
+    w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE).setIndexCommit(commit));
 
     assertEquals(1, w.numDocs());
-    
+
     // commit IndexWriter to "third"
     w.addDocument(doc);
     commitData.put("tag", "third");
@@ -2875,7 +2899,7 @@ public class TestIndexWriter extends Luc
   public void testRandomStoredFields() throws IOException {
     Directory dir = newDirectory();
     Random rand = random;
-    RandomIndexWriter w = new RandomIndexWriter(rand, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(_TestUtil.nextInt(rand, 5, 20)));
+    RandomIndexWriter w = new RandomIndexWriter(rand, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(_TestUtil.nextInt(rand, 5, 20)));
     //w.w.setInfoStream(System.out);
     //w.w.setUseCompoundFile(false);
     if (VERBOSE) {
@@ -2883,7 +2907,7 @@ public class TestIndexWriter extends Luc
     }
     final int docCount = 200*RANDOM_MULTIPLIER;
     final int fieldCount = _TestUtil.nextInt(rand, 1, 5);
-      
+
     final List<Integer> fieldIDs = new ArrayList<Integer>();
 
     Field idField = newField("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
@@ -2893,7 +2917,7 @@ public class TestIndexWriter extends Luc
     }
 
     final Map<String,Document> docs = new HashMap<String,Document>();
-    
+
     if (VERBOSE) {
       System.out.println("TEST: build index docCount=" + docCount);
     }
@@ -2971,7 +2995,7 @@ public class TestIndexWriter extends Luc
   public void testNoUnwantedTVFiles() throws Exception {
 
     Directory dir = newDirectory();
-    IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setRAMBufferSizeMB(0.01).setMergePolicy(newLogMergePolicy()));
+    IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setRAMBufferSizeMB(0.01).setMergePolicy(newLogMergePolicy()));
     ((LogMergePolicy) indexWriter.getConfig().getMergePolicy()).setUseCompoundFile(false);
 
     String BIG="alskjhlaksjghlaksjfhalksvjepgjioefgjnsdfjgefgjhelkgjhqewlrkhgwlekgrhwelkgjhwelkgrhwlkejg";
@@ -3080,7 +3104,7 @@ public class TestIndexWriter extends Luc
     Directory dir = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random, dir, new StringSplitAnalyzer());
 
-    char[] chars = new char[DocumentsWriter.MAX_TERM_LENGTH_UTF8];
+    char[] chars = new char[DocumentsWriterPerThread.MAX_TERM_LENGTH_UTF8];
     Arrays.fill(chars, 'x');
     Document doc = new Document();
     final String bigTerm = new String(chars);
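
The recurring change in TestIndexWriter.java above is mechanical: the no-arg MockAnalyzer constructor becomes the seeded MockAnalyzer(random), and raw TEMP_DIR paths go through _TestUtil.getTempDir. As a rough sketch (not part of the commit itself), the setup idiom the tests converge on looks like the fragment below, assuming it sits inside a LuceneTestCase-derived test method; the directory name and field text are made up for illustration, every identifier is taken from the hunks above:

    // hypothetical fragment inside a LuceneTestCase test method
    Directory dir = newFSDirectory(_TestUtil.getTempDir("example"),
                                   NoLockFactory.getNoLockFactory());
    IndexWriter writer = new IndexWriter(dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    try {
      Document doc = new Document();
      doc.add(newField("field", "some text", Field.Store.NO, Field.Index.ANALYZED));
      writer.addDocument(doc);
      writer.commit();
    } finally {
      writer.close();   // releases the write lock
      dir.close();
    }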

Modified: lucene/dev/branches/flexscoring/lucene/src/test/org/apache/lucene/index/TestIndexWriterConfig.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/flexscoring/lucene/src/test/org/apache/lucene/index/TestIndexWriterConfig.java?rev=1103112&r1=1103111&r2=1103112&view=diff
==============================================================================
--- lucene/dev/branches/flexscoring/lucene/src/test/org/apache/lucene/index/TestIndexWriterConfig.java (original)
+++ lucene/dev/branches/flexscoring/lucene/src/test/org/apache/lucene/index/TestIndexWriterConfig.java Sat May 14 13:51:35 2011
@@ -24,7 +24,7 @@ import java.util.HashSet;
 import java.util.Set;
 
 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.index.DocumentsWriter.IndexingChain;
+import org.apache.lucene.index.DocumentsWriterPerThread.IndexingChain;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.DefaultSimilarityProvider;
 import org.apache.lucene.search.IndexSearcher;
@@ -36,20 +36,20 @@ public class TestIndexWriterConfig exten
   private static final class MySimilarityProvider extends DefaultSimilarityProvider {
     // Does not implement anything - used only for type checking on IndexWriterConfig.
   }
-  
+
   private static final class MyIndexingChain extends IndexingChain {
     // Does not implement anything - used only for type checking on IndexWriterConfig.
 
     @Override
-    DocConsumer getChain(DocumentsWriter documentsWriter) {
+    DocConsumer getChain(DocumentsWriterPerThread documentsWriter) {
       return null;
     }
-    
+
   }
 
   @Test
   public void testDefaults() throws Exception {
-    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
     assertEquals(MockAnalyzer.class, conf.getAnalyzer().getClass());
     assertNull(conf.getIndexCommit());
     assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
@@ -64,12 +64,16 @@ public class TestIndexWriterConfig exten
     assertEquals(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB, conf.getRAMBufferSizeMB(), 0.0);
     assertEquals(IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS, conf.getMaxBufferedDocs());
     assertEquals(IndexWriterConfig.DEFAULT_READER_POOLING, conf.getReaderPooling());
-    assertTrue(DocumentsWriter.defaultIndexingChain == conf.getIndexingChain());
+    assertTrue(DocumentsWriterPerThread.defaultIndexingChain == conf.getIndexingChain());
     assertNull(conf.getMergedSegmentWarmer());
-    assertEquals(IndexWriterConfig.DEFAULT_MAX_THREAD_STATES, conf.getMaxThreadStates());
     assertEquals(IndexWriterConfig.DEFAULT_READER_TERMS_INDEX_DIVISOR, conf.getReaderTermsIndexDivisor());
-    assertEquals(LogByteSizeMergePolicy.class, conf.getMergePolicy().getClass());
-    
+    assertEquals(TieredMergePolicy.class, conf.getMergePolicy().getClass());
+    assertEquals(ThreadAffinityDocumentsWriterThreadPool.class, conf.getIndexerThreadPool().getClass());
+    assertNull(conf.getFlushPolicy());
+    assertEquals(IndexWriterConfig.DEFAULT_RAM_PER_THREAD_HARD_LIMIT_MB, conf.getRAMPerThreadHardLimitMB());
+
+
+
     // Sanity check - validate that all getters are covered.
     Set<String> getters = new HashSet<String>();
     getters.add("getAnalyzer");
@@ -91,7 +95,11 @@ public class TestIndexWriterConfig exten
     getters.add("getMergePolicy");
     getters.add("getMaxThreadStates");
     getters.add("getReaderPooling");
+    getters.add("getIndexerThreadPool");
     getters.add("getReaderTermsIndexDivisor");
+    getters.add("getFlushPolicy");
+    getters.add("getRAMPerThreadHardLimitMB");
+    
     for (Method m : IndexWriterConfig.class.getDeclaredMethods()) {
       if (m.getDeclaringClass() == IndexWriterConfig.class && m.getName().startsWith("get")) {
         assertTrue("method " + m.getName() + " is not tested for defaults", getters.contains(m.getName()));
@@ -107,12 +115,12 @@ public class TestIndexWriterConfig exten
       if (m.getDeclaringClass() == IndexWriterConfig.class
           && m.getName().startsWith("set")
           && !Modifier.isStatic(m.getModifiers())) {
-        assertEquals("method " + m.getName() + " does not return IndexWriterConfig", 
+        assertEquals("method " + m.getName() + " does not return IndexWriterConfig",
             IndexWriterConfig.class, m.getReturnType());
       }
     }
   }
-  
+
   @Test
   public void testConstants() throws Exception {
     // Tests that the values of the constants does not change
@@ -123,13 +131,12 @@ public class TestIndexWriterConfig exten
     assertEquals(IndexWriterConfig.DISABLE_AUTO_FLUSH, IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS);
     assertEquals(16.0, IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB, 0.0);
     assertEquals(false, IndexWriterConfig.DEFAULT_READER_POOLING);
-    assertEquals(8, IndexWriterConfig.DEFAULT_MAX_THREAD_STATES);
     assertEquals(IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, IndexWriterConfig.DEFAULT_READER_TERMS_INDEX_DIVISOR);
   }
-  
+
   @Test
   public void testToString() throws Exception {
-    String str = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).toString();
+    String str = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).toString();
     for (Field f : IndexWriterConfig.class.getDeclaredFields()) {
       int modifiers = f.getModifiers();
       if (Modifier.isStatic(modifiers) && Modifier.isFinal(modifiers)) {
@@ -143,30 +150,30 @@ public class TestIndexWriterConfig exten
       assertTrue(f.getName() + " not found in toString", str.indexOf(f.getName()) != -1);
     }
   }
-  
+
   @Test
   public void testClone() throws Exception {
-    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
     IndexWriterConfig clone = (IndexWriterConfig) conf.clone();
-    
+
     // Clone is shallow since not all parameters are cloneable.
     assertTrue(conf.getIndexDeletionPolicy() == clone.getIndexDeletionPolicy());
-    
+
     conf.setMergeScheduler(new SerialMergeScheduler());
     assertEquals(ConcurrentMergeScheduler.class, clone.getMergeScheduler().getClass());
   }
 
   @Test
   public void testInvalidValues() throws Exception {
-    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
-    
+    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
+
     // Test IndexDeletionPolicy
     assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
     conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(null));
     assertEquals(SnapshotDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
     conf.setIndexDeletionPolicy(null);
     assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
-    
+
     // Test MergeScheduler
     assertEquals(ConcurrentMergeScheduler.class, conf.getMergeScheduler().getClass());
     conf.setMergeScheduler(new SerialMergeScheduler());
@@ -183,12 +190,12 @@ public class TestIndexWriterConfig exten
     assertTrue(IndexSearcher.getDefaultSimilarityProvider() == conf.getSimilarityProvider());
 
     // Test IndexingChain
-    assertTrue(DocumentsWriter.defaultIndexingChain == conf.getIndexingChain());
+    assertTrue(DocumentsWriterPerThread.defaultIndexingChain == conf.getIndexingChain());
     conf.setIndexingChain(new MyIndexingChain());
     assertEquals(MyIndexingChain.class, conf.getIndexingChain().getClass());
     conf.setIndexingChain(null);
-    assertTrue(DocumentsWriter.defaultIndexingChain == conf.getIndexingChain());
-    
+    assertTrue(DocumentsWriterPerThread.defaultIndexingChain == conf.getIndexingChain());
+
     try {
       conf.setMaxBufferedDeleteTerms(0);
       fail("should not have succeeded to set maxBufferedDeleteTerms to 0");
@@ -239,14 +246,22 @@ public class TestIndexWriterConfig exten
       // this is expected
     }
     
-    assertEquals(IndexWriterConfig.DEFAULT_MAX_THREAD_STATES, conf.getMaxThreadStates());
-    conf.setMaxThreadStates(5);
-    assertEquals(5, conf.getMaxThreadStates());
-    conf.setMaxThreadStates(0);
-    assertEquals(IndexWriterConfig.DEFAULT_MAX_THREAD_STATES, conf.getMaxThreadStates());
+    try {
+      conf.setRAMPerThreadHardLimitMB(2048);
+      fail("should not have succeeded to set RAMPerThreadHardLimitMB to >= 2048");
+    } catch (IllegalArgumentException e) {
+      // this is expected
+    }
+    
+    try {
+      conf.setRAMPerThreadHardLimitMB(0);
+      fail("should not have succeeded to set RAMPerThreadHardLimitMB to 0");
+    } catch (IllegalArgumentException e) {
+      // this is expected
+    }
     
     // Test MergePolicy
-    assertEquals(LogByteSizeMergePolicy.class, conf.getMergePolicy().getClass());
+    assertEquals(TieredMergePolicy.class, conf.getMergePolicy().getClass());
     conf.setMergePolicy(new LogDocMergePolicy());
     assertEquals(LogDocMergePolicy.class, conf.getMergePolicy().getClass());
     conf.setMergePolicy(null);
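
The TestIndexWriterConfig.java hunks track the reworked IndexWriterConfig surface: TieredMergePolicy is asserted as the default merge policy instead of LogByteSizeMergePolicy, the default indexing chain now comes from DocumentsWriterPerThread, the DEFAULT_MAX_THREAD_STATES checks are dropped, and getIndexerThreadPool, getFlushPolicy and getRAMPerThreadHardLimitMB are covered, with setRAMPerThreadHardLimitMB rejecting 0 and anything >= 2048. A condensed sketch of that surface (illustrative only, the 64 MB value is arbitrary), again assuming a LuceneTestCase context:

    // hypothetical fragment; assertions mirror the test changes above
    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
    assertEquals(TieredMergePolicy.class, conf.getMergePolicy().getClass());  // new default policy
    assertNull(conf.getFlushPolicy());                                        // null by default
    conf.setRAMPerThreadHardLimitMB(64);                                      // example value in the accepted range
    assertEquals(64, conf.getRAMPerThreadHardLimitMB());
    try {
      conf.setRAMPerThreadHardLimitMB(2048);                                  // 2048 and above are rejected
      fail("expected IllegalArgumentException for 2048");
    } catch (IllegalArgumentException expected) {
      // expected, per the hunk above
    }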

Modified: lucene/dev/branches/flexscoring/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/flexscoring/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java?rev=1103112&r1=1103111&r2=1103112&view=diff
==============================================================================
--- lucene/dev/branches/flexscoring/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java (original)
+++ lucene/dev/branches/flexscoring/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java Sat May 14 13:51:35 2011
@@ -33,7 +33,7 @@ import org.apache.lucene.util.LuceneTest
 import org.apache.lucene.util._TestUtil;
 
 public class TestIndexWriterDelete extends LuceneTestCase {
-  
+
   // test the simple case
   public void testSimpleCase() throws IOException {
     String[] keywords = { "1", "2" };
@@ -44,7 +44,7 @@ public class TestIndexWriterDelete exten
 
     Directory dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));
 
     for (int i = 0; i < keywords.length; i++) {
       Document doc = new Document();
@@ -79,7 +79,7 @@ public class TestIndexWriterDelete exten
 
     Directory dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
         .setMaxBufferedDeleteTerms(2));
     modifier.setInfoStream(VERBOSE ? System.out : null);
     int id = 0;
@@ -113,7 +113,7 @@ public class TestIndexWriterDelete exten
   public void testMaxBufferedDeletes() throws IOException {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));
 
     writer.setInfoStream(VERBOSE ? System.out : null);
     writer.addDocument(new Document());
@@ -124,7 +124,7 @@ public class TestIndexWriterDelete exten
     writer.close();
     dir.close();
   }
-
+  
   // test when delete terms only apply to ram segments
   public void testRAMDeletes() throws IOException {
     for(int t=0;t<2;t++) {
@@ -133,7 +133,7 @@ public class TestIndexWriterDelete exten
       }
       Directory dir = newDirectory();
       IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(4)
+          TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(4)
           .setMaxBufferedDeleteTerms(4));
       modifier.setInfoStream(VERBOSE ? System.out : null);
       int id = 0;
@@ -172,7 +172,7 @@ public class TestIndexWriterDelete exten
   public void testBothDeletes() throws IOException {
     Directory dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(100)
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(100)
         .setMaxBufferedDeleteTerms(100));
 
     int id = 0;
@@ -206,7 +206,7 @@ public class TestIndexWriterDelete exten
   public void testBatchDeletes() throws IOException {
     Directory dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
         .setMaxBufferedDeleteTerms(2));
 
     int id = 0;
@@ -220,7 +220,7 @@ public class TestIndexWriterDelete exten
     IndexReader reader = IndexReader.open(dir, true);
     assertEquals(7, reader.numDocs());
     reader.close();
-      
+
     id = 0;
     modifier.deleteDocuments(new Term("id", String.valueOf(++id)));
     modifier.deleteDocuments(new Term("id", String.valueOf(++id)));
@@ -249,7 +249,7 @@ public class TestIndexWriterDelete exten
   public void testDeleteAll() throws IOException {
     Directory dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
         .setMaxBufferedDeleteTerms(2));
 
     int id = 0;
@@ -295,35 +295,35 @@ public class TestIndexWriterDelete exten
   public void testDeleteAllRollback() throws IOException {
     Directory dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
         .setMaxBufferedDeleteTerms(2));
-    
+
     int id = 0;
     int value = 100;
-    
+
     for (int i = 0; i < 7; i++) {
       addDoc(modifier, ++id, value);
     }
     modifier.commit();
-    
+
     addDoc(modifier, ++id, value);
 
     IndexReader reader = IndexReader.open(dir, true);
     assertEquals(7, reader.numDocs());
     reader.close();
-    
+
     // Delete all
-    modifier.deleteAll(); 
+    modifier.deleteAll();
 
     // Roll it back
     modifier.rollback();
     modifier.close();
-    
+
     // Validate that the docs are still there
     reader = IndexReader.open(dir, true);
     assertEquals(7, reader.numDocs());
     reader.close();
-    
+
     dir.close();
   }
 
@@ -332,12 +332,12 @@ public class TestIndexWriterDelete exten
   public void testDeleteAllNRT() throws IOException {
     Directory dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
         .setMaxBufferedDeleteTerms(2));
-    
+
     int id = 0;
     int value = 100;
-    
+
     for (int i = 0; i < 7; i++) {
       addDoc(modifier, ++id, value);
     }
@@ -349,24 +349,24 @@ public class TestIndexWriterDelete exten
 
     addDoc(modifier, ++id, value);
     addDoc(modifier, ++id, value);
-    
+
     // Delete all
-    modifier.deleteAll(); 
+    modifier.deleteAll();
 
     reader = modifier.getReader();
     assertEquals(0, reader.numDocs());
     reader.close();
-    
+
 
     // Roll it back
     modifier.rollback();
     modifier.close();
-    
+
     // Validate that the docs are still there
     reader = IndexReader.open(dir, true);
     assertEquals(7, reader.numDocs());
     reader.close();
-    
+
     dir.close();
   }
 
@@ -423,7 +423,7 @@ public class TestIndexWriterDelete exten
     MockDirectoryWrapper startDir = newDirectory();
     // TODO: find the resource leak that only occurs sometimes here.
     startDir.setNoDeleteOpenFile(false);
-    IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+    IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     for (int i = 0; i < 157; i++) {
       Document d = new Document();
       d.add(newField("id", Integer.toString(i), Field.Store.YES,
@@ -450,7 +450,7 @@ public class TestIndexWriterDelete exten
       dir.setPreventDoubleWrite(false);
       IndexWriter modifier = new IndexWriter(dir,
                                              newIndexWriterConfig(
-                                                                  TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))
+                                                                  TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false))
                                              .setMaxBufferedDocs(1000)
                                              .setMaxBufferedDeleteTerms(1000)
                                              .setMergeScheduler(new ConcurrentMergeScheduler()));
@@ -536,10 +536,15 @@ public class TestIndexWriterDelete exten
             fail(testName + " hit IOException after disk space was freed up");
           }
         }
-
+        // prevent throwing a random exception here!!
+        final double randomIOExceptionRate = dir.getRandomIOExceptionRate();
+        final long maxSizeInBytes = dir.getMaxSizeInBytes();
+        dir.setRandomIOExceptionRate(0.0);
+        dir.setMaxSizeInBytes(0);
         if (!success) {
           // Must force the close else the writer can have
           // open files which cause exc in MockRAMDir.close
+         
           modifier.rollback();
         }
 
@@ -549,6 +554,8 @@ public class TestIndexWriterDelete exten
           _TestUtil.checkIndex(dir);
           TestIndexWriter.assertNoUnreferencedFiles(dir, "after writer.close");
         }
+        dir.setRandomIOExceptionRate(randomIOExceptionRate);
+        dir.setMaxSizeInBytes(maxSizeInBytes);
 
         // Finally, verify index is not corrupt, and, if
         // we succeeded, we see all docs changed, and if
@@ -619,7 +626,7 @@ public class TestIndexWriterDelete exten
   // This test tests that buffered deletes are cleared when
   // an Exception is hit during flush.
   public void testErrorAfterApplyDeletes() throws IOException {
-    
+
     MockDirectoryWrapper.Failure failure = new MockDirectoryWrapper.Failure() {
         boolean sawMaybe = false;
         boolean failed = false;
@@ -682,7 +689,7 @@ public class TestIndexWriterDelete exten
 
     MockDirectoryWrapper dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-                                                                     TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false).setMergePolicy(newLogMergePolicy()));
+                                                                     TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false).setMergePolicy(newLogMergePolicy()));
     modifier.setInfoStream(VERBOSE ? System.out : null);
 
     LogMergePolicy lmp = (LogMergePolicy) modifier.getConfig().getMergePolicy();
@@ -783,7 +790,7 @@ public class TestIndexWriterDelete exten
   // a segment is written are cleaned up if there's an i/o error
 
   public void testErrorInDocsWriterAdd() throws IOException {
-    
+
     MockDirectoryWrapper.Failure failure = new MockDirectoryWrapper.Failure() {
         boolean failed = false;
         @Override
@@ -809,7 +816,7 @@ public class TestIndexWriterDelete exten
     String[] text = { "Amsterdam", "Venice" };
 
     MockDirectoryWrapper dir = newDirectory();
-    IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+    IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     modifier.commit();
     dir.failOn(failure.reset());
 
@@ -837,7 +844,7 @@ public class TestIndexWriterDelete exten
 
   public void testDeleteNullQuery() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+    IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
 
     for (int i = 0; i < 5; i++) {
       addDoc(modifier, i, 2*i);