Posted to commits@lucene.apache.org by rm...@apache.org on 2010/08/10 22:32:48 UTC

svn commit: r984202 [4/4] - in /lucene/dev/trunk/lucene/src: java/org/apache/lucene/index/ java/org/apache/lucene/index/codecs/standard/ test/org/apache/lucene/analysis/ test/org/apache/lucene/index/ test/org/apache/lucene/util/

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java Tue Aug 10 20:32:47 2010
@@ -40,26 +40,16 @@ import org.apache.lucene.util.LuceneTest
 import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.ThreadInterruptedException;
+import java.util.concurrent.atomic.AtomicInteger;
 
 public class TestIndexWriterReader extends LuceneTestCase {
   static PrintStream infoStream;
-
-  public static class HeavyAtomicInt {
-    private int value;
-    public HeavyAtomicInt(int start) {
-      value = start;
-    }
-    public synchronized int addAndGet(int inc) {
-      value += inc;
-      return value;
-    }
-    public synchronized int incrementAndGet() {
-      value++;
-      return value;
-    }
-    public synchronized int intValue() {
-      return value;
-    }
+  private Random random;
+  
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    random = newRandom();
   }
   
   public static int count(Term t, IndexReader r) throws IOException {
@@ -81,7 +71,7 @@ public class TestIndexWriterReader exten
     boolean optimize = true;
 
     Directory dir1 = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
 
     // create the index
     createIndexNoClose(!optimize, "index1", writer);
@@ -115,7 +105,7 @@ public class TestIndexWriterReader exten
     assertEquals(0, count(new Term("id", id10), r3));
     assertEquals(1, count(new Term("id", Integer.toString(8000)), r3));
 
-    writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     Document doc = new Document();
     doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
     writer.addDocument(doc);
@@ -142,7 +132,7 @@ public class TestIndexWriterReader exten
     boolean optimize = false;
 
     Directory dir1 = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     writer.setInfoStream(infoStream);
     // create the index
     createIndexNoClose(!optimize, "index1", writer);
@@ -150,7 +140,7 @@ public class TestIndexWriterReader exten
 
     // create a 2nd index
     Directory dir2 = new MockRAMDirectory();
-    IndexWriter writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     writer2.setInfoStream(infoStream);
     createIndexNoClose(!optimize, "index2", writer2);
     writer2.close();
@@ -187,12 +177,12 @@ public class TestIndexWriterReader exten
     boolean optimize = false;
 
     Directory dir1 = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     writer.setInfoStream(infoStream);
 
     // create a 2nd index
     Directory dir2 = new MockRAMDirectory();
-    IndexWriter writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     writer2.setInfoStream(infoStream);
     createIndexNoClose(!optimize, "index2", writer2);
     writer2.close();
@@ -220,7 +210,7 @@ public class TestIndexWriterReader exten
     boolean optimize = true;
 
     Directory dir1 = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setReaderTermsIndexDivisor(2));
+    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setReaderTermsIndexDivisor(2));
     writer.setInfoStream(infoStream);
     // create the index
     createIndexNoClose(!optimize, "index1", writer);
@@ -258,7 +248,7 @@ public class TestIndexWriterReader exten
     writer.close();
         
     // reopen the writer to verify the delete made it to the directory
-    writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     writer.setInfoStream(infoStream);
     IndexReader w2r1 = writer.getReader();
     assertEquals(0, count(new Term("id", id10), w2r1));
@@ -272,7 +262,7 @@ public class TestIndexWriterReader exten
     int numDirs = 3;
     
     Directory mainDir = new MockRAMDirectory();
-    IndexWriter mainWriter = new IndexWriter(mainDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter mainWriter = new IndexWriter(mainDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     mainWriter.setInfoStream(infoStream);
     AddDirectoriesThreads addDirThreads = new AddDirectoriesThreads(numIter, mainWriter);
     addDirThreads.launchThreads(numDirs);
@@ -308,14 +298,14 @@ public class TestIndexWriterReader exten
     final List<Throwable> failures = new ArrayList<Throwable>();
     IndexReader[] readers;
     boolean didClose = false;
-    HeavyAtomicInt count = new HeavyAtomicInt(0);
-    HeavyAtomicInt numaddIndexes = new HeavyAtomicInt(0);
+    AtomicInteger count = new AtomicInteger(0);
+    AtomicInteger numaddIndexes = new AtomicInteger(0);
     
     public AddDirectoriesThreads(int numDirs, IndexWriter mainWriter) throws Throwable {
       this.numDirs = numDirs;
       this.mainWriter = mainWriter;
       addDir = new MockRAMDirectory();
-      IndexWriter writer = new IndexWriter(addDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+      IndexWriter writer = new IndexWriter(addDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
       for (int i = 0; i < NUM_INIT_DOCS; i++) {
         Document doc = createDocument(i, "addindex", 4);
         writer.addDocument(doc);
@@ -421,7 +411,7 @@ public class TestIndexWriterReader exten
    */
   public void doTestIndexWriterReopenSegment(boolean optimize) throws Exception {
     Directory dir1 = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     writer.setInfoStream(infoStream);
     IndexReader r1 = writer.getReader();
     assertEquals(0, r1.maxDoc());
@@ -458,7 +448,7 @@ public class TestIndexWriterReader exten
     writer.close();
 
     // test whether the changes made it to the directory
-    writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     IndexReader w2r1 = writer.getReader();
     // insure the deletes were actually flushed to the directory
     assertEquals(200, w2r1.maxDoc());
@@ -495,9 +485,9 @@ public class TestIndexWriterReader exten
    * //} //writer.deleteDocuments(term); td.close(); return doc; }
    */
   
-  public static void createIndex(Directory dir1, String indexName,
+  public static void createIndex(Random random, Directory dir1, String indexName,
       boolean multiSegment) throws IOException {
-    IndexWriter w = new IndexWriter(dir1, new IndexWriterConfig(
+    IndexWriter w = new IndexWriter(dir1, newIndexWriterConfig(random,
         TEST_VERSION_CURRENT, new MockAnalyzer())
         .setMergePolicy(new LogDocMergePolicy()));
     for (int i = 0; i < 100; i++) {
@@ -534,9 +524,9 @@ public class TestIndexWriterReader exten
     Directory dir1 = new MockRAMDirectory();
     // Enroll warmer
     MyWarmer warmer = new MyWarmer();
-    IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(
+    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random,
         TEST_VERSION_CURRENT, new MockAnalyzer())
-        .setMaxBufferedDocs(2).setMergedSegmentWarmer(warmer));
+        .setMaxBufferedDocs(2).setMergedSegmentWarmer(warmer).setMergeScheduler(new ConcurrentMergeScheduler()));
     writer.setInfoStream(infoStream);
 
     // create the index
@@ -567,7 +557,7 @@ public class TestIndexWriterReader exten
 
   public void testAfterCommit() throws Exception {
     Directory dir1 = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new ConcurrentMergeScheduler()));
     writer.commit();
     writer.setInfoStream(infoStream);
 
@@ -600,7 +590,7 @@ public class TestIndexWriterReader exten
   // Make sure reader remains usable even if IndexWriter closes
   public void testAfterClose() throws Exception {
     Directory dir1 = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     writer.setInfoStream(infoStream);
 
     // create the index
@@ -629,7 +619,7 @@ public class TestIndexWriterReader exten
   // Stress test reopen during addIndexes
   public void testDuringAddIndexes() throws Exception {
     Directory dir1 = new MockRAMDirectory();
-    final IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    final IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     writer.setInfoStream(infoStream);
     ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2);
 
@@ -706,7 +696,7 @@ public class TestIndexWriterReader exten
   // Stress test reopen during add/delete
   public void testDuringAddDelete() throws Exception {
     Directory dir1 = new MockRAMDirectory();
-    final IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    final IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     writer.setInfoStream(infoStream);
     ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2);
 
@@ -786,7 +776,7 @@ public class TestIndexWriterReader exten
 
   public void testExpungeDeletes() throws Throwable {
     Directory dir = new MockRAMDirectory();
-    final IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     Document doc = new Document();
     doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
     Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
@@ -810,7 +800,7 @@ public class TestIndexWriterReader exten
 
   public void testDeletesNumDocs() throws Throwable {
     Directory dir = new MockRAMDirectory();
-    final IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     Document doc = new Document();
     doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
     Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
@@ -840,7 +830,7 @@ public class TestIndexWriterReader exten
   public void testEmptyIndex() throws Exception {
     // Ensures that getReader works on an empty index, which hasn't been committed yet.
     Directory dir = new MockRAMDirectory();
-    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     IndexReader r = w.getReader();
     assertEquals(0, r.numDocs());
     r.close();
@@ -849,8 +839,9 @@ public class TestIndexWriterReader exten
 
   public void testSegmentWarmer() throws Exception {
     Directory dir = new MockRAMDirectory();
-    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())
                                     .setMaxBufferedDocs(2).setReaderPooling(true));
+    ((LogMergePolicy) w.getMergePolicy()).setMergeFactor(10);
     w.setMergedSegmentWarmer(new IndexWriter.IndexReaderWarmer() {
         public void warm(IndexReader r) throws IOException {
           final IndexSearcher s = new IndexSearcher(r);

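Note on the hunks above: the hand-rolled HeavyAtomicInt counter is dropped in favor of java.util.concurrent.atomic.AtomicInteger, which already provides the same operations used here (incrementAndGet, addAndGet, intValue). A minimal sketch of the equivalent usage, relying only on the standard JDK API and not on anything added by this patch:

    import java.util.concurrent.atomic.AtomicInteger;

    AtomicInteger count = new AtomicInteger(0);   // replaces new HeavyAtomicInt(0)
    count.incrementAndGet();                      // atomically increment and return the new value
    count.addAndGet(4);                           // atomically add 4 and return the new value
    int current = count.intValue();               // read the current value
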
Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestLazyBug.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestLazyBug.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestLazyBug.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestLazyBug.java Tue Aug 10 20:32:47 2010
@@ -71,7 +71,7 @@ public class TestLazyBug extends LuceneT
     Directory dir = new RAMDirectory();
     try {
       Random r = newRandom();
-      IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(r,
           TEST_VERSION_CURRENT, new MockAnalyzer()));
       LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
       lmp.setUseCompoundFile(false);

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java Tue Aug 10 20:32:47 2010
@@ -18,6 +18,7 @@ package org.apache.lucene.index;
  */
 
 import java.io.IOException;
+import java.util.Random;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
@@ -57,11 +58,11 @@ public class TestLazyProxSkipping extend
       }
     }
     
-    private void createIndex(int numHits) throws IOException {
+    private void createIndex(Random random, int numHits) throws IOException {
         int numDocs = 500;
         
         Directory directory = new SeekCountingDirectory();
-        IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
+        IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
         ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false);
         ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false);
         for (int i = 0; i < numDocs; i++) {
@@ -99,8 +100,8 @@ public class TestLazyProxSkipping extend
         return this.searcher.search(pq, null, 1000).scoreDocs;        
     }
     
-    private void performTest(int numHits) throws IOException {
-        createIndex(numHits);
+    private void performTest(Random random, int numHits) throws IOException {
+        createIndex(random, numHits);
         this.seeksCounter = 0;
         ScoreDoc[] hits = search();
         // verify that the right number of docs was found
@@ -113,13 +114,14 @@ public class TestLazyProxSkipping extend
     
     public void testLazySkipping() throws IOException {
         // test whether only the minimum amount of seeks() are performed
-        performTest(5);
-        performTest(10);
+        Random random = newRandom();
+        performTest(random, 5);
+        performTest(random, 10);
     }
     
     public void testSeek() throws IOException {
         Directory directory = new RAMDirectory();
-        IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+        IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()));
         for (int i = 0; i < 10; i++) {
             Document doc = new Document();
             doc.add(new Field(this.field, "a b", Field.Store.YES, Field.Index.ANALYZED));

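The recurring pattern in these test changes is that a single Random is obtained once per test (via newRandom() or in setUp) and passed explicitly into helper methods, so every writer is configured through the randomized newIndexWriterConfig(random, ...) factory on the test base class instead of a fixed new IndexWriterConfig(...). As a rough, hypothetical sketch of the shape of such a factory (the real implementation lives in LuceneTestCase and may randomize different settings):

    // hypothetical sketch only, not the actual LuceneTestCase code
    public static IndexWriterConfig newIndexWriterConfig(Random r, Version v, Analyzer a) {
      IndexWriterConfig conf = new IndexWriterConfig(v, a);
      if (r.nextBoolean()) {
        // pick a small, random flush threshold so different runs exercise different segment layouts
        conf.setMaxBufferedDocs(2 + r.nextInt(999));
      }
      return conf;
    }

Passing the Random down through helpers (createIndex, performTest, getDir1, ...) keeps the whole test reproducible from one seed rather than mixing several independently seeded generators.
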
Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestMultiFields.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestMultiFields.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestMultiFields.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestMultiFields.java Tue Aug 10 20:32:47 2010
@@ -33,7 +33,7 @@ public class TestMultiFields extends Luc
     for (int iter = 0; iter < num; iter++) {
       Directory dir = new MockRAMDirectory();
 
-      IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+      IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(r, TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
 
       Map<BytesRef,List<Integer>> docs = new HashMap<BytesRef,List<Integer>>();
       Set<Integer> deleted = new HashSet<Integer>();
@@ -132,7 +132,7 @@ public class TestMultiFields extends Luc
 
   public void testSeparateEnums() throws Exception {
     Directory dir = new MockRAMDirectory();
-    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()));
     Document d = new Document();
     d.add(new Field("f", "j", Field.Store.NO, Field.Index.NOT_ANALYZED));
     w.addDocument(d);

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java Tue Aug 10 20:32:47 2010
@@ -33,6 +33,7 @@ import org.apache.lucene.store.Directory
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.util.BytesRef;
 
 /**
@@ -56,7 +57,7 @@ public class TestMultiLevelSkipList exte
 
   public void testSimpleSkip() throws IOException {
     Directory dir = new CountingRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new PayloadAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new PayloadAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
     Term term = new Term("test", "a");
     for (int i = 0; i < 5000; i++) {
       Document d1 = new Document();
@@ -88,7 +89,7 @@ public class TestMultiLevelSkipList exte
   public void checkSkipTo(DocsAndPositionsEnum tp, int target, int maxCounter) throws IOException {
     tp.advance(target);
     if (maxCounter < counter) {
-      fail("Too many bytes read: " + counter);
+      fail("Too many bytes read: " + counter + " vs " + maxCounter);
     }
 
     assertEquals("Wrong document " + tp.docID() + " after skipTo target " + target, target, tp.docID());

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java Tue Aug 10 20:32:47 2010
@@ -18,21 +18,21 @@ package org.apache.lucene.index;
  */
 
 import java.util.Random;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.index.TestIndexWriterReader.HeavyAtomicInt;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 
 public class TestNRTReaderWithThreads extends LuceneTestCase {
   Random random = new Random();
-  HeavyAtomicInt seq = new HeavyAtomicInt(1);
+  AtomicInteger seq = new AtomicInteger(1);
 
   public void testIndexing() throws Exception {
     Directory mainDir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(mainDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
+    IndexWriter writer = new IndexWriter(mainDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
     ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2);
     ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false);
     ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false);

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNewestSegment.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNewestSegment.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNewestSegment.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNewestSegment.java Tue Aug 10 20:32:47 2010
@@ -27,7 +27,7 @@ public class TestNewestSegment extends L
   public void testNewestSegment() throws Exception {
     RAMDirectory directory = new RAMDirectory();
     Analyzer analyzer = new MockAnalyzer();
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()));
     assertNull(writer.newestSegment());
   }
 }

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNoDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNoDeletionPolicy.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNoDeletionPolicy.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNoDeletionPolicy.java Tue Aug 10 20:32:47 2010
@@ -74,7 +74,7 @@ public class TestNoDeletionPolicy extend
   @Test
   public void testAllCommitsRemain() throws Exception {
     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(),
         TEST_VERSION_CURRENT, new MockAnalyzer())
         .setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
     for (int i = 0; i < 10; i++) {

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNorms.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNorms.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNorms.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNorms.java Tue Aug 10 20:32:47 2010
@@ -19,6 +19,7 @@ package org.apache.lucene.index;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Random;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -74,13 +75,14 @@ public class TestNorms extends LuceneTes
    * Including optimize. 
    */
   public void testNorms() throws IOException {
+    Random random = newRandom();
     Directory dir1 = new RAMDirectory();
 
     norms = new ArrayList<Float>();
     modifiedNorms = new ArrayList<Float>();
 
-    createIndex(dir1);
-    doTestNorms(dir1);
+    createIndex(random, dir1);
+    doTestNorms(random, dir1);
 
     // test with a single index: index2
     ArrayList<Float> norms1 = norms;
@@ -93,14 +95,14 @@ public class TestNorms extends LuceneTes
     
     Directory dir2 = new RAMDirectory();
 
-    createIndex(dir2);
-    doTestNorms(dir2);
+    createIndex(random, dir2);
+    doTestNorms(random, dir2);
 
     // add index1 and index2 to a third index: index3
     Directory dir3 = new RAMDirectory();
 
-    createIndex(dir3);
-    IndexWriter iw = new IndexWriter(dir3, new IndexWriterConfig(
+    createIndex(random, dir3);
+    IndexWriter iw = new IndexWriter(dir3, newIndexWriterConfig(random,
         TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.APPEND)
         .setMaxBufferedDocs(5));
     ((LogMergePolicy) iw.getConfig().getMergePolicy()).setMergeFactor(3);
@@ -116,10 +118,10 @@ public class TestNorms extends LuceneTes
 
     // test with index3
     verifyIndex(dir3);
-    doTestNorms(dir3);
+    doTestNorms(random, dir3);
     
     // now with optimize
-    iw = new IndexWriter(dir3, new IndexWriterConfig(TEST_VERSION_CURRENT,
+    iw = new IndexWriter(dir3, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
         anlzr).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(5));
     ((LogMergePolicy) iw.getConfig().getMergePolicy()).setMergeFactor(3);
     iw.optimize();
@@ -131,21 +133,21 @@ public class TestNorms extends LuceneTes
     dir3.close();
   }
 
-  private void doTestNorms(Directory dir) throws IOException {
+  private void doTestNorms(Random random, Directory dir) throws IOException {
     for (int i=0; i<5; i++) {
-      addDocs(dir,12,true);
+      addDocs(random, dir,12,true);
       verifyIndex(dir);
       modifyNormsForF1(dir);
       verifyIndex(dir);
-      addDocs(dir,12,false);
+      addDocs(random, dir,12,false);
       verifyIndex(dir);
       modifyNormsForF1(dir);
       verifyIndex(dir);
     }
   }
 
-  private void createIndex(Directory dir) throws IOException {
-    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
+  private void createIndex(Random random, Directory dir) throws IOException {
+    IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(random,
         TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.CREATE)
         .setMaxBufferedDocs(5).setSimilarity(similarityOne));
     LogMergePolicy lmp = (LogMergePolicy) iw.getConfig().getMergePolicy();
@@ -189,8 +191,8 @@ public class TestNorms extends LuceneTes
     ir.close();
   }
 
-  private void addDocs(Directory dir, int ndocs, boolean compound) throws IOException {
-    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
+  private void addDocs(Random random, Directory dir, int ndocs, boolean compound) throws IOException {
+    IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(random,
         TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.APPEND)
         .setMaxBufferedDocs(5).setSimilarity(similarityOne));
     LogMergePolicy lmp = (LogMergePolicy) iw.getConfig().getMergePolicy();

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestOmitTf.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestOmitTf.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestOmitTf.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestOmitTf.java Tue Aug 10 20:32:47 2010
@@ -61,7 +61,7 @@ public class TestOmitTf extends LuceneTe
   public void testOmitTermFreqAndPositions() throws Exception {
     Directory ram = new MockRAMDirectory();
     Analyzer analyzer = new MockAnalyzer();
-    IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
+    IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, analyzer));
     Document d = new Document();
         
     // this field will have Tf
@@ -108,7 +108,7 @@ public class TestOmitTf extends LuceneTe
   public void testMixedMerge() throws Exception {
     Directory ram = new MockRAMDirectory();
     Analyzer analyzer = new MockAnalyzer();
-    IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(
+    IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(newRandom(),
         TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3));
     ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2);
     Document d = new Document();
@@ -161,7 +161,7 @@ public class TestOmitTf extends LuceneTe
   public void testMixedRAM() throws Exception {
     Directory ram = new MockRAMDirectory();
     Analyzer analyzer = new MockAnalyzer();
-    IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(
+    IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(newRandom(),
         TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(10));
     ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2);
     Document d = new Document();
@@ -209,7 +209,7 @@ public class TestOmitTf extends LuceneTe
   public void testNoPrxFile() throws Throwable {
     Directory ram = new MockRAMDirectory();
     Analyzer analyzer = new MockAnalyzer();
-    IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(
+    IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(newRandom(),
         TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3));
     LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
     lmp.setMergeFactor(2);
@@ -242,7 +242,7 @@ public class TestOmitTf extends LuceneTe
   public void testBasic() throws Exception {
     Directory dir = new MockRAMDirectory();  
     Analyzer analyzer = new MockAnalyzer();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(),
         TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(2)
         .setSimilarity(new SimpleSimilarity()));
     ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2);

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestParallelReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestParallelReader.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestParallelReader.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestParallelReader.java Tue Aug 10 20:32:47 2010
@@ -20,6 +20,7 @@ package org.apache.lucene.index;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.Random;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
@@ -41,12 +42,14 @@ public class TestParallelReader extends 
 
   private Searcher parallel;
   private Searcher single;
+  private Random random;
   
   @Override
   protected void setUp() throws Exception {
     super.setUp();
-    single = single();
-    parallel = parallel();
+    random = newRandom();
+    single = single(random);
+    parallel = parallel(random);
   }
 
   public void testQueries() throws Exception {
@@ -66,8 +69,8 @@ public class TestParallelReader extends 
   }
 
   public void testFieldNames() throws Exception {
-    Directory dir1 = getDir1();
-    Directory dir2 = getDir2();
+    Directory dir1 = getDir1(random);
+    Directory dir2 = getDir2(random);
     ParallelReader pr = new ParallelReader();
     pr.add(IndexReader.open(dir1, false));
     pr.add(IndexReader.open(dir2, false));
@@ -80,8 +83,8 @@ public class TestParallelReader extends 
   }
   
   public void testDocument() throws IOException {
-    Directory dir1 = getDir1();
-    Directory dir2 = getDir2();
+    Directory dir1 = getDir1(random);
+    Directory dir2 = getDir2(random);
     ParallelReader pr = new ParallelReader();
     pr.add(IndexReader.open(dir1, false));
     pr.add(IndexReader.open(dir2, false));
@@ -102,11 +105,11 @@ public class TestParallelReader extends 
   
   public void testIncompatibleIndexes() throws IOException {
     // two documents:
-    Directory dir1 = getDir1();
+    Directory dir1 = getDir1(random);
 
     // one document only:
     Directory dir2 = new MockRAMDirectory();
-    IndexWriter w2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     Document d3 = new Document();
     d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
     w2.addDocument(d3);
@@ -123,8 +126,8 @@ public class TestParallelReader extends 
   }
   
   public void testIsCurrent() throws IOException {
-    Directory dir1 = getDir1();
-    Directory dir2 = getDir2();
+    Directory dir1 = getDir1(random);
+    Directory dir2 = getDir2(random);
     ParallelReader pr = new ParallelReader();
     pr.add(IndexReader.open(dir1, false));
     pr.add(IndexReader.open(dir2, false));
@@ -147,17 +150,19 @@ public class TestParallelReader extends 
   }
 
   public void testIsOptimized() throws IOException {
-    Directory dir1 = getDir1();
-    Directory dir2 = getDir2();
+    Directory dir1 = getDir1(random);
+    Directory dir2 = getDir2(random);
     
     // add another document to ensure that the indexes are not optimized
-    IndexWriter modifier = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter modifier = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
+    ((LogMergePolicy) modifier.getMergePolicy()).setMergeFactor(10);
     Document d = new Document();
     d.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
     modifier.addDocument(d);
     modifier.close();
     
-    modifier = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    modifier = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
+    ((LogMergePolicy) modifier.getMergePolicy()).setMergeFactor(10);
     d = new Document();
     d.add(new Field("f2", "v2", Field.Store.YES, Field.Index.ANALYZED));
     modifier.addDocument(d);
@@ -170,7 +175,7 @@ public class TestParallelReader extends 
     assertFalse(pr.isOptimized());
     pr.close();
     
-    modifier = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    modifier = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     modifier.optimize();
     modifier.close();
     
@@ -182,7 +187,7 @@ public class TestParallelReader extends 
     pr.close();
 
     
-    modifier = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    modifier = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     modifier.optimize();
     modifier.close();
     
@@ -211,9 +216,9 @@ public class TestParallelReader extends 
   }
 
   // Fields 1-4 indexed together:
-  private Searcher single() throws IOException {
+  private Searcher single(Random random) throws IOException {
     Directory dir = new MockRAMDirectory();
-    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     Document d1 = new Document();
     d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
     d1.add(new Field("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@@ -232,18 +237,18 @@ public class TestParallelReader extends 
   }
 
   // Fields 1 & 2 in one index, 3 & 4 in other, with ParallelReader:
-  private Searcher parallel() throws IOException {
-    Directory dir1 = getDir1();
-    Directory dir2 = getDir2();
+  private Searcher parallel(Random random) throws IOException {
+    Directory dir1 = getDir1(random);
+    Directory dir2 = getDir2(random);
     ParallelReader pr = new ParallelReader();
     pr.add(IndexReader.open(dir1, false));
     pr.add(IndexReader.open(dir2, false));
     return new IndexSearcher(pr);
   }
 
-  private Directory getDir1() throws IOException {
+  private Directory getDir1(Random random) throws IOException {
     Directory dir1 = new MockRAMDirectory();
-    IndexWriter w1 = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     Document d1 = new Document();
     d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
     d1.add(new Field("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@@ -256,9 +261,9 @@ public class TestParallelReader extends 
     return dir1;
   }
 
-  private Directory getDir2() throws IOException {
+  private Directory getDir2(Random random) throws IOException {
     Directory dir2 = new RAMDirectory();
-    IndexWriter w2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     Document d3 = new Document();
     d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
     d3.add(new Field("f4", "v1", Field.Store.YES, Field.Index.ANALYZED));

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java Tue Aug 10 20:32:47 2010
@@ -18,6 +18,7 @@ package org.apache.lucene.index;
  */
 
 import java.io.IOException;
+import java.util.Random;
 
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
@@ -46,15 +47,16 @@ public class TestParallelReaderEmptyInde
    * @throws IOException
    */
   public void testEmptyIndex() throws IOException {
+    Random random = newRandom();
     RAMDirectory rd1 = new MockRAMDirectory();
-    IndexWriter iw = new IndexWriter(rd1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter iw = new IndexWriter(rd1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     iw.close();
 
     RAMDirectory rd2 = new MockRAMDirectory(rd1);
 
     RAMDirectory rdOut = new MockRAMDirectory();
 
-    IndexWriter iwOut = new IndexWriter(rdOut, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     ParallelReader pr = new ParallelReader();
     pr.add(IndexReader.open(rd1,true));
     pr.add(IndexReader.open(rd2,true));
@@ -77,8 +79,9 @@ public class TestParallelReaderEmptyInde
    */
   public void testEmptyIndexWithVectors() throws IOException {
     RAMDirectory rd1 = new MockRAMDirectory();
+    Random random = newRandom();
     {
-      IndexWriter iw = new IndexWriter(rd1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+      IndexWriter iw = new IndexWriter(rd1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
       Document doc = new Document();
       doc.add(new Field("test", "", Store.NO, Index.ANALYZED,
                         TermVector.YES));
@@ -92,14 +95,14 @@ public class TestParallelReaderEmptyInde
       ir.deleteDocument(0);
       ir.close();
 
-      iw = new IndexWriter(rd1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+      iw = new IndexWriter(rd1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
       iw.optimize();
       iw.close();
     }
 
     RAMDirectory rd2 = new MockRAMDirectory();
     {
-      IndexWriter iw = new IndexWriter(rd2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+      IndexWriter iw = new IndexWriter(rd2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
       Document doc = new Document();
       iw.addDocument(doc);
       iw.close();
@@ -107,7 +110,7 @@ public class TestParallelReaderEmptyInde
 
     RAMDirectory rdOut = new MockRAMDirectory();
 
-    IndexWriter iwOut = new IndexWriter(rdOut, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
     ParallelReader pr = new ParallelReader();
     pr.add(IndexReader.open(rd1,true));
     pr.add(IndexReader.open(rd2,true));

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestParallelTermEnum.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestParallelTermEnum.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestParallelTermEnum.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestParallelTermEnum.java Tue Aug 10 20:32:47 2010
@@ -18,6 +18,7 @@ package org.apache.lucene.index;
  */
 
 import java.io.IOException;
+import java.util.Random;
 
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -36,9 +37,9 @@ public class TestParallelTermEnum extend
     protected void setUp() throws Exception {
         super.setUp();
         Document doc;
-
+        Random random = newRandom();
         RAMDirectory rd1 = new RAMDirectory();
-        IndexWriter iw1 = new IndexWriter(rd1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+        IndexWriter iw1 = new IndexWriter(rd1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
 
         doc = new Document();
         doc.add(new Field("field1", "the quick brown fox jumps", Store.YES,
@@ -50,7 +51,7 @@ public class TestParallelTermEnum extend
 
         iw1.close();
         RAMDirectory rd2 = new RAMDirectory();
-        IndexWriter iw2 = new IndexWriter(rd2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+        IndexWriter iw2 = new IndexWriter(rd2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
 
         doc = new Document();
         doc.add(new Field("field0", "", Store.NO, Index.ANALYZED));

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestPayloadProcessorProvider.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestPayloadProcessorProvider.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestPayloadProcessorProvider.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestPayloadProcessorProvider.java Tue Aug 10 20:32:47 2010
@@ -22,6 +22,7 @@ import static org.junit.Assert.*;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Random;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenizer;
@@ -118,23 +119,24 @@ public class TestPayloadProcessorProvide
 
   private static final int NUM_DOCS = 10;
 
-  private IndexWriterConfig getConfig() {
-    return new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+  private IndexWriterConfig getConfig(Random random) {
+    return newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false));
   }
 
-  private void populateDirs(Directory[] dirs, boolean multipleCommits)
+  private void populateDirs(Random random, Directory[] dirs, boolean multipleCommits)
       throws IOException {
     for (int i = 0; i < dirs.length; i++) {
       dirs[i] = new MockRAMDirectory();
-      populateDocs(dirs[i], multipleCommits);
+      populateDocs(random, dirs[i], multipleCommits);
       verifyPayloadExists(dirs[i], "p", new BytesRef("p1"), NUM_DOCS);
       verifyPayloadExists(dirs[i], "p", new BytesRef("p2"), NUM_DOCS);
     }
   }
 
-  private void populateDocs(Directory dir, boolean multipleCommits)
+  private void populateDocs(Random random, Directory dir, boolean multipleCommits)
       throws IOException {
-    IndexWriter writer = new IndexWriter(dir, getConfig());
+    IndexWriter writer = new IndexWriter(dir, getConfig(random));
+    ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(10);
     TokenStream payloadTS1 = new PayloadTokenStream("p1");
     TokenStream payloadTS2 = new PayloadTokenStream("p2");
     for (int i = 0; i < NUM_DOCS; i++) {
@@ -172,14 +174,14 @@ public class TestPayloadProcessorProvide
     }
   }
 
-  private void doTest(boolean addToEmptyIndex,
+  private void doTest(Random random, boolean addToEmptyIndex,
       int numExpectedPayloads, boolean multipleCommits) throws IOException {
     Directory[] dirs = new Directory[2];
-    populateDirs(dirs, multipleCommits);
+    populateDirs(random, dirs, multipleCommits);
 
     Directory dir = new MockRAMDirectory();
     if (!addToEmptyIndex) {
-      populateDocs(dir, multipleCommits);
+      populateDocs(random, dir, multipleCommits);
       verifyPayloadExists(dir, "p", new BytesRef("p1"), NUM_DOCS);
       verifyPayloadExists(dir, "p", new BytesRef("p2"), NUM_DOCS);
     }
@@ -190,7 +192,7 @@ public class TestPayloadProcessorProvide
     for (Directory d : dirs) {
       processors.put(d, new PerTermPayloadProcessor());
     }
-    IndexWriter writer = new IndexWriter(dir, getConfig());
+    IndexWriter writer = new IndexWriter(dir, getConfig(random));
     writer.setPayloadProcessorProvider(new PerDirPayloadProcessor(processors));
 
     IndexReader[] readers = new IndexReader[dirs.length];
@@ -214,26 +216,29 @@ public class TestPayloadProcessorProvide
 
   @Test
   public void testAddIndexes() throws Exception {
+    Random random = newRandom();
     // addIndexes - single commit in each
-    doTest(true, 0, false);
+    doTest(random, true, 0, false);
 
     // addIndexes - multiple commits in each
-    doTest(true, 0, true);
+    doTest(random, true, 0, true);
   }
 
   @Test
   public void testAddIndexesIntoExisting() throws Exception {
+    Random random = newRandom();
     // addIndexes - single commit in each
-    doTest(false, NUM_DOCS, false);
+    doTest(random, false, NUM_DOCS, false);
 
     // addIndexes - multiple commits in each
-    doTest(false, NUM_DOCS, true);
+    doTest(random, false, NUM_DOCS, true);
   }
 
   @Test
   public void testRegularMerges() throws Exception {
+    Random random = newRandom();
     Directory dir = new MockRAMDirectory();
-    populateDocs(dir, true);
+    populateDocs(random, dir, true);
     verifyPayloadExists(dir, "p", new BytesRef("p1"), NUM_DOCS);
     verifyPayloadExists(dir, "p", new BytesRef("p2"), NUM_DOCS);
 
@@ -241,7 +246,7 @@ public class TestPayloadProcessorProvide
     // won't get processed.
     Map<Directory, DirPayloadProcessor> processors = new HashMap<Directory, DirPayloadProcessor>();
     processors.put(dir, new PerTermPayloadProcessor());
-    IndexWriter writer = new IndexWriter(dir, getConfig());
+    IndexWriter writer = new IndexWriter(dir, getConfig(random));
     writer.setPayloadProcessorProvider(new PerDirPayloadProcessor(processors));
     writer.optimize();
     writer.close();

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestPayloads.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestPayloads.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestPayloads.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestPayloads.java Tue Aug 10 20:32:47 2010
@@ -103,7 +103,7 @@ public class TestPayloads extends Lucene
         rnd = newRandom();
         Directory ram = new MockRAMDirectory();
         PayloadAnalyzer analyzer = new PayloadAnalyzer();
-        IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
+        IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(rnd, TEST_VERSION_CURRENT, analyzer));
         Document d = new Document();
         // this field won't have any payloads
         d.add(new Field("f1", "This field has no payloads", Field.Store.NO, Field.Index.ANALYZED));
@@ -130,7 +130,7 @@ public class TestPayloads extends Lucene
         
         // now we add another document which has payloads for field f3 and verify if the SegmentMerger
         // enabled payloads for that field
-        writer = new IndexWriter(ram, new IndexWriterConfig(TEST_VERSION_CURRENT,
+        writer = new IndexWriter(ram, newIndexWriterConfig(rnd, TEST_VERSION_CURRENT,
             analyzer).setOpenMode(OpenMode.CREATE));
         d = new Document();
         d.add(new Field("f1", "This field has no payloads", Field.Store.NO, Field.Index.ANALYZED));
@@ -160,20 +160,20 @@ public class TestPayloads extends Lucene
         rnd = newRandom();
         // first perform the test using a RAMDirectory
         Directory dir = new MockRAMDirectory();
-        performTest(dir);
+        performTest(rnd, dir);
         
         // now use a FSDirectory and repeat same test
         File dirName = _TestUtil.getTempDir("test_payloads");
         dir = FSDirectory.open(dirName);
-        performTest(dir);
+        performTest(rnd, dir);
        _TestUtil.rmDir(dirName);
     }
     
     // builds an index with payloads in the given Directory and performs
     // different tests to verify the payload encoding
-    private void performTest(Directory dir) throws Exception {
+    private void performTest(Random random, Directory dir) throws Exception {
         PayloadAnalyzer analyzer = new PayloadAnalyzer();
-        IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
             TEST_VERSION_CURRENT, analyzer)
             .setOpenMode(OpenMode.CREATE));
         
@@ -314,7 +314,7 @@ public class TestPayloads extends Lucene
         
         // test long payload
         analyzer = new PayloadAnalyzer();
-        writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT,
+        writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
             analyzer).setOpenMode(OpenMode.CREATE));
         String singleTerm = "lucene";
         
@@ -490,7 +490,7 @@ public class TestPayloads extends Lucene
         final ByteArrayPool pool = new ByteArrayPool(numThreads, 5);
         
         Directory dir = new MockRAMDirectory();
-        final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+        final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(rnd, 
             TEST_VERSION_CURRENT, new MockAnalyzer()));
         final String field = "test";
         

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestPersistentSnapshotDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestPersistentSnapshotDeletionPolicy.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestPersistentSnapshotDeletionPolicy.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestPersistentSnapshotDeletionPolicy.java Tue Aug 10 20:32:47 2010
@@ -21,6 +21,7 @@ import static org.junit.Assert.*;
 
 import java.io.IOException;
 import java.util.Map;
+import java.util.Random;
 import java.util.Map.Entry;
 
 import org.apache.lucene.document.Document;
@@ -56,10 +57,11 @@ public class TestPersistentSnapshotDelet
   @Override
   @Test
   public void testExistingSnapshots() throws Exception {
+    Random random = newRandom();
     int numSnapshots = 3;
     Directory dir = new MockRAMDirectory();
     PersistentSnapshotDeletionPolicy psdp = (PersistentSnapshotDeletionPolicy) getDeletionPolicy();
-    IndexWriter writer = new IndexWriter(dir, getConfig(psdp));
+    IndexWriter writer = new IndexWriter(dir, getConfig(random, psdp));
     prepareIndexAndSnapshots(psdp, writer, numSnapshots, "snapshot");
     writer.close();
     psdp.close();
@@ -68,7 +70,7 @@ public class TestPersistentSnapshotDelet
     psdp = new PersistentSnapshotDeletionPolicy(
         new KeepOnlyLastCommitDeletionPolicy(), snapshotDir, OpenMode.APPEND,
         TEST_VERSION_CURRENT);
-    new IndexWriter(dir, getConfig(psdp)).close();
+    new IndexWriter(dir, getConfig(random, psdp)).close();
 
     assertSnapshotExists(dir, psdp, numSnapshots);
     assertEquals(numSnapshots, psdp.getSnapshots().size());
@@ -83,7 +85,7 @@ public class TestPersistentSnapshotDelet
   @Test
   public void testInvalidSnapshotInfos() throws Exception {
     // Add the correct number of documents (1), but without snapshot information
-    IndexWriter writer = new IndexWriter(snapshotDir, getConfig(null));
+    IndexWriter writer = new IndexWriter(snapshotDir, getConfig(newRandom(), null));
     writer.addDocument(new Document());
     writer.close();
     try {
@@ -98,7 +100,7 @@ public class TestPersistentSnapshotDelet
   @Test
   public void testNoSnapshotInfos() throws Exception {
     // Initialize an empty index in snapshotDir - PSDP should initialize successfully.
-    new IndexWriter(snapshotDir, getConfig(null)).close();
+    new IndexWriter(snapshotDir, getConfig(newRandom(), null)).close();
     new PersistentSnapshotDeletionPolicy(
         new KeepOnlyLastCommitDeletionPolicy(), snapshotDir, OpenMode.APPEND,
         TEST_VERSION_CURRENT).close();
@@ -107,7 +109,7 @@ public class TestPersistentSnapshotDelet
   @Test(expected=IllegalStateException.class)
   public void testTooManySnapshotInfos() throws Exception {
     // Write two documents to the snapshots directory - illegal.
-    IndexWriter writer = new IndexWriter(snapshotDir, getConfig(null));
+    IndexWriter writer = new IndexWriter(snapshotDir, getConfig(newRandom(), null));
     writer.addDocument(new Document());
     writer.addDocument(new Document());
     writer.close();
@@ -122,7 +124,7 @@ public class TestPersistentSnapshotDelet
   public void testSnapshotRelease() throws Exception {
     Directory dir = new MockRAMDirectory();
     PersistentSnapshotDeletionPolicy psdp = (PersistentSnapshotDeletionPolicy) getDeletionPolicy();
-    IndexWriter writer = new IndexWriter(dir, getConfig(psdp));
+    IndexWriter writer = new IndexWriter(dir, getConfig(newRandom(), psdp));
     prepareIndexAndSnapshots(psdp, writer, 1, "snapshot");
     writer.close();
 

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestRollback.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestRollback.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestRollback.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestRollback.java Tue Aug 10 20:32:47 2010
@@ -17,6 +17,8 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
+import java.util.Random;
+
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -31,7 +33,8 @@ public class TestRollback extends Lucene
   // LUCENE-2536
   public void testRollbackIntegrityWithBufferFlush() throws Exception {
     Directory dir = new MockRAMDirectory();
-    RandomIndexWriter rw = new RandomIndexWriter(newRandom(), dir);
+    Random random = newRandom();
+    RandomIndexWriter rw = new RandomIndexWriter(random, dir);
     for (int i = 0; i < 5; i++) {
       Document doc = new Document();
       doc.add(new Field("pk", Integer.toString(i), Store.YES, Index.ANALYZED_NO_NORMS));
@@ -40,7 +43,7 @@ public class TestRollback extends Lucene
     rw.close();
 
     // If buffer size is small enough to cause a flush, errors ensue...
-    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2).setOpenMode(IndexWriterConfig.OpenMode.APPEND));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2).setOpenMode(IndexWriterConfig.OpenMode.APPEND));
 
     Term pkTerm = new Term("pk", "");
     for (int i = 0; i < 3; i++) {

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java Tue Aug 10 20:32:47 2010
@@ -104,7 +104,7 @@ public class TestSegmentTermDocs extends
 
   public void testSkipTo(int indexDivisor) throws IOException {
     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()));
     
     Term ta = new Term("content","aaa");
     for(int i = 0; i < 10; i++)

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSegmentTermEnum.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSegmentTermEnum.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSegmentTermEnum.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSegmentTermEnum.java Tue Aug 10 20:32:47 2010
@@ -18,6 +18,7 @@ package org.apache.lucene.index;
  */
 
 import java.io.IOException;
+import java.util.Random;
 
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.BytesRef;
@@ -36,9 +37,10 @@ public class TestSegmentTermEnum extends
   Directory dir = new RAMDirectory();
 
   public void testTermEnum() throws IOException {
+    Random random = newRandom();
     IndexWriter writer = null;
 
-    writer  = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer  = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
 
     // ADD 100 documents with term : aaa
     // add 100 documents with terms: aaa bbb
@@ -54,7 +56,7 @@ public class TestSegmentTermEnum extends
     verifyDocFreq();
 
     // merge segments by optimizing the index
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
     writer.optimize();
     writer.close();
 
@@ -65,7 +67,7 @@ public class TestSegmentTermEnum extends
   public void testPrevTermAtEnd() throws IOException
   {
     Directory dir = new MockRAMDirectory();
-    IndexWriter writer  = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
+    IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
     addDoc(writer, "aaa bbb");
     writer.close();
     SegmentReader reader = SegmentReader.getOnlySegmentReader(dir);

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSnapshotDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSnapshotDeletionPolicy.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSnapshotDeletionPolicy.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSnapshotDeletionPolicy.java Tue Aug 10 20:32:47 2010
@@ -4,6 +4,7 @@ import static org.junit.Assert.*;
 
 import java.util.Collection;
 import java.util.Map;
+import java.util.Random;
 import java.io.File;
 import java.io.IOException;
 
@@ -33,8 +34,8 @@ public class TestSnapshotDeletionPolicy 
 	
   public static final String INDEX_PATH = "test.snapshots";
 
-  protected IndexWriterConfig getConfig(IndexDeletionPolicy dp) {
-    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+  protected IndexWriterConfig getConfig(Random random, IndexDeletionPolicy dp) {
+    IndexWriterConfig conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer());
     if (dp != null) {
       conf.setIndexDeletionPolicy(dp);
     }
@@ -84,26 +85,27 @@ public class TestSnapshotDeletionPolicy 
   
   @Test
   public void testSnapshotDeletionPolicy() throws Exception {
+    Random random = newRandom();
     File dir = _TestUtil.getTempDir(INDEX_PATH);
     try {
       Directory fsDir = FSDirectory.open(dir);
-      runTest(fsDir);
+      runTest(random, fsDir);
       fsDir.close();
     } finally {
       _TestUtil.rmDir(dir);
     }
 
     MockRAMDirectory dir2 = new MockRAMDirectory();
-    runTest(dir2);
+    runTest(random, dir2);
     dir2.close();
   }
 
-  private void runTest(Directory dir) throws Exception {
+  private void runTest(Random random, Directory dir) throws Exception {
     // Run for ~1 seconds
     final long stopTime = System.currentTimeMillis() + 1000;
 
     SnapshotDeletionPolicy dp = getDeletionPolicy();
-    final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+    final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
         TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(dp)
         .setMaxBufferedDocs(2));
     writer.commit();
@@ -229,10 +231,11 @@ public class TestSnapshotDeletionPolicy 
   public void testBasicSnapshots() throws Exception {
     int numSnapshots = 3;
     SnapshotDeletionPolicy sdp = getDeletionPolicy();
-
+    Random random = newRandom();
+    
     // Create 3 snapshots: snapshot0, snapshot1, snapshot2
     Directory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, getConfig(sdp));
+    IndexWriter writer = new IndexWriter(dir, getConfig(random, sdp));
     prepareIndexAndSnapshots(sdp, writer, numSnapshots, "snapshot");
     writer.close();
     
@@ -243,7 +246,7 @@ public class TestSnapshotDeletionPolicy 
 
     // open a new IndexWriter w/ no snapshots to keep and assert that all snapshots are gone.
     sdp = getDeletionPolicy();
-    writer = new IndexWriter(dir, getConfig(sdp));
+    writer = new IndexWriter(dir, getConfig(random, sdp));
     writer.deleteUnusedFiles();
     writer.close();
     assertEquals("no snapshots should exist", 1, IndexReader.listCommits(dir).size());
@@ -260,9 +263,10 @@ public class TestSnapshotDeletionPolicy 
 
   @Test
   public void testMultiThreadedSnapshotting() throws Exception {
+    Random random = newRandom();
     Directory dir = new MockRAMDirectory();
     final SnapshotDeletionPolicy sdp = getDeletionPolicy();
-    final IndexWriter writer = new IndexWriter(dir, getConfig(sdp));
+    final IndexWriter writer = new IndexWriter(dir, getConfig(random, sdp));
 
     Thread[] threads = new Thread[10];
     for (int i = 0; i < threads.length; i++) {
@@ -303,15 +307,16 @@ public class TestSnapshotDeletionPolicy 
 
   @Test
   public void testRollbackToOldSnapshot() throws Exception {
+    Random random = newRandom();
     int numSnapshots = 2;
     Directory dir = new MockRAMDirectory();
     SnapshotDeletionPolicy sdp = getDeletionPolicy();
-    IndexWriter writer = new IndexWriter(dir, getConfig(sdp));
+    IndexWriter writer = new IndexWriter(dir, getConfig(random, sdp));
     prepareIndexAndSnapshots(sdp, writer, numSnapshots, "snapshot");
     writer.close();
 
     // now open the writer on "snapshot0" - make sure it succeeds
-    writer = new IndexWriter(dir, getConfig(sdp).setIndexCommit(sdp.getSnapshot("snapshot0")));
+    writer = new IndexWriter(dir, getConfig(random, sdp).setIndexCommit(sdp.getSnapshot("snapshot0")));
     // this does the actual rollback
     writer.commit();
     writer.deleteUnusedFiles();
@@ -324,9 +329,10 @@ public class TestSnapshotDeletionPolicy 
 
   @Test
   public void testReleaseSnapshot() throws Exception {
+    Random random = newRandom();
     Directory dir = new MockRAMDirectory();
     SnapshotDeletionPolicy sdp = getDeletionPolicy();
-    IndexWriter writer = new IndexWriter(dir, getConfig(sdp));
+    IndexWriter writer = new IndexWriter(dir, getConfig(random, sdp));
     prepareIndexAndSnapshots(sdp, writer, 1, "snapshot");
     
     // Create another commit - we must do that, because otherwise the "snapshot"
@@ -351,18 +357,19 @@ public class TestSnapshotDeletionPolicy 
 
   @Test
   public void testExistingSnapshots() throws Exception {
+    Random random = newRandom();
     // Tests the ability to construct a SDP from existing snapshots, and
     // asserts that those snapshots/commit points are protected.
     int numSnapshots = 3;
     Directory dir = new MockRAMDirectory();
     SnapshotDeletionPolicy sdp = getDeletionPolicy();
-    IndexWriter writer = new IndexWriter(dir, getConfig(sdp));
+    IndexWriter writer = new IndexWriter(dir, getConfig(random, sdp));
     prepareIndexAndSnapshots(sdp, writer, numSnapshots, "snapshot");
     writer.close();
 
     // Make a new policy and initialize with snapshots.
     sdp = getDeletionPolicy(sdp.getSnapshots());
-    writer = new IndexWriter(dir, getConfig(sdp));
+    writer = new IndexWriter(dir, getConfig(random, sdp));
     // attempt to delete unused files - the snapshotted files should not be deleted
     writer.deleteUnusedFiles();
     writer.close();
@@ -371,9 +378,10 @@ public class TestSnapshotDeletionPolicy 
 
   @Test
   public void testSnapshotLastCommitTwice() throws Exception {
+    Random random = newRandom();
     Directory dir = new MockRAMDirectory();
     SnapshotDeletionPolicy sdp = getDeletionPolicy();
-    IndexWriter writer = new IndexWriter(dir, getConfig(sdp));
+    IndexWriter writer = new IndexWriter(dir, getConfig(random, sdp));
     writer.addDocument(new Document());
     writer.commit();
     
@@ -397,11 +405,12 @@ public class TestSnapshotDeletionPolicy 
   
   @Test
   public void testMissingCommits() throws Exception {
+    Random random = newRandom();
     // Tests the behavior of SDP when commits that are given at ctor are missing
     // on onInit().
     Directory dir = new MockRAMDirectory();
     SnapshotDeletionPolicy sdp = getDeletionPolicy();
-    IndexWriter writer = new IndexWriter(dir, getConfig(sdp));
+    IndexWriter writer = new IndexWriter(dir, getConfig(random, sdp));
     writer.addDocument(new Document());
     writer.commit();
     IndexCommit ic = sdp.snapshot("s1");
@@ -412,14 +421,14 @@ public class TestSnapshotDeletionPolicy 
 
     // open a new writer w/ KeepOnlyLastCommit policy, so it will delete "s1"
     // commit.
-    new IndexWriter(dir, getConfig(null)).close();
+    new IndexWriter(dir, getConfig(random, null)).close();
     
     assertFalse("snapshotted commit should not exist", dir.fileExists(ic.getSegmentsFileName()));
     
     // Now reinit SDP from the commits in the index - the snapshot id should not
     // exist anymore.
     sdp = getDeletionPolicy(sdp.getSnapshots());
-    new IndexWriter(dir, getConfig(sdp)).close();
+    new IndexWriter(dir, getConfig(random, sdp)).close();
     
     try {
       sdp.getSnapshot("s1");

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestStressIndexing.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestStressIndexing.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestStressIndexing.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestStressIndexing.java Tue Aug 10 20:32:47 2010
@@ -118,7 +118,7 @@ public class TestStressIndexing extends 
     stress test.
   */
   public void runStressTest(Directory directory, MergeScheduler mergeScheduler) throws Exception {
-    IndexWriter modifier = new IndexWriter(directory, new IndexWriterConfig(
+    IndexWriter modifier = new IndexWriter(directory, newIndexWriterConfig(RANDOM,
         TEST_VERSION_CURRENT, new MockAnalyzer())
         .setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(10).setMergeScheduler(
             mergeScheduler));

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestStressIndexing2.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestStressIndexing2.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestStressIndexing2.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestStressIndexing2.java Tue Aug 10 20:32:47 2010
@@ -85,7 +85,7 @@ public class TestStressIndexing2 extends
     int maxThreadStates = 1+r.nextInt(10);
     boolean doReaderPooling = r.nextBoolean();
     Map<String,Document> docs = indexRandom(5, 3, 100, dir1, maxThreadStates, doReaderPooling);
-    indexSerial(docs, dir2);
+    indexSerial(r, docs, dir2);
 
     // verifying verify
     // verifyEquals(dir1, dir1, "id");
@@ -115,7 +115,7 @@ public class TestStressIndexing2 extends
       Directory dir2 = new MockRAMDirectory();
       Map<String,Document> docs = indexRandom(nThreads, iter, range, dir1, maxThreadStates, doReaderPooling);
       //System.out.println("TEST: index serial");
-      indexSerial(docs, dir2);
+      indexSerial(r, docs, dir2);
       //System.out.println("TEST: verify");
       verifyEquals(dir1, dir2, "id");
     }
@@ -141,7 +141,7 @@ public class TestStressIndexing2 extends
   
   public DocsAndWriter indexRandomIWReader(int nThreads, int iterations, int range, Directory dir) throws IOException, InterruptedException {
     Map<String,Document> docs = new HashMap<String,Document>();
-    IndexWriter w = new MockIndexWriter(dir, new IndexWriterConfig(
+    IndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig(r,
         TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setRAMBufferSizeMB(
         0.1).setMaxBufferedDocs(maxBufferedDocs));
     w.commit();
@@ -194,7 +194,7 @@ public class TestStressIndexing2 extends
                                           boolean doReaderPooling) throws IOException, InterruptedException {
     Map<String,Document> docs = new HashMap<String,Document>();
     for(int iter=0;iter<3;iter++) {
-      IndexWriter w = new MockIndexWriter(dir, new IndexWriterConfig(
+      IndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig(r,
           TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE)
                .setRAMBufferSizeMB(0.1).setMaxBufferedDocs(maxBufferedDocs).setMaxThreadStates(maxThreadStates)
                .setReaderPooling(doReaderPooling));
@@ -238,8 +238,8 @@ public class TestStressIndexing2 extends
   }
 
   
-  public static void indexSerial(Map<String,Document> docs, Directory dir) throws IOException {
-    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+  public static void indexSerial(Random random, Map<String,Document> docs, Directory dir) throws IOException {
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
 
     // index all docs in a single thread
     Iterator<Document> iter = docs.values().iterator();

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTermVectorsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTermVectorsReader.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTermVectorsReader.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTermVectorsReader.java Tue Aug 10 20:32:47 2010
@@ -93,9 +93,11 @@ public class TestTermVectorsReader exten
     }
     Arrays.sort(tokens);
 
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MyAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MyAnalyzer()).setMaxBufferedDocs(-1));
     ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false);
     ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false);
+    ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(10);
+
     Document doc = new Document();
     for(int i=0;i<testFields.length;i++) {
       final Field.TermVector tv;

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTermdocPerf.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTermdocPerf.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTermdocPerf.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTermdocPerf.java Tue Aug 10 20:32:47 2010
@@ -72,7 +72,7 @@ public class TestTermdocPerf extends Luc
 
     Document doc = new Document();
     doc.add(new Field(field,val, Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
         TEST_VERSION_CURRENT, analyzer)
         .setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(100));
     ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(100);

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestThreadedOptimize.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestThreadedOptimize.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestThreadedOptimize.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestThreadedOptimize.java Tue Aug 10 20:32:47 2010
@@ -33,6 +33,7 @@ import org.apache.lucene.util.LuceneTest
 
 import java.io.IOException;
 import java.io.File;
+import java.util.Random;
 
 public class TestThreadedOptimize extends LuceneTestCase {
   
@@ -53,9 +54,9 @@ public class TestThreadedOptimize extend
     failed = true;
   }
 
-  public void runTest(Directory directory, MergeScheduler merger) throws Exception {
+  public void runTest(Random random, Directory directory, MergeScheduler merger) throws Exception {
 
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
+    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(random,
         TEST_VERSION_CURRENT, ANALYZER)
         .setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(2).setMergeScheduler(
             merger));
@@ -120,7 +121,7 @@ public class TestThreadedOptimize extend
       assertEquals(expectedDocCount, writer.maxDoc());
 
       writer.close();
-      writer = new IndexWriter(directory, new IndexWriterConfig(
+      writer = new IndexWriter(directory, newIndexWriterConfig(random,
           TEST_VERSION_CURRENT, ANALYZER).setOpenMode(
           OpenMode.APPEND).setMaxBufferedDocs(2));
 
@@ -137,15 +138,16 @@ public class TestThreadedOptimize extend
     FSDirectory.
   */
   public void testThreadedOptimize() throws Exception {
+    Random random = newRandom();
     Directory directory = new MockRAMDirectory();
-    runTest(directory, new SerialMergeScheduler());
-    runTest(directory, new ConcurrentMergeScheduler());
+    runTest(random, directory, new SerialMergeScheduler());
+    runTest(random, directory, new ConcurrentMergeScheduler());
     directory.close();
 
     File dirName = new File(TEMP_DIR, "luceneTestThreadedOptimize");
     directory = FSDirectory.open(dirName);
-    runTest(directory, new SerialMergeScheduler());
-    runTest(directory, new ConcurrentMergeScheduler());
+    runTest(random, directory, new SerialMergeScheduler());
+    runTest(random, directory, new ConcurrentMergeScheduler());
     directory.close();
     _TestUtil.rmDir(dirName);
   }

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTransactionRollback.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTransactionRollback.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTransactionRollback.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTransactionRollback.java Tue Aug 10 20:32:47 2010
@@ -25,6 +25,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.HashMap;
+import java.util.Random;
 
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -44,7 +45,7 @@ public class TestTransactionRollback ext
 	
   private static final String FIELD_RECORD_ID = "record_id";
   private Directory dir;
-
+  private Random random;
 	
   //Rolls back index to a chosen ID
   private void rollBackLast(int id) throws Exception {
@@ -64,7 +65,7 @@ public class TestTransactionRollback ext
     if (last==null)
       throw new RuntimeException("Couldn't find commit point "+id);
 		
-    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random,
         TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(
         new RollbackDeletionPolicy(id)).setIndexCommit(last));
     Map<String,String> data = new HashMap<String,String>();
@@ -124,10 +125,10 @@ public class TestTransactionRollback ext
   protected void setUp() throws Exception {
     super.setUp();
     dir = new MockRAMDirectory();
-		
+		random = newRandom();
     //Build index, of records 1 to 100, committing after each batch of 10
     IndexDeletionPolicy sdp=new KeepAllDeletionPolicy();
-    IndexWriter w=new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(sdp));
+    IndexWriter w=new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(sdp));
     for(int currentRecordId=1;currentRecordId<=100;currentRecordId++) {
       Document doc=new Document();
       doc.add(new Field(FIELD_RECORD_ID,""+currentRecordId,Field.Store.YES,Field.Index.ANALYZED));
@@ -195,7 +196,7 @@ public class TestTransactionRollback ext
     for(int i=0;i<2;i++) {
       // Unless you specify a prior commit point, rollback
       // should not work:
-      new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+      new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())
           .setIndexDeletionPolicy(new DeleteLastCommitPolicy())).close();
       IndexReader r = IndexReader.open(dir, true);
       assertEquals(100, r.numDocs());

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTransactions.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTransactions.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTransactions.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTransactions.java Tue Aug 10 20:32:47 2010
@@ -93,13 +93,15 @@ public class TestTransactions extends Lu
     @Override
     public void doWork() throws Throwable {
 
-      IndexWriter writer1 = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(3));
+      IndexWriter writer1 = new IndexWriter(dir1, newIndexWriterConfig(RANDOM, TEST_VERSION_CURRENT, new MockAnalyzer())
+          .setMaxBufferedDocs(3).setMergeScheduler(new ConcurrentMergeScheduler()));
       ((LogMergePolicy) writer1.getConfig().getMergePolicy()).setMergeFactor(2);
       ((ConcurrentMergeScheduler) writer1.getConfig().getMergeScheduler()).setSuppressExceptions();
 
       // Intentionally use different params so flush/merge
       // happen @ different times
-      IndexWriter writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+      IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig(RANDOM, TEST_VERSION_CURRENT, new MockAnalyzer())
+          .setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()));
       ((LogMergePolicy) writer2.getConfig().getMergePolicy()).setMergeFactor(3);
       ((ConcurrentMergeScheduler) writer2.getConfig().getMergeScheduler()).setSuppressExceptions();
 
@@ -181,7 +183,7 @@ public class TestTransactions extends Lu
   }
 
   public void initIndex(Directory dir) throws Throwable {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(RANDOM, TEST_VERSION_CURRENT, new MockAnalyzer()));
     for(int j=0; j<7; j++) {
       Document d = new Document();
       int n = RANDOM.nextInt();

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/util/LuceneTestCaseJ4.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/util/LuceneTestCaseJ4.java?rev=984202&r1=984201&r2=984202&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/util/LuceneTestCaseJ4.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/util/LuceneTestCaseJ4.java Tue Aug 10 20:32:47 2010
@@ -563,6 +563,7 @@ public class LuceneTestCaseJ4 {
     }
     
     c.setReaderPooling(r.nextBoolean());
+    c.setReaderTermsIndexDivisor(_TestUtil.nextInt(r, 1, 4));
     return c;
   }