Posted to commits@lucene.apache.org by rm...@apache.org on 2011/05/09 17:24:23 UTC

svn commit: r1101062 [10/21] - in /lucene/dev/branches/bulkpostings: ./ dev-tools/ dev-tools/eclipse/ dev-tools/idea/.idea/ dev-tools/idea/lucene/contrib/ant/ dev-tools/idea/lucene/contrib/db/bdb-je/ dev-tools/idea/lucene/contrib/db/bdb/ dev-tools/idea...

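[Editor's note] The bulk of this commit replaces the no-argument MockAnalyzer constructor with MockAnalyzer(random), wiring the analyzer's randomized behavior to the test run's seed so failures reproduce from the reported seed. Below is a minimal sketch of that pattern as it appears throughout the hunks; the class and test names are illustrative only (not part of the commit), and it assumes the LuceneTestCase helpers of this era (newDirectory(), newIndexWriterConfig(...), newField(...), the static random field).

    import org.apache.lucene.analysis.MockAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.LuceneTestCase;

    // Illustrative example only; this class is not part of the commit.
    public class SeededMockAnalyzerExample extends LuceneTestCase {
      public void testSeededAnalyzer() throws Exception {
        Directory dir = newDirectory();
        // Passing the test framework's Random ties MockAnalyzer's randomized
        // tokenization choices to the run's seed, making them reproducible.
        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
            TEST_VERSION_CURRENT, new MockAnalyzer(random)));
        Document doc = new Document();
        doc.add(newField("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
        writer.addDocument(doc);
        writer.close();
        dir.close();
      }
    }
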
Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java Mon May  9 15:24:04 2011
@@ -53,7 +53,7 @@ public class TestAddIndexes extends Luce
     IndexWriter writer = null;
 
     writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer())
+        new MockAnalyzer(random))
         .setOpenMode(OpenMode.CREATE));
     writer.setInfoStream(VERBOSE ? System.out : null);
     // add 100 documents
@@ -64,7 +64,7 @@ public class TestAddIndexes extends Luce
 
     writer = newWriter(
         aux,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.CREATE).
             setMergePolicy(newLogMergePolicy(false))
     );
@@ -73,14 +73,14 @@ public class TestAddIndexes extends Luce
     assertEquals(40, writer.maxDoc());
     writer.close();
 
-    writer = newWriter(aux2, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+    writer = newWriter(aux2, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
     // add 40 documents in compound files
     addDocs2(writer, 50);
     assertEquals(50, writer.maxDoc());
     writer.close();
 
     // test doc count before segments are merged
-    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     assertEquals(100, writer.maxDoc());
     writer.addIndexes(aux, aux2);
     assertEquals(190, writer.maxDoc());
@@ -95,14 +95,14 @@ public class TestAddIndexes extends Luce
 
     // now add another set in.
     Directory aux3 = newDirectory();
-    writer = newWriter(aux3, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = newWriter(aux3, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     // add 40 documents
     addDocs(writer, 40);
     assertEquals(40, writer.maxDoc());
     writer.close();
 
     // test doc count before segments are merged/index is optimized
-    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     assertEquals(190, writer.maxDoc());
     writer.addIndexes(aux3);
     assertEquals(230, writer.maxDoc());
@@ -116,7 +116,7 @@ public class TestAddIndexes extends Luce
     verifyTermDocs(dir, new Term("content", "bbb"), 50);
 
     // now optimize it.
-    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     writer.optimize();
     writer.close();
 
@@ -129,11 +129,11 @@ public class TestAddIndexes extends Luce
 
     // now add a single document
     Directory aux4 = newDirectory();
-    writer = newWriter(aux4, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = newWriter(aux4, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     addDocs2(writer, 1);
     writer.close();
 
-    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     assertEquals(230, writer.maxDoc());
     writer.addIndexes(aux4);
     assertEquals(231, writer.maxDoc());
@@ -156,7 +156,7 @@ public class TestAddIndexes extends Luce
     Directory aux = newDirectory();
 
     setUpDirs(dir, aux);
-    IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     writer.setInfoStream(VERBOSE ? System.out : null);
     writer.addIndexes(aux);
 
@@ -194,7 +194,7 @@ public class TestAddIndexes extends Luce
     Directory aux = newDirectory();
 
     setUpDirs(dir, aux);
-    IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
 
     // Adds 10 docs, then replaces them with another 10
     // docs, so 10 pending deletes:
@@ -232,7 +232,7 @@ public class TestAddIndexes extends Luce
     Directory aux = newDirectory();
 
     setUpDirs(dir, aux);
-    IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
 
     // Adds 10 docs, then replaces them with another 10
     // docs, so 10 pending deletes:
@@ -273,7 +273,7 @@ public class TestAddIndexes extends Luce
 
     IndexWriter writer = null;
 
-    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     // add 100 documents
     addDocs(writer, 100);
     assertEquals(100, writer.maxDoc());
@@ -281,7 +281,7 @@ public class TestAddIndexes extends Luce
 
     writer = newWriter(
         aux,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.CREATE).
             setMaxBufferedDocs(1000).
             setMergePolicy(newLogMergePolicy(false))
@@ -291,7 +291,7 @@ public class TestAddIndexes extends Luce
     writer.close();
     writer = newWriter(
         aux,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.CREATE).
             setMaxBufferedDocs(1000).
             setMergePolicy(newLogMergePolicy(false))
@@ -299,7 +299,7 @@ public class TestAddIndexes extends Luce
     addDocs(writer, 100);
     writer.close();
 
-    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     try {
       // cannot add self
       writer.addIndexes(aux, dir);
@@ -329,7 +329,7 @@ public class TestAddIndexes extends Luce
 
     IndexWriter writer = newWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.APPEND).
             setMaxBufferedDocs(10).
             setMergePolicy(newLogMergePolicy(4))
@@ -358,7 +358,7 @@ public class TestAddIndexes extends Luce
 
     IndexWriter writer = newWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.APPEND).
             setMaxBufferedDocs(9).
             setMergePolicy(newLogMergePolicy(4))
@@ -387,7 +387,7 @@ public class TestAddIndexes extends Luce
 
     IndexWriter writer = newWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.APPEND).
             setMaxBufferedDocs(10).
             setMergePolicy(newLogMergePolicy(4))
@@ -422,7 +422,7 @@ public class TestAddIndexes extends Luce
 
     IndexWriter writer = newWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.APPEND).
             setMaxBufferedDocs(4).
             setMergePolicy(newLogMergePolicy(4))
@@ -448,7 +448,7 @@ public class TestAddIndexes extends Luce
 
     IndexWriter writer = newWriter(
         aux2,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.CREATE).
             setMaxBufferedDocs(100).
             setMergePolicy(newLogMergePolicy(10))
@@ -475,7 +475,7 @@ public class TestAddIndexes extends Luce
 
     writer = newWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.APPEND).
             setMaxBufferedDocs(6).
             setMergePolicy(newLogMergePolicy(4))
@@ -536,7 +536,7 @@ public class TestAddIndexes extends Luce
   private void setUpDirs(Directory dir, Directory aux) throws IOException {
     IndexWriter writer = null;
 
-    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
+    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
     // add 1000 documents in 1 segment
     addDocs(writer, 1000);
     assertEquals(1000, writer.maxDoc());
@@ -545,7 +545,7 @@ public class TestAddIndexes extends Luce
 
     writer = newWriter(
         aux,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.CREATE).
             setMaxBufferedDocs(1000).
             setMergePolicy(newLogMergePolicy(false, 10))
@@ -556,7 +556,7 @@ public class TestAddIndexes extends Luce
       writer.close();
       writer = newWriter(
           aux,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
               setOpenMode(OpenMode.APPEND).
               setMaxBufferedDocs(1000).
               setMergePolicy(newLogMergePolicy(false, 10))
@@ -575,7 +575,7 @@ public class TestAddIndexes extends Luce
     lmp.setUseCompoundFile(false);
     lmp.setMergeFactor(100);
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(5).setMergePolicy(lmp));
 
     Document doc = new Document();
@@ -603,7 +603,7 @@ public class TestAddIndexes extends Luce
     lmp.setUseCompoundFile(false);
     lmp.setMergeFactor(4);
     writer = new IndexWriter(dir2, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer())
+        new MockAnalyzer(random))
         .setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(lmp));
     writer.addIndexes(dir);
     writer.close();
@@ -636,14 +636,14 @@ public class TestAddIndexes extends Luce
       NUM_COPY = numCopy;
       dir = new MockDirectoryWrapper(random, new RAMDirectory());
       IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer())
+          TEST_VERSION_CURRENT, new MockAnalyzer(random))
           .setMaxBufferedDocs(2));
       for (int i = 0; i < NUM_INIT_DOCS; i++)
         addDoc(writer);
       writer.close();
 
       dir2 = newDirectory();
-      writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+      writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       writer2.setInfoStream(VERBOSE ? System.out : null);
       writer2.commit();
       
@@ -771,11 +771,11 @@ public class TestAddIndexes extends Luce
     c.joinThreads();
 
     int expectedNumDocs = 100+NUM_COPY*(4*NUM_ITER/5)*RunAddIndexesThreads.NUM_THREADS*RunAddIndexesThreads.NUM_INIT_DOCS;
-    assertEquals(expectedNumDocs, c.writer2.numDocs());
+    assertEquals("expected num docs don't match - failures: " + c.failures, expectedNumDocs, c.writer2.numDocs());
 
     c.close(true);
 
-    assertTrue(c.failures.size() == 0);
+    assertTrue("found unexpected failures: " + c.failures, c.failures.isEmpty());
 
     _TestUtil.checkIndex(c.dir2);
 
@@ -944,7 +944,7 @@ public class TestAddIndexes extends Luce
     Directory[] dirs = new Directory[2];
     for (int i = 0; i < dirs.length; i++) {
       dirs[i] = newDirectory();
-      IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+      IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
       IndexWriter writer = new IndexWriter(dirs[i], conf);
       Document doc = new Document();
       doc.add(new Field("id", "myid", Store.NO, Index.NOT_ANALYZED_NO_NORMS));
@@ -952,7 +952,7 @@ public class TestAddIndexes extends Luce
       writer.close();
     }
 
-    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
     IndexWriter writer = new IndexWriter(dirs[0], conf);
 
     // Now delete the document
@@ -992,7 +992,7 @@ public class TestAddIndexes extends Luce
     IndexWriter writer = null;
 
     writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setCodecProvider(
+        new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE).setCodecProvider(
         provider));
     // add 100 documents
     addDocs3(writer, 100);
@@ -1003,7 +1003,7 @@ public class TestAddIndexes extends Luce
 
     writer = newWriter(
         aux,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.CREATE).
             setCodecProvider(provider).
             setMaxBufferedDocs(10).
@@ -1017,7 +1017,7 @@ public class TestAddIndexes extends Luce
 
     writer = newWriter(
         aux2,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.CREATE).
             setCodecProvider(provider)
     );
@@ -1030,7 +1030,7 @@ public class TestAddIndexes extends Luce
     // test doc count before segments are merged
     writer = newWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.APPEND).
             setCodecProvider(provider)
     );
@@ -1063,7 +1063,7 @@ public class TestAddIndexes extends Luce
     Directory[] dirs = new Directory[2];
     for (int i = 0; i < dirs.length; i++) {
       dirs[i] = new RAMDirectory();
-      IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+      IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       Document d = new Document();
       d.add(new Field("c", "v", Store.YES, Index.ANALYZED, TermVector.YES));
       w.addDocument(d);
@@ -1073,8 +1073,9 @@ public class TestAddIndexes extends Luce
     IndexReader[] readers = new IndexReader[] { IndexReader.open(dirs[0]), IndexReader.open(dirs[1]) };
     
     Directory dir = new RAMDirectory();
-    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy());
     LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
+    lmp.setUseCompoundFile(true);
     lmp.setNoCFSRatio(1.0); // Force creation of CFS
     IndexWriter w3 = new IndexWriter(dir, conf);
     w3.addIndexes(readers);

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestAtomicUpdate.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestAtomicUpdate.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestAtomicUpdate.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestAtomicUpdate.java Mon May  9 15:24:04 2011
@@ -127,9 +127,9 @@ public class TestAtomicUpdate extends Lu
     TimedThread[] threads = new TimedThread[4];
 
     IndexWriterConfig conf = new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(7);
-    ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(3);
+    ((TieredMergePolicy) conf.getMergePolicy()).setMaxMergeAtOnce(3);
     IndexWriter writer = new MockIndexWriter(directory, conf);
     writer.setInfoStream(VERBOSE ? System.out : null);
 

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java Mon May  9 15:24:04 2011
@@ -90,6 +90,8 @@ public class TestBackwardsCompatibility 
                              "30.nocfs",
                              "31.cfs",
                              "31.nocfs",
+                             "32.cfs",
+                             "32.nocfs",
   };
   
   final String[] unsupportedNames = {"19.cfs",
@@ -132,7 +134,7 @@ public class TestBackwardsCompatibility 
 
       try {
         writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)));
         fail("IndexWriter creation should not pass for "+unsupportedNames[i]);
       } catch (IndexFormatTooOldException e) {
         // pass
@@ -174,7 +176,7 @@ public class TestBackwardsCompatibility 
       Directory dir = newFSDirectory(oldIndxeDir);
 
       IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       w.setInfoStream(VERBOSE ? System.out : null);
       w.optimize();
       w.close();
@@ -194,7 +196,7 @@ public class TestBackwardsCompatibility 
 
       Directory targetDir = newDirectory();
       IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       w.addIndexes(dir);
       w.close();
 
@@ -215,7 +217,7 @@ public class TestBackwardsCompatibility 
       
       Directory targetDir = newDirectory();
       IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       w.addIndexes(reader);
       w.close();
       reader.close();
@@ -268,7 +270,7 @@ public class TestBackwardsCompatibility 
   }
 
   public void searchIndex(File indexDir, String oldName) throws IOException {
-    //QueryParser parser = new QueryParser("contents", new MockAnalyzer());
+    //QueryParser parser = new QueryParser("contents", new MockAnalyzer(random));
     //Query query = parser.parse("handle:1");
 
     Directory dir = newFSDirectory(indexDir);
@@ -340,7 +342,7 @@ public class TestBackwardsCompatibility 
 
     Directory dir = newFSDirectory(oldIndexDir);
     // open writer
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     writer.setInfoStream(VERBOSE ? System.out : null);
     // add 10 docs
     for(int i=0;i<10;i++) {
@@ -385,7 +387,7 @@ public class TestBackwardsCompatibility 
     searcher.close();
 
     // optimize
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     writer.optimize();
     writer.close();
 
@@ -430,7 +432,7 @@ public class TestBackwardsCompatibility 
     searcher.close();
 
     // optimize
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     writer.optimize();
     writer.close();
 
@@ -446,12 +448,12 @@ public class TestBackwardsCompatibility 
   }
 
   public File createIndex(Random random, String dirName, boolean doCFS) throws IOException {
-
-    File indexDir = _TestUtil.getTempDir(dirName);
+    // we use a real directory name that is not cleaned up, because this method is only used to create backwards indexes:
+    File indexDir = new File(LuceneTestCase.TEMP_DIR, dirName);
     _TestUtil.rmDir(indexDir);
     Directory dir = newFSDirectory(indexDir);
     
-    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10);
+    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10);
     ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(doCFS);
     IndexWriter writer = new IndexWriter(dir, conf);
     
@@ -462,7 +464,7 @@ public class TestBackwardsCompatibility 
     writer.close();
 
     // open fresh writer so we get no prx file in the added segment
-    conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10);
+    conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10);
     ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(doCFS);
     writer = new IndexWriter(dir, conf);
     addNoProxDoc(writer);
@@ -498,7 +500,7 @@ public class TestBackwardsCompatibility 
 
       IndexWriter writer = new IndexWriter(
           dir,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
               setMaxBufferedDocs(-1).
               setRAMBufferSizeMB(16.0).
               setMergePolicy(mergePolicy)

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCheckIndex.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCheckIndex.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCheckIndex.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCheckIndex.java Mon May  9 15:24:04 2011
@@ -34,7 +34,7 @@ public class TestCheckIndex extends Luce
 
   public void testDeletedDocs() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+    IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
     Document doc = new Document();
     doc.add(newField("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
     for(int i=0;i<19;i++) {

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCodecs.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCodecs.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCodecs.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCodecs.java Mon May  9 15:24:04 2011
@@ -321,7 +321,7 @@ public class TestCodecs extends LuceneTe
   public void testSepPositionAfterMerge() throws IOException {
     final Directory dir = newDirectory();
     final IndexWriterConfig config = newIndexWriterConfig(Version.LUCENE_31,
-      new MockAnalyzer());
+      new MockAnalyzer(random));
     config.setCodecProvider(new MockSepCodecs());
     final IndexWriter writer = new IndexWriter(dir, config);
 

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCompoundFile.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCompoundFile.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCompoundFile.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCompoundFile.java Mon May  9 15:24:04 2011
@@ -57,8 +57,7 @@ public class TestCompoundFile extends Lu
     @Override
     public void setUp() throws Exception {
        super.setUp();
-       File file = new File(TEMP_DIR, "testIndex");
-       _TestUtil.rmDir(file);
+       File file = _TestUtil.getTempDir("testIndex");
        // use a simple FSDir here, to be sure to have SimpleFSInputs
        dir = new SimpleFSDirectory(file,null);
     }
@@ -66,7 +65,6 @@ public class TestCompoundFile extends Lu
     @Override
     public void tearDown() throws Exception {
        dir.close();
-       _TestUtil.rmDir(new File(TEMP_DIR, "testIndex"));
        super.tearDown();
     }
 

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java Mon May  9 15:24:04 2011
@@ -50,7 +50,7 @@ public class TestConcurrentMergeSchedule
         boolean isClose = false;
         StackTraceElement[] trace = new Exception().getStackTrace();
         for (int i = 0; i < trace.length; i++) {
-          if ("doFlush".equals(trace[i].getMethodName())) {
+          if ("flush".equals(trace[i].getMethodName())) {
             isDoFlush = true;
           }
           if ("close".equals(trace[i].getMethodName())) {
@@ -72,7 +72,7 @@ public class TestConcurrentMergeSchedule
     FailOnlyOnFlush failure = new FailOnlyOnFlush();
     directory.failOn(failure);
 
-    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
     writer.setInfoStream(VERBOSE ? System.out : null);
     Document doc = new Document();
     Field idField = newField("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
@@ -130,7 +130,7 @@ public class TestConcurrentMergeSchedule
     // start:
     mp.setMinMergeDocs(1000);
     IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMergePolicy(mp));
     writer.setInfoStream(VERBOSE ? System.out : null);
 
@@ -169,7 +169,7 @@ public class TestConcurrentMergeSchedule
   public void testNoExtraFiles() throws IOException {
     MockDirectoryWrapper directory = newDirectory();
     IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(2));
     writer.setInfoStream(VERBOSE ? System.out : null);
 
@@ -189,7 +189,7 @@ public class TestConcurrentMergeSchedule
 
       // Reopen
       writer = new IndexWriter(directory, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer())
+          TEST_VERSION_CURRENT, new MockAnalyzer(random))
           .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2));
       writer.setInfoStream(VERBOSE ? System.out : null);
     }
@@ -207,7 +207,7 @@ public class TestConcurrentMergeSchedule
 
     IndexWriter writer = new IndexWriter(
         directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMaxBufferedDocs(2).
             setMergePolicy(newLogMergePolicy(100))
     );
@@ -240,7 +240,7 @@ public class TestConcurrentMergeSchedule
       // Reopen
       writer = new IndexWriter(
           directory,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
               setOpenMode(OpenMode.APPEND).
               setMergePolicy(newLogMergePolicy(100))
       );

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java Mon May  9 15:24:04 2011
@@ -35,7 +35,7 @@ public class TestConsistentFieldNumbers 
   public void testSameFieldNumbersAcrossSegments() throws Exception {
     for (int i = 0; i < 2; i++) {
       Directory dir = newDirectory();
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
 
       Document d1 = new Document();
       d1.add(new Field("f1", "first field", Store.YES, Index.ANALYZED, TermVector.NO));
@@ -44,7 +44,7 @@ public class TestConsistentFieldNumbers 
 
       if (i == 1) {
         writer.close();
-        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
       } else {
         writer.commit();
       }
@@ -72,7 +72,7 @@ public class TestConsistentFieldNumbers 
       assertEquals("f3", fis2.fieldInfo(2).name);
       assertEquals("f4", fis2.fieldInfo(3).name);
 
-      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       writer.optimize();
       writer.close();
 
@@ -96,7 +96,7 @@ public class TestConsistentFieldNumbers 
   public void testAddIndexes() throws Exception {
     Directory dir1 = newDirectory();
     Directory dir2 = newDirectory();
-    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
 
     Document d1 = new Document();
     d1.add(new Field("f1", "first field", Store.YES, Index.ANALYZED, TermVector.NO));
@@ -104,7 +104,7 @@ public class TestConsistentFieldNumbers 
     writer.addDocument(d1);
 
     writer.close();
-    writer = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+    writer = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
 
     Document d2 = new Document();
     d2.add(new Field("f2", "second field", Store.YES, Index.ANALYZED, TermVector.NO));
@@ -115,7 +115,7 @@ public class TestConsistentFieldNumbers 
 
     writer.close();
 
-    writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+    writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
     writer.addIndexes(dir2);
     writer.close();
 
@@ -134,7 +134,7 @@ public class TestConsistentFieldNumbers 
     assertEquals("f3", fis2.fieldInfo(2).name);
     assertEquals("f4", fis2.fieldInfo(3).name);
 
-    writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer.optimize();
     writer.close();
 
@@ -159,7 +159,7 @@ public class TestConsistentFieldNumbers 
       Directory dir = newDirectory();
       {
         IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(
+            TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
             NoMergePolicy.NO_COMPOUND_FILES));
         Document d = new Document();
         d.add(new Field("f1", "d1 first field", Store.YES, Index.ANALYZED,
@@ -180,7 +180,7 @@ public class TestConsistentFieldNumbers 
 
       {
         IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(
+            TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
             random.nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
                 : NoMergePolicy.COMPOUND_FILES));
         Document d = new Document();
@@ -205,7 +205,7 @@ public class TestConsistentFieldNumbers 
 
       {
         IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(
+            TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
             random.nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
                 : NoMergePolicy.COMPOUND_FILES));
         Document d = new Document();
@@ -237,7 +237,7 @@ public class TestConsistentFieldNumbers 
 
       {
         IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(
+            TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
             random.nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
                 : NoMergePolicy.COMPOUND_FILES));
         writer.deleteDocuments(new Term("f1", "d1"));
@@ -248,7 +248,7 @@ public class TestConsistentFieldNumbers 
       }
 
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
           new LogByteSizeMergePolicy()));
       writer.optimize();
       assertFalse(" field numbers got mixed up", writer.anyNonBulkMerges);
@@ -281,7 +281,7 @@ public class TestConsistentFieldNumbers 
     }
 
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 
     for (int i = 0; i < NUM_DOCS; i++) {
       Document d = new Document();

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCrash.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCrash.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCrash.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCrash.java Mon May  9 15:24:04 2011
@@ -36,7 +36,7 @@ public class TestCrash extends LuceneTes
   private IndexWriter initIndex(Random random, MockDirectoryWrapper dir, boolean initialCommit) throws IOException {
     dir.setLockFactory(NoLockFactory.getNoLockFactory());
 
-    IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+    IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(10).setMergeScheduler(new ConcurrentMergeScheduler()));
     ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
     if (initialCommit) {

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java Mon May  9 15:24:04 2011
@@ -204,7 +204,7 @@ public class TestDeletionPolicy extends 
     Directory dir = newDirectory();
     ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(dir, SECONDS);
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer())
+        new MockAnalyzer(random))
         .setIndexDeletionPolicy(policy);
     MergePolicy mp = conf.getMergePolicy();
     if (mp instanceof LogMergePolicy) {
@@ -221,7 +221,7 @@ public class TestDeletionPolicy extends 
       // past commits
       lastDeleteTime = System.currentTimeMillis();
       conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-          new MockAnalyzer()).setOpenMode(
+          new MockAnalyzer(random)).setOpenMode(
           OpenMode.APPEND).setIndexDeletionPolicy(policy);
       mp = conf.getMergePolicy();
       if (mp instanceof LogMergePolicy) {
@@ -303,7 +303,7 @@ public class TestDeletionPolicy extends 
       policy.dir = dir;
 
       IndexWriterConfig conf = newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer())
+          TEST_VERSION_CURRENT, new MockAnalyzer(random))
           .setIndexDeletionPolicy(policy).setMaxBufferedDocs(10)
           .setMergeScheduler(new SerialMergeScheduler());
       MergePolicy mp = conf.getMergePolicy();
@@ -324,7 +324,7 @@ public class TestDeletionPolicy extends 
       }
       if (!isOptimized) {
         conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-                                    new MockAnalyzer()).setOpenMode(
+                                    new MockAnalyzer(random)).setOpenMode(
                                                                     OpenMode.APPEND).setIndexDeletionPolicy(policy);
         mp = conf.getMergePolicy();
         if (mp instanceof LogMergePolicy) {
@@ -373,7 +373,7 @@ public class TestDeletionPolicy extends 
           int preCount = dir.listAll().length;
           writer = new IndexWriter(dir, newIndexWriterConfig(
               TEST_VERSION_CURRENT,
-              new MockAnalyzer()).setOpenMode(
+              new MockAnalyzer(random)).setOpenMode(
               OpenMode.APPEND).setIndexDeletionPolicy(policy));
           writer.close();
           int postCount = dir.listAll().length;
@@ -397,7 +397,7 @@ public class TestDeletionPolicy extends 
 
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setIndexDeletionPolicy(policy).
             setMaxBufferedDocs(2).
             setMergePolicy(newLogMergePolicy(10))
@@ -419,7 +419,7 @@ public class TestDeletionPolicy extends 
     assertTrue(lastCommit != null);
 
     // Now add 1 doc and optimize
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(policy));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(policy));
     addDoc(writer);
     assertEquals(11, writer.numDocs());
     writer.optimize();
@@ -428,7 +428,7 @@ public class TestDeletionPolicy extends 
     assertEquals(6, IndexReader.listCommits(dir).size());
 
     // Now open writer on the commit just before optimize:
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
     assertEquals(10, writer.numDocs());
 
@@ -441,7 +441,7 @@ public class TestDeletionPolicy extends 
     assertEquals(11, r.numDocs());
     r.close();
 
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
     assertEquals(10, writer.numDocs());
     // Commits the rollback:
@@ -458,7 +458,7 @@ public class TestDeletionPolicy extends 
     r.close();
 
     // Reoptimize
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(policy));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(policy));
     writer.optimize();
     writer.close();
 
@@ -469,7 +469,7 @@ public class TestDeletionPolicy extends 
 
     // Now open writer on the commit just before optimize,
     // but this time keeping only the last commit:
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexCommit(lastCommit));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexCommit(lastCommit));
     assertEquals(10, writer.numDocs());
     
     // Reader still sees optimized index, because writer
@@ -505,7 +505,7 @@ public class TestDeletionPolicy extends 
       Directory dir = newDirectory();
 
       IndexWriterConfig conf = newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer())
+          TEST_VERSION_CURRENT, new MockAnalyzer(random))
           .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
           .setMaxBufferedDocs(10);
       MergePolicy mp = conf.getMergePolicy();
@@ -518,7 +518,7 @@ public class TestDeletionPolicy extends 
       }
       writer.close();
 
-      conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+      conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
           .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
       mp = conf.getMergePolicy();
       if (mp instanceof LogMergePolicy) {
@@ -558,7 +558,7 @@ public class TestDeletionPolicy extends 
 
       for(int j=0;j<N+1;j++) {
         IndexWriterConfig conf = newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer())
+            TEST_VERSION_CURRENT, new MockAnalyzer(random))
             .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
             .setMaxBufferedDocs(10);
         MergePolicy mp = conf.getMergePolicy();
@@ -618,8 +618,8 @@ public class TestDeletionPolicy extends 
 
       Directory dir = newDirectory();
       IndexWriterConfig conf = newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer())
-        .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy).setMergePolicy(newInOrderLogMergePolicy());
+          TEST_VERSION_CURRENT, new MockAnalyzer(random))
+        .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy).setMergePolicy(newLogMergePolicy());
       MergePolicy mp = conf.getMergePolicy();
       if (mp instanceof LogMergePolicy) {
         ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
@@ -634,13 +634,14 @@ public class TestDeletionPolicy extends 
           System.out.println("\nTEST: cycle i=" + i);
         }
         conf = newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer())
-            .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
+            TEST_VERSION_CURRENT, new MockAnalyzer(random))
+          .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy).setMergePolicy(newLogMergePolicy());
         mp = conf.getMergePolicy();
         if (mp instanceof LogMergePolicy) {
           ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
         }
         writer = new IndexWriter(dir, conf);
+        writer.setInfoStream(VERBOSE ? System.out : null);
         for(int j=0;j<17;j++) {
           addDoc(writer);
         }
@@ -662,7 +663,7 @@ public class TestDeletionPolicy extends 
         reader.close();
         searcher.close();
       }
-      conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+      conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
           .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
       mp = conf.getMergePolicy();
       if (mp instanceof LogMergePolicy) {
@@ -741,7 +742,7 @@ public class TestDeletionPolicy extends 
 
       Directory dir = newDirectory();
       IndexWriterConfig conf = newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer())
+          TEST_VERSION_CURRENT, new MockAnalyzer(random))
           .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
           .setMaxBufferedDocs(10);
       MergePolicy mp = conf.getMergePolicy();
@@ -756,7 +757,7 @@ public class TestDeletionPolicy extends 
       for(int i=0;i<N+1;i++) {
 
         conf = newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer())
+            TEST_VERSION_CURRENT, new MockAnalyzer(random))
             .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy)
             .setMaxBufferedDocs(10);
         mp = conf.getMergePolicy();
@@ -780,7 +781,7 @@ public class TestDeletionPolicy extends 
         searcher.close();
 
         writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer())
+            TEST_VERSION_CURRENT, new MockAnalyzer(random))
             .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy));
         // This will not commit: there are no changes
         // pending because we opened for "create":

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDirectoryReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDirectoryReader.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDirectoryReader.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDirectoryReader.java Mon May  9 15:24:04 2011
@@ -44,8 +44,8 @@ public class TestDirectoryReader extends
     doc2 = new Document();
     DocHelper.setupDoc(doc1);
     DocHelper.setupDoc(doc2);
-    DocHelper.writeDoc(dir, doc1);
-    DocHelper.writeDoc(dir, doc2);
+    DocHelper.writeDoc(random, dir, doc1);
+    DocHelper.writeDoc(random, dir, doc2);
     sis = new SegmentInfos();
     sis.read(dir);
   }
@@ -199,7 +199,7 @@ public class TestDirectoryReader extends
   private void addDoc(Random random, Directory ramDir1, String s, boolean create) throws IOException {
     IndexWriter iw = new IndexWriter(ramDir1, newIndexWriterConfig( 
         TEST_VERSION_CURRENT, 
-        new MockAnalyzer()).setOpenMode(
+        new MockAnalyzer(random)).setOpenMode(
         create ? OpenMode.CREATE : OpenMode.APPEND));
     Document doc = new Document();
     doc.add(newField("body", s, Field.Store.YES, Field.Index.ANALYZED));

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDoc.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDoc.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDoc.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDoc.java Mon May  9 15:24:04 2011
@@ -36,6 +36,7 @@ import org.apache.lucene.index.IndexWrit
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.index.codecs.CodecProvider;
 
 
@@ -60,10 +61,10 @@ public class TestDoc extends LuceneTestC
         if (VERBOSE) {
           System.out.println("TEST: setUp");
         }
-        workDir = new File(TEMP_DIR,"TestDoc");
+        workDir = _TestUtil.getTempDir("TestDoc");
         workDir.mkdirs();
 
-        indexDir = new File(workDir, "testIndex");
+        indexDir = _TestUtil.getTempDir("testIndex");
         indexDir.mkdirs();
 
         Directory directory = newFSDirectory(indexDir);
@@ -114,7 +115,7 @@ public class TestDoc extends LuceneTestC
       Directory directory = newFSDirectory(indexDir);
       IndexWriter writer = new IndexWriter(
           directory,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
               setOpenMode(OpenMode.CREATE).
               setMaxBufferedDocs(-1).
               setMergePolicy(newLogMergePolicy(10))
@@ -148,7 +149,7 @@ public class TestDoc extends LuceneTestC
       directory = newFSDirectory(indexDir);
       writer = new IndexWriter(
           directory,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
               setOpenMode(OpenMode.CREATE).
               setMaxBufferedDocs(-1).
               setMergePolicy(newLogMergePolicy(10))

Copied: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java (from r1088049, lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java)
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java?p2=lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java&p1=lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java&r1=1088049&r2=1101062&rev=1101062&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java Mon May  9 15:24:04 2011
@@ -60,7 +60,7 @@ public class TestDocTermOrds extends Luc
 
   public void testSimple() throws Exception {
     Directory dir = newDirectory();
-    final RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newInOrderLogMergePolicy()));
+    final RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     Document doc = new Document();
     Field field = newField("field", "", Field.Index.ANALYZED);
     doc.add(field);
@@ -228,7 +228,7 @@ public class TestDocTermOrds extends Luc
     
     final int NUM_DOCS = 1000 * RANDOM_MULTIPLIER;
 
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
 
     // Sometimes swap in codec that impls ord():
     if (random.nextInt(10) == 7) {
@@ -331,7 +331,7 @@ public class TestDocTermOrds extends Luc
     
     final int NUM_DOCS = 1000 * RANDOM_MULTIPLIER;
 
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
 
     // Sometimes swap in codec that impls ord():
     if (random.nextInt(10) == 7) {

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDocsAndPositions.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDocsAndPositions.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDocsAndPositions.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDocsAndPositions.java Mon May  9 15:24:04 2011
@@ -21,7 +21,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 
 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
@@ -34,13 +33,11 @@ import org.apache.lucene.util.ReaderUtil
 
 public class TestDocsAndPositions extends LuceneTestCase {
   private String fieldName;
-  private boolean usePayload;
 
   @Override
   public void setUp() throws Exception {
     super.setUp();
     fieldName = "field" + random.nextInt();
-    usePayload = random.nextBoolean();
   }
 
   /**
@@ -49,8 +46,7 @@ public class TestDocsAndPositions extend
   public void testPositionsSimple() throws IOException {
     Directory directory = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
-            MockTokenizer.WHITESPACE, true, usePayload)));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     for (int i = 0; i < 39; i++) {
       Document doc = new Document();
       doc.add(newField(fieldName, "1 2 3 4 5 6 7 8 9 10 "
@@ -75,7 +71,7 @@ public class TestDocsAndPositions extend
         final int advance = docsAndPosEnum.advance(random.nextInt(atomicReaderContext.reader.maxDoc()));
         do {
           String msg = "Advanced to: " + advance + " current doc: "
-              + docsAndPosEnum.docID() + " usePayloads: " + usePayload;
+              + docsAndPosEnum.docID(); // TODO: + " usePayloads: " + usePayload;
           assertEquals(msg, 4, docsAndPosEnum.freq());
           assertEquals(msg, 0, docsAndPosEnum.nextPosition());
           assertEquals(msg, 4, docsAndPosEnum.freq());
@@ -134,8 +130,7 @@ public class TestDocsAndPositions extend
   public void testRandomPositions() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
-            MockTokenizer.WHITESPACE, true, usePayload)).setMergePolicy(newInOrderLogMergePolicy()));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     int numDocs = 131;
     int max = 1051;
     int term = random.nextInt(max);
@@ -195,8 +190,8 @@ public class TestDocsAndPositions extend
           for (int j = 0; j < howMany; j++) {
             assertEquals("iteration: " + i + " initDoc: " + initDoc + " doc: "
                 + docID + " base: " + atomicReaderContext.docBase
-                + " positions: " + Arrays.toString(pos) + " usePayloads: "
-                + usePayload, pos[j].intValue(), docsAndPosEnum.nextPosition());
+                + " positions: " + Arrays.toString(pos) /* TODO: + " usePayloads: "
+                + usePayload*/, pos[j].intValue(), docsAndPosEnum.nextPosition());
           }
 
           if (random.nextInt(10) == 0) { // once in a while advance
@@ -215,8 +210,7 @@ public class TestDocsAndPositions extend
   public void testRandomDocs() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
-                                                                    MockTokenizer.WHITESPACE, true, usePayload)).setMergePolicy(newInOrderLogMergePolicy()));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     int numDocs = 499;
     int max = 15678;
     int term = random.nextInt(max);
@@ -294,8 +288,7 @@ public class TestDocsAndPositions extend
   public void testLargeNumberOfPositions() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
-            MockTokenizer.WHITESPACE, true, usePayload)));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     int howMany = 1000;
     for (int i = 0; i < 39; i++) {
       Document doc = new Document();
@@ -334,8 +327,7 @@ public class TestDocsAndPositions extend
         } else {
           initDoc = docsAndPosEnum.advance(random.nextInt(maxDoc));
         }
-        String msg = "Iteration: " + i + " initDoc: " + initDoc + " payloads: "
-            + usePayload;
+        String msg = "Iteration: " + i + " initDoc: " + initDoc; // TODO: + " payloads: " + usePayload;
         assertEquals(howMany / 2, docsAndPosEnum.freq());
         for (int j = 0; j < howMany; j += 2) {
           assertEquals("position missmatch index: " + j + " with freq: "

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java Mon May  9 15:24:04 2011
@@ -62,7 +62,7 @@ public class TestDocumentWriter extends 
   public void testAddDocument() throws Exception {
     Document testDoc = new Document();
     DocHelper.setupDoc(testDoc);
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer.addDocument(testDoc);
     writer.commit();
     SegmentInfo info = writer.newestSegment();
@@ -211,7 +211,7 @@ public class TestDocumentWriter extends 
 
   public void testPreAnalyzedField() throws IOException {
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     
     doc.add(new Field("preanalyzed", new TokenStream() {
@@ -271,7 +271,7 @@ public class TestDocumentWriter extends 
     doc.add(newField("f2", "v2", Store.YES, Index.NOT_ANALYZED, TermVector.NO));
 
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer.addDocument(doc);
     writer.close();
 
@@ -306,7 +306,7 @@ public class TestDocumentWriter extends 
     doc.add(newField("f2", "v2", Store.YES, Index.NO));
 
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer.addDocument(doc);
     writer.optimize(); // be sure to have a single segment
     writer.close();
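
The testPreAnalyzedField hunk above shows only the opening of the inline TokenStream. A hypothetical minimal version of such a pre-analyzed field, assuming the standard CharTermAttribute API (org.apache.lucene.analysis.tokenattributes.CharTermAttribute) and the Field(String, TokenStream, TermVector) constructor; the token values are illustrative only, not the ones the real test uses:

    doc.add(new Field("preanalyzed", new TokenStream() {
      private final String[] tokens = new String[] {"term1", "term2", "term3"};
      private int index = 0;
      private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);

      @Override
      public boolean incrementToken() {
        if (index == tokens.length) {
          return false;                         // stream exhausted
        }
        clearAttributes();
        termAtt.setEmpty().append(tokens[index++]);
        return true;
      }
    }, TermVector.NO));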

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java Mon May  9 15:24:04 2011
@@ -24,12 +24,14 @@ import java.util.*;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.NumericField;
 import org.apache.lucene.document.FieldSelector;
 import org.apache.lucene.document.FieldSelectorResult;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.document.LoadFirstFieldSelector;
 import org.apache.lucene.document.SetBasedFieldSelector;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.BufferedIndexInput;
 import org.apache.lucene.store.Directory;
@@ -51,7 +53,7 @@ public class TestFieldsReader extends Lu
     DocHelper.setupDoc(testDoc);
     _TestUtil.add(testDoc, fieldInfos);
     dir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy());
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy());
     ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false);
     IndexWriter writer = new IndexWriter(dir, conf);
     writer.addDocument(testDoc);
@@ -286,12 +288,11 @@ public class TestFieldsReader extends Lu
    */
   public void testLazyPerformance() throws Exception {
     String userName = System.getProperty("user.name");
-    File file = new File(TEMP_DIR, "lazyDir" + userName);
-    _TestUtil.rmDir(file);
+    File file = _TestUtil.getTempDir("lazyDir" + userName);
     Directory tmpDir = newFSDirectory(file);
     assertTrue(tmpDir != null);
 
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setMergePolicy(newLogMergePolicy());
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE).setMergePolicy(newLogMergePolicy());
     ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false);
     IndexWriter writer = new IndexWriter(tmpDir, conf);
     writer.addDocument(testDoc);
@@ -473,12 +474,12 @@ public class TestFieldsReader extends Lu
 
   // LUCENE-1262
   public void testExceptions() throws Throwable {
-    File indexDir = new File(TEMP_DIR, "testfieldswriterexceptions");
+    File indexDir = _TestUtil.getTempDir("testfieldswriterexceptions");
 
     try {
       Directory dir = new FaultyFSDirectory(indexDir);
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( 
-          TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
       for(int i=0;i<2;i++)
         writer.addDocument(testDoc);
       writer.optimize();
@@ -512,4 +513,69 @@ public class TestFieldsReader extends Lu
     }
 
   }
+  
+  public void testNumericField() throws Exception {
+    Directory dir = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random, dir);
+    final int numDocs = _TestUtil.nextInt(random, 500, 1000) * RANDOM_MULTIPLIER;
+    final Number[] answers = new Number[numDocs];
+    final NumericField.DataType[] typeAnswers = new NumericField.DataType[numDocs];
+    for(int id=0;id<numDocs;id++) {
+      Document doc = new Document();
+      NumericField nf = new NumericField("nf", Field.Store.YES, false);
+      doc.add(nf);
+      final Number answer;
+      final NumericField.DataType typeAnswer;
+      if (random.nextBoolean()) {
+        // float/double
+        if (random.nextBoolean()) {
+          final float f = random.nextFloat();
+          nf.setFloatValue(f);
+          answer = Float.valueOf(f);
+          typeAnswer = NumericField.DataType.FLOAT;
+        } else {
+          final double d = random.nextDouble();
+          nf.setDoubleValue(d);
+          answer = Double.valueOf(d);
+          typeAnswer = NumericField.DataType.DOUBLE;
+        }
+      } else {
+        // int/long
+        if (random.nextBoolean()) {
+          final int i = random.nextInt();
+          nf.setIntValue(i);
+          answer = Integer.valueOf(i);
+          typeAnswer = NumericField.DataType.INT;
+        } else {
+          final long l = random.nextLong();
+          nf.setLongValue(l);
+          answer = Long.valueOf(l);
+          typeAnswer = NumericField.DataType.LONG;
+        }
+      }
+      answers[id] = answer;
+      typeAnswers[id] = typeAnswer;
+      doc.add(new NumericField("id", Integer.MAX_VALUE, Field.Store.NO, true).setIntValue(id));
+      w.addDocument(doc);
+    }
+    final IndexReader r = w.getReader();
+    w.close();
+    
+    assertEquals(numDocs, r.numDocs());
+
+    for(IndexReader sub : r.getSequentialSubReaders()) {
+      final int[] ids = FieldCache.DEFAULT.getInts(sub, "id");
+      for(int docID=0;docID<sub.numDocs();docID++) {
+        final Document doc = sub.document(docID);
+        final Fieldable f = doc.getFieldable("nf");
+        assertTrue("got f=" + f, f instanceof NumericField);
+        final NumericField nf = (NumericField) f;
+        assertEquals(answers[ids[docID]], nf.getNumericValue());
+        assertSame(typeAnswers[ids[docID]], nf.getDataType());
+      }
+    }
+    r.close();
+    dir.close();
+  }
+  
 }
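
A condensed sketch of the round trip the new testNumericField() above exercises, assuming the behaviour the test asserts (a stored NumericField comes back from document() as a NumericField with its value and data type intact); class and method names are taken from the added test, the rest from the test framework:

    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random, dir);
    Document doc = new Document();
    NumericField nf = new NumericField("nf", Field.Store.YES, false);  // stored, not indexed
    nf.setLongValue(42L);
    doc.add(nf);
    w.addDocument(doc);
    IndexReader r = w.getReader();
    w.close();

    NumericField stored = (NumericField) r.document(0).getFieldable("nf");
    assertEquals(Long.valueOf(42L), stored.getNumericValue());
    assertSame(NumericField.DataType.LONG, stored.getDataType());
    r.close();
    dir.close();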

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java Mon May  9 15:24:04 2011
@@ -126,7 +126,7 @@ public class TestFilterIndexReader exten
    */
   public void testFilterIndexReader() throws Exception {
     Directory directory = newDirectory();
-    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 
     Document d1 = new Document();
     d1.add(newField("default","one two", Field.Store.YES, Field.Index.ANALYZED));
@@ -143,7 +143,7 @@ public class TestFilterIndexReader exten
     writer.close();
 
     Directory target = newDirectory();
-    writer = new IndexWriter(target, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(target, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     IndexReader reader = new TestReader(IndexReader.open(directory, true));
     writer.addIndexes(reader);
     writer.close();

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFlex.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFlex.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFlex.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFlex.java Mon May  9 15:24:04 2011
@@ -32,7 +32,7 @@ public class TestFlex extends LuceneTest
 
     IndexWriter w = new IndexWriter(
         d,
-        new IndexWriterConfig(Version.LUCENE_31, new MockAnalyzer()).
+        new IndexWriterConfig(Version.LUCENE_31, new MockAnalyzer(random)).
             setMaxBufferedDocs(7)
     );
 
@@ -64,7 +64,7 @@ public class TestFlex extends LuceneTest
   public void testTermOrd() throws Exception {
     Directory d = newDirectory();
     IndexWriter w = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-                                                             new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
+                                                             new MockAnalyzer(random)).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
     Document doc = new Document();
     doc.add(newField("f", "a b c", Field.Store.NO, Field.Index.ANALYZED));
     w.addDocument(doc);
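
A minimal sketch of pinning a single codec for a whole test, as testTermOrd() does in the hunk above (_TestUtil.alwaysCodec and newIndexWriterConfig are test-framework helpers; "Standard" is the codec name the hunk uses), presumably so the test always runs against a terms dictionary that supports ord():

    Directory d = newDirectory();
    IndexWriter w = new IndexWriter(d,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
            .setCodecProvider(_TestUtil.alwaysCodec("Standard")));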

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestGlobalFieldNumbers.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestGlobalFieldNumbers.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestGlobalFieldNumbers.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestGlobalFieldNumbers.java Mon May  9 15:24:04 2011
@@ -46,7 +46,7 @@ public class TestGlobalFieldNumbers exte
       Directory dir = newDirectory();
       {
         IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
-            new MockAnalyzer());
+            new MockAnalyzer(random));
         IndexWriter writer = new IndexWriter(dir, config);
         Document d = new Document();
         d.add(new Field("f1", "d1 first field", Store.YES, Index.ANALYZED,
@@ -83,7 +83,7 @@ public class TestGlobalFieldNumbers exte
 
       {
         IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer()));
+            TEST_VERSION_CURRENT, new MockAnalyzer(random)));
         Document d = new Document();
         d.add(new Field("f1", "d3 first field", Store.YES, Index.ANALYZED,
             TermVector.NO));
@@ -102,7 +102,7 @@ public class TestGlobalFieldNumbers exte
       }
 
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       writer.optimize();
       assertFalse(" field numbers got mixed up", writer.anyNonBulkMerges);
       writer.close();
@@ -117,7 +117,7 @@ public class TestGlobalFieldNumbers exte
       Directory dir = newDirectory();
       {
         IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
-            new MockAnalyzer());
+            new MockAnalyzer(random));
         IndexWriter writer = new IndexWriter(dir, config);
         Document d = new Document();
         d.add(new Field("f1", "d1 first field", Store.YES, Index.ANALYZED,
@@ -145,7 +145,7 @@ public class TestGlobalFieldNumbers exte
       assertFNXFiles(dir, "2.fnx");
 
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       writer.optimize();
       assertFalse(" field numbers got mixed up", writer.anyNonBulkMerges);
       writer.close();
@@ -160,7 +160,7 @@ public class TestGlobalFieldNumbers exte
       Directory dir = newDirectory();
       {
         IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(
+            TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(
             new KeepAllDeletionPolicy()));
         Document d = new Document();
         d.add(new Field("f1", "d1 first field", Store.YES, Index.ANALYZED,
@@ -185,7 +185,7 @@ public class TestGlobalFieldNumbers exte
 
       {
         IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer()));
+            TEST_VERSION_CURRENT, new MockAnalyzer(random)));
         Document d = new Document();
         d.add(new Field("f1", "d3 first field", Store.YES, Index.ANALYZED,
             TermVector.NO));
@@ -197,7 +197,7 @@ public class TestGlobalFieldNumbers exte
         assertFNXFiles(dir, "2.fnx");
       }
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       writer.optimize();
       assertFalse(" field numbers got mixed up", writer.anyNonBulkMerges);
       writer.close();
@@ -210,7 +210,7 @@ public class TestGlobalFieldNumbers exte
     for (int i = 0; i < 39; i++) {
       Directory dir = newDirectory();
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(
           new KeepAllDeletionPolicy()));
       Document d = new Document();
       d.add(new Field("f1", "d1 first field", Store.YES, Index.ANALYZED,
@@ -232,7 +232,7 @@ public class TestGlobalFieldNumbers exte
       List<IndexCommit> listCommits = IndexReader.listCommits(dir);
       assertEquals(2, listCommits.size());
       writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
-          new MockAnalyzer()).setIndexDeletionPolicy(
+          new MockAnalyzer(random)).setIndexDeletionPolicy(
           new KeepAllDeletionPolicy()).setIndexCommit(listCommits.get(0)));
 
       d = new Document();
@@ -247,7 +247,7 @@ public class TestGlobalFieldNumbers exte
       assertFNXFiles(dir, "1.fnx", "2.fnx", "3.fnx");
 
       writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
-          new MockAnalyzer()));
+          new MockAnalyzer(random)));
       writer.commit();
       listCommits = IndexReader.listCommits(dir);
       assertEquals(1, listCommits.size());
@@ -290,9 +290,9 @@ public class TestGlobalFieldNumbers exte
       }
       Directory base = buildRandomIndex(fieldNames.toArray(new String[0]),
           20 + random.nextInt(100),
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       IndexWriter writer = new IndexWriter(base, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       FieldNumberBiMap globalFieldMap = writer.segmentInfos
           .getOrLoadGlobalFieldNumberMap(base);
       Set<Entry<String, Integer>> entries = globalFieldMap.entries();
@@ -315,7 +315,7 @@ public class TestGlobalFieldNumbers exte
 
       Directory base = newDirectory();
       IndexWriter writer = new IndexWriter(base, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       Document doc = new Document();
       for (String string : fieldNames) {
         doc.add(newField(string,
@@ -339,9 +339,9 @@ public class TestGlobalFieldNumbers exte
       for (int j = 0; j < numIndexes; j++) {
         Directory toAdd = buildRandomIndex(fieldNames.toArray(new String[0]),
             1 + random.nextInt(50),
-            newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+            newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
         IndexWriter w = new IndexWriter(base, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer()));
+            TEST_VERSION_CURRENT, new MockAnalyzer(random)));
         if (random.nextBoolean()) {
           IndexReader open = IndexReader.open(toAdd);
           w.addIndexes(open);
@@ -357,7 +357,7 @@ public class TestGlobalFieldNumbers exte
         toAdd.close();
       }
       IndexWriter w = new IndexWriter(base, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
           new LogByteSizeMergePolicy()));
       w.optimize();
       w.close();
@@ -402,7 +402,7 @@ public class TestGlobalFieldNumbers exte
       }
       Directory base = newDirectory();
       IndexWriter writer = new IndexWriter(base, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
           NoMergePolicy.NO_COMPOUND_FILES));
 
       SortedMap<Integer, String> copySortedMap = new TreeMap<Integer, String>(
@@ -428,7 +428,7 @@ public class TestGlobalFieldNumbers exte
       writer.close();
 
       writer = new IndexWriter(base, newIndexWriterConfig(TEST_VERSION_CURRENT,
-          new MockAnalyzer()).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
+          new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
       writer.commit(); // make sure the old index is the latest segment
       writer.close();
 
@@ -459,7 +459,7 @@ public class TestGlobalFieldNumbers exte
           .unzip(getDataFile("index." + oldNames[i] + ".zip"), oldIndxeDir);
       dir = newFSDirectory(oldIndxeDir);
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(policy));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(policy));
       SegmentInfos segmentInfos = writer.segmentInfos;
       assertTrue(DefaultSegmentInfosWriter.FORMAT_4_0 < segmentInfos.getFormat());
       assertEquals(0, segmentInfos.getGlobalFieldMapVersion());
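
A minimal sketch of reopening a writer against an earlier commit, the pattern the hunks above keep exercising (KeepAllDeletionPolicy is a helper defined in this test class; IndexReader.listCommits, setIndexDeletionPolicy and setIndexCommit are the calls shown in the diff):

    List<IndexCommit> commits = IndexReader.listCommits(dir);
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
        new MockAnalyzer(random))
        .setIndexDeletionPolicy(new KeepAllDeletionPolicy())
        .setIndexCommit(commits.get(0)));   // same commit index the test picks above
    // ... add documents and commit; with KeepAllDeletionPolicy the superseded commits stay available
    writer.close();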

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java?rev=1101062&r1=1101061&r2=1101062&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java Mon May  9 15:24:04 2011
@@ -48,7 +48,7 @@ public class TestIndexFileDeleter extend
 
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMaxBufferedDocs(10).
             setMergePolicy(mergePolicy)
     );
@@ -152,7 +152,7 @@ public class TestIndexFileDeleter extend
 
     // Open & close a writer: it should delete the above 4
     // files and nothing more:
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     writer.close();
 
     String[] files2 = dir.listAll();