Posted to commits@lucene.apache.org by dw...@apache.org on 2012/04/15 16:42:01 UTC

svn commit: r1326351 [4/22] - in /lucene/dev/trunk: ./ dev-tools/eclipse/ lucene/ lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/ lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/custom/ lucene/contrib/high...
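
The hunks below all apply the same mechanical change to the test code: uses of the shared LuceneTestCase random field become calls to the random() accessor (e.g. newIOContext(random) -> newIOContext(random()), new MockAnalyzer(random) -> new MockAnalyzer(random())). A minimal sketch of the resulting pattern, assuming a test class extending LuceneTestCase; the class name, test method name, and file name here are illustrative only and are not part of this commit:

    import java.io.IOException;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.IndexOutput;
    import org.apache.lucene.util.LuceneTestCase;

    public class ExampleTest extends LuceneTestCase {
      public void testWriteRandomBytes() throws IOException {
        Directory dir = newDirectory();
        // Per-test randomness is obtained through the random() accessor
        // instead of the old shared field.
        IndexOutput out = dir.createOutput("example.bin", newIOContext(random()));
        for (int i = 0; i < 16; i++) {
          out.writeByte((byte) random().nextInt(256));
        }
        out.close();
        dir.close();
      }
    }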

Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestCompoundFile.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestCompoundFile.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestCompoundFile.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestCompoundFile.java Sun Apr 15 14:41:44 2012
@@ -54,7 +54,7 @@ public class TestCompoundFile extends Lu
     private void createRandomFile(Directory dir, String name, int size)
     throws IOException
     {
-        IndexOutput os = dir.createOutput(name, newIOContext(random));
+        IndexOutput os = dir.createOutput(name, newIOContext(random()));
         for (int i=0; i<size; i++) {
             byte b = (byte) (Math.random() * 256);
             os.writeByte(b);
@@ -72,7 +72,7 @@ public class TestCompoundFile extends Lu
                                     int size)
     throws IOException
     {
-        IndexOutput os = dir.createOutput(name, newIOContext(random));
+        IndexOutput os = dir.createOutput(name, newIOContext(random()));
         for (int i=0; i < size; i++) {
             os.writeByte(start);
             start ++;
@@ -182,13 +182,13 @@ public class TestCompoundFile extends Lu
         for (int i=0; i<data.length; i++) {
             String name = "t" + data[i];
             createSequenceFile(dir, name, (byte) 0, data[i]);
-            CompoundFileDirectory csw = new CompoundFileDirectory(dir, name + ".cfs", newIOContext(random), true);
-            dir.copy(csw, name, name, newIOContext(random));
+            CompoundFileDirectory csw = new CompoundFileDirectory(dir, name + ".cfs", newIOContext(random()), true);
+            dir.copy(csw, name, name, newIOContext(random()));
             csw.close();
 
-            CompoundFileDirectory csr = new CompoundFileDirectory(dir, name + ".cfs", newIOContext(random), false);
-            IndexInput expected = dir.openInput(name, newIOContext(random));
-            IndexInput actual = csr.openInput(name, newIOContext(random));
+            CompoundFileDirectory csr = new CompoundFileDirectory(dir, name + ".cfs", newIOContext(random()), false);
+            IndexInput expected = dir.openInput(name, newIOContext(random()));
+            IndexInput actual = csr.openInput(name, newIOContext(random()));
             assertSameStreams(name, expected, actual);
             assertSameSeekBehavior(name, expected, actual);
             expected.close();
@@ -205,21 +205,21 @@ public class TestCompoundFile extends Lu
         createSequenceFile(dir, "d1", (byte) 0, 15);
         createSequenceFile(dir, "d2", (byte) 0, 114);
 
-        CompoundFileDirectory csw = new CompoundFileDirectory(dir, "d.cfs", newIOContext(random), true);
-        dir.copy(csw, "d1", "d1", newIOContext(random));
-        dir.copy(csw, "d2", "d2", newIOContext(random));
+        CompoundFileDirectory csw = new CompoundFileDirectory(dir, "d.cfs", newIOContext(random()), true);
+        dir.copy(csw, "d1", "d1", newIOContext(random()));
+        dir.copy(csw, "d2", "d2", newIOContext(random()));
         csw.close();
 
-        CompoundFileDirectory csr = new CompoundFileDirectory(dir, "d.cfs", newIOContext(random), false);
-        IndexInput expected = dir.openInput("d1", newIOContext(random));
-        IndexInput actual = csr.openInput("d1", newIOContext(random));
+        CompoundFileDirectory csr = new CompoundFileDirectory(dir, "d.cfs", newIOContext(random()), false);
+        IndexInput expected = dir.openInput("d1", newIOContext(random()));
+        IndexInput actual = csr.openInput("d1", newIOContext(random()));
         assertSameStreams("d1", expected, actual);
         assertSameSeekBehavior("d1", expected, actual);
         expected.close();
         actual.close();
 
-        expected = dir.openInput("d2", newIOContext(random));
-        actual = csr.openInput("d2", newIOContext(random));
+        expected = dir.openInput("d2", newIOContext(random()));
+        actual = csr.openInput("d2", newIOContext(random()));
         assertSameStreams("d2", expected, actual);
         assertSameSeekBehavior("d2", expected, actual);
         expected.close();
@@ -255,21 +255,21 @@ public class TestCompoundFile extends Lu
         createRandomFile(dir, segment + ".notIn2", 51);
 
         // Now test
-        CompoundFileDirectory csw = new CompoundFileDirectory(dir, "test.cfs", newIOContext(random), true);
+        CompoundFileDirectory csw = new CompoundFileDirectory(dir, "test.cfs", newIOContext(random()), true);
         final String data[] = new String[] {
             ".zero", ".one", ".ten", ".hundred", ".big1", ".big2", ".big3",
             ".big4", ".big5", ".big6", ".big7"
         };
         for (int i=0; i<data.length; i++) {
             String fileName = segment + data[i];
-            dir.copy(csw, fileName, fileName, newIOContext(random));
+            dir.copy(csw, fileName, fileName, newIOContext(random()));
         }
         csw.close();
 
-        CompoundFileDirectory csr = new CompoundFileDirectory(dir, "test.cfs", newIOContext(random), false);
+        CompoundFileDirectory csr = new CompoundFileDirectory(dir, "test.cfs", newIOContext(random()), false);
         for (int i=0; i<data.length; i++) {
-            IndexInput check = dir.openInput(segment + data[i], newIOContext(random));
-            IndexInput test = csr.openInput(segment + data[i], newIOContext(random));
+            IndexInput check = dir.openInput(segment + data[i], newIOContext(random()));
+            IndexInput test = csr.openInput(segment + data[i], newIOContext(random()));
             assertSameStreams(data[i], check, test);
             assertSameSeekBehavior(data[i], check, test);
             test.close();
@@ -285,11 +285,11 @@ public class TestCompoundFile extends Lu
      *  the size of each file is 1000 bytes.
      */
     private void setUp_2() throws IOException {
-        CompoundFileDirectory cw = new CompoundFileDirectory(dir, "f.comp", newIOContext(random), true);
+        CompoundFileDirectory cw = new CompoundFileDirectory(dir, "f.comp", newIOContext(random()), true);
         for (int i=0; i<20; i++) {
             createSequenceFile(dir, "f" + i, (byte) 0, 2000);
             String fileName = "f" + i;
-            dir.copy(cw, fileName, fileName, newIOContext(random));
+            dir.copy(cw, fileName, fileName, newIOContext(random()));
         }
         cw.close();
     }
@@ -336,16 +336,16 @@ public class TestCompoundFile extends Lu
 
     public void testClonedStreamsClosing() throws IOException {
         setUp_2();
-        CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random), false);
+        CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random()), false);
 
         // basic clone
-        IndexInput expected = dir.openInput("f11", newIOContext(random));
+        IndexInput expected = dir.openInput("f11", newIOContext(random()));
 
         // this test only works for FSIndexInput
         assertTrue(_TestHelper.isSimpleFSIndexInput(expected));
         assertTrue(_TestHelper.isSimpleFSIndexInputOpen(expected));
 
-        IndexInput one = cr.openInput("f11", newIOContext(random));
+        IndexInput one = cr.openInput("f11", newIOContext(random()));
 
         IndexInput two = (IndexInput) one.clone();
 
@@ -388,14 +388,14 @@ public class TestCompoundFile extends Lu
      */
     public void testRandomAccess() throws IOException {
         setUp_2();
-        CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random), false);
+        CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random()), false);
 
         // Open two files
-        IndexInput e1 = dir.openInput("f11", newIOContext(random));
-        IndexInput e2 = dir.openInput("f3", newIOContext(random));
+        IndexInput e1 = dir.openInput("f11", newIOContext(random()));
+        IndexInput e2 = dir.openInput("f3", newIOContext(random()));
 
-        IndexInput a1 = cr.openInput("f11", newIOContext(random));
-        IndexInput a2 = dir.openInput("f3", newIOContext(random));
+        IndexInput a1 = cr.openInput("f11", newIOContext(random()));
+        IndexInput a2 = dir.openInput("f3", newIOContext(random()));
 
         // Seek the first pair
         e1.seek(100);
@@ -467,11 +467,11 @@ public class TestCompoundFile extends Lu
      */
     public void testRandomAccessClones() throws IOException {
         setUp_2();
-        CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random), false);
+        CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random()), false);
 
         // Open two files
-        IndexInput e1 = cr.openInput("f11", newIOContext(random));
-        IndexInput e2 = cr.openInput("f3", newIOContext(random));
+        IndexInput e1 = cr.openInput("f11", newIOContext(random()));
+        IndexInput e2 = cr.openInput("f3", newIOContext(random()));
 
         IndexInput a1 = (IndexInput) e1.clone();
         IndexInput a2 = (IndexInput) e2.clone();
@@ -544,11 +544,11 @@ public class TestCompoundFile extends Lu
 
     public void testFileNotFound() throws IOException {
         setUp_2();
-        CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random), false);
+        CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random()), false);
 
         // Open two files
         try {
-            cr.openInput("bogus", newIOContext(random));
+            cr.openInput("bogus", newIOContext(random()));
             fail("File not found");
 
         } catch (IOException e) {
@@ -562,8 +562,8 @@ public class TestCompoundFile extends Lu
 
     public void testReadPastEOF() throws IOException {
         setUp_2();
-        CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random), false);
-        IndexInput is = cr.openInput("f2", newIOContext(random));
+        CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random()), false);
+        IndexInput is = cr.openInput("f2", newIOContext(random()));
         is.seek(is.length() - 10);
         byte b[] = new byte[100];
         is.readBytes(b, 0, 10);
@@ -593,7 +593,7 @@ public class TestCompoundFile extends Lu
      * will correctly increment the file pointer.
      */
     public void testLargeWrites() throws IOException {
-        IndexOutput os = dir.createOutput("testBufferStart.txt", newIOContext(random));
+        IndexOutput os = dir.createOutput("testBufferStart.txt", newIOContext(random()));
 
         byte[] largeBuf = new byte[2048];
         for (int i=0; i<largeBuf.length; i++) {
@@ -615,13 +615,13 @@ public class TestCompoundFile extends Lu
        createSequenceFile(dir, "d1", (byte) 0, 15);
 
        Directory newDir = newDirectory();
-       CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), true);
-       dir.copy(csw, "d1", "d1", newIOContext(random));
+       CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), true);
+       dir.copy(csw, "d1", "d1", newIOContext(random()));
        csw.close();
 
-       CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), false);
-       IndexInput expected = dir.openInput("d1", newIOContext(random));
-       IndexInput actual = csr.openInput("d1", newIOContext(random));
+       CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), false);
+       IndexInput expected = dir.openInput("d1", newIOContext(random()));
+       IndexInput actual = csr.openInput("d1", newIOContext(random()));
        assertSameStreams("d1", expected, actual);
        assertSameSeekBehavior("d1", expected, actual);
        expected.close();
@@ -634,10 +634,10 @@ public class TestCompoundFile extends Lu
    
   public void testAppend() throws IOException {
     Directory newDir = newDirectory();
-    CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), true);
-    int size = 5 + random.nextInt(128);
+    CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), true);
+    int size = 5 + random().nextInt(128);
     for (int j = 0; j < 2; j++) {
-      IndexOutput os = csw.createOutput("seg_" + j + "_foo.txt", newIOContext(random));
+      IndexOutput os = csw.createOutput("seg_" + j + "_foo.txt", newIOContext(random()));
       for (int i = 0; i < size; i++) {
         os.writeInt(i*j);
       }
@@ -647,14 +647,14 @@ public class TestCompoundFile extends Lu
       assertEquals("d.cfs", listAll[0]);
     }
     createSequenceFile(dir, "d1", (byte) 0, 15);
-    dir.copy(csw, "d1", "d1", newIOContext(random));
+    dir.copy(csw, "d1", "d1", newIOContext(random()));
     String[] listAll = newDir.listAll();
     assertEquals(1, listAll.length);
     assertEquals("d.cfs", listAll[0]);
     csw.close();
-    CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), false);
+    CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), false);
     for (int j = 0; j < 2; j++) {
-      IndexInput openInput = csr.openInput("seg_" + j + "_foo.txt", newIOContext(random));
+      IndexInput openInput = csr.openInput("seg_" + j + "_foo.txt", newIOContext(random()));
       assertEquals(size * 4, openInput.length());
       for (int i = 0; i < size; i++) {
         assertEquals(i*j, openInput.readInt());
@@ -663,8 +663,8 @@ public class TestCompoundFile extends Lu
       openInput.close();
 
     }
-    IndexInput expected = dir.openInput("d1", newIOContext(random));
-    IndexInput actual = csr.openInput("d1", newIOContext(random));
+    IndexInput expected = dir.openInput("d1", newIOContext(random()));
+    IndexInput actual = csr.openInput("d1", newIOContext(random()));
     assertSameStreams("d1", expected, actual);
     assertSameSeekBehavior("d1", expected, actual);
     expected.close();
@@ -675,12 +675,12 @@ public class TestCompoundFile extends Lu
   
   public void testAppendTwice() throws IOException {
     Directory newDir = newDirectory();
-    CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), true);
+    CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), true);
     createSequenceFile(newDir, "d1", (byte) 0, 15);
-    IndexOutput out = csw.createOutput("d.xyz", newIOContext(random));
+    IndexOutput out = csw.createOutput("d.xyz", newIOContext(random()));
     out.writeInt(0);
     try {
-      newDir.copy(csw, "d1", "d1", newIOContext(random));
+      newDir.copy(csw, "d1", "d1", newIOContext(random()));
       fail("file does already exist");
     } catch (IllegalArgumentException e) {
       //
@@ -691,7 +691,7 @@ public class TestCompoundFile extends Lu
    
     csw.close();
 
-    CompoundFileDirectory cfr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), false);
+    CompoundFileDirectory cfr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), false);
     assertEquals(1, cfr.listAll().length);
     assertEquals("d.xyz", cfr.listAll()[0]);
     cfr.close();
@@ -700,10 +700,10 @@ public class TestCompoundFile extends Lu
   
   public void testEmptyCFS() throws IOException {
     Directory newDir = newDirectory();
-    CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), true);
+    CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), true);
     csw.close();
 
-    CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), false);
+    CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), false);
     assertEquals(0, csr.listAll().length);
     csr.close();
 
@@ -712,32 +712,32 @@ public class TestCompoundFile extends Lu
   
   public void testReadNestedCFP() throws IOException {
     Directory newDir = newDirectory();
-    CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), true);
-    CompoundFileDirectory nested = new CompoundFileDirectory(newDir, "b.cfs", newIOContext(random), true);
-    IndexOutput out = nested.createOutput("b.xyz", newIOContext(random));
-    IndexOutput out1 = nested.createOutput("b_1.xyz", newIOContext(random));
+    CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), true);
+    CompoundFileDirectory nested = new CompoundFileDirectory(newDir, "b.cfs", newIOContext(random()), true);
+    IndexOutput out = nested.createOutput("b.xyz", newIOContext(random()));
+    IndexOutput out1 = nested.createOutput("b_1.xyz", newIOContext(random()));
     out.writeInt(0);
     out1.writeInt(1);
     out.close();
     out1.close();
     nested.close();
-    newDir.copy(csw, "b.cfs", "b.cfs", newIOContext(random));
-    newDir.copy(csw, "b.cfe", "b.cfe", newIOContext(random));
+    newDir.copy(csw, "b.cfs", "b.cfs", newIOContext(random()));
+    newDir.copy(csw, "b.cfe", "b.cfe", newIOContext(random()));
     newDir.deleteFile("b.cfs");
     newDir.deleteFile("b.cfe");
     csw.close();
     
     assertEquals(2, newDir.listAll().length);
-    csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), false);
+    csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), false);
     
     assertEquals(2, csw.listAll().length);
-    nested = new CompoundFileDirectory(csw, "b.cfs", newIOContext(random), false);
+    nested = new CompoundFileDirectory(csw, "b.cfs", newIOContext(random()), false);
     
     assertEquals(2, nested.listAll().length);
-    IndexInput openInput = nested.openInput("b.xyz", newIOContext(random));
+    IndexInput openInput = nested.openInput("b.xyz", newIOContext(random()));
     assertEquals(0, openInput.readInt());
     openInput.close();
-    openInput = nested.openInput("b_1.xyz", newIOContext(random));
+    openInput = nested.openInput("b_1.xyz", newIOContext(random()));
     assertEquals(1, openInput.readInt());
     openInput.close();
     nested.close();
@@ -747,8 +747,8 @@ public class TestCompoundFile extends Lu
   
   public void testDoubleClose() throws IOException {
     Directory newDir = newDirectory();
-    CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), true);
-    IndexOutput out = csw.createOutput("d.xyz", newIOContext(random));
+    CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), true);
+    IndexOutput out = csw.createOutput("d.xyz", newIOContext(random()));
     out.writeInt(0);
     out.close();
     
@@ -756,8 +756,8 @@ public class TestCompoundFile extends Lu
     // close a second time - must have no effect according to Closeable
     csw.close();
     
-    csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), false);
-    IndexInput openInput = csw.openInput("d.xyz", newIOContext(random));
+    csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), false);
+    IndexInput openInput = csw.openInput("d.xyz", newIOContext(random()));
     assertEquals(0, openInput.readInt());
     openInput.close();
     csw.close();
@@ -776,22 +776,22 @@ public class TestCompoundFile extends Lu
     final int FILE_COUNT = atLeast(500);
 
     for(int fileIdx=0;fileIdx<FILE_COUNT;fileIdx++) {
-      IndexOutput out = d.createOutput("file." + fileIdx, newIOContext(random));
+      IndexOutput out = d.createOutput("file." + fileIdx, newIOContext(random()));
       out.writeByte((byte) fileIdx);
       out.close();
     }
     
-    final CompoundFileDirectory cfd = new CompoundFileDirectory(d, "c.cfs", newIOContext(random), true);
+    final CompoundFileDirectory cfd = new CompoundFileDirectory(d, "c.cfs", newIOContext(random()), true);
     for(int fileIdx=0;fileIdx<FILE_COUNT;fileIdx++) {
       final String fileName = "file." + fileIdx;
-      d.copy(cfd, fileName, fileName, newIOContext(random));
+      d.copy(cfd, fileName, fileName, newIOContext(random()));
     }
     cfd.close();
 
     final IndexInput[] ins = new IndexInput[FILE_COUNT];
-    final CompoundFileDirectory cfr = new CompoundFileDirectory(d, "c.cfs", newIOContext(random), false);
+    final CompoundFileDirectory cfr = new CompoundFileDirectory(d, "c.cfs", newIOContext(random()), false);
     for(int fileIdx=0;fileIdx<FILE_COUNT;fileIdx++) {
-      ins[fileIdx] = cfr.openInput("file." + fileIdx, newIOContext(random));
+      ins[fileIdx] = cfr.openInput("file." + fileIdx, newIOContext(random()));
     }
 
     for(int fileIdx=0;fileIdx<FILE_COUNT;fileIdx++) {

Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java Sun Apr 15 14:41:44 2012
@@ -58,7 +58,7 @@ public class TestConcurrentMergeSchedule
             isClose = true;
           }
         }
-        if (isDoFlush && !isClose && random.nextBoolean()) {
+        if (isDoFlush && !isClose && random().nextBoolean()) {
           hitExc = true;
           throw new IOException(Thread.currentThread().getName() + ": now failing during flush");
         }
@@ -73,7 +73,7 @@ public class TestConcurrentMergeSchedule
     FailOnlyOnFlush failure = new FailOnlyOnFlush();
     directory.failOn(failure);
 
-    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
     Document doc = new Document();
     Field idField = newField("id", "", StringField.TYPE_STORED);
     doc.add(idField);
@@ -130,7 +130,7 @@ public class TestConcurrentMergeSchedule
     // start:
     mp.setMinMergeDocs(1000);
     IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(random))
+        TEST_VERSION_CURRENT, new MockAnalyzer(random()))
         .setMergePolicy(mp));
 
     Document doc = new Document();
@@ -168,7 +168,7 @@ public class TestConcurrentMergeSchedule
   public void testNoExtraFiles() throws IOException {
     MockDirectoryWrapper directory = newDirectory();
     IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(random))
+        TEST_VERSION_CURRENT, new MockAnalyzer(random()))
         .setMaxBufferedDocs(2));
 
     for(int iter=0;iter<7;iter++) {
@@ -187,7 +187,7 @@ public class TestConcurrentMergeSchedule
 
       // Reopen
       writer = new IndexWriter(directory, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random))
+          TEST_VERSION_CURRENT, new MockAnalyzer(random()))
           .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2));
     }
 
@@ -204,7 +204,7 @@ public class TestConcurrentMergeSchedule
 
     IndexWriter writer = new IndexWriter(
         directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
             setMaxBufferedDocs(2).
             setMergePolicy(newLogMergePolicy(100))
     );
@@ -237,7 +237,7 @@ public class TestConcurrentMergeSchedule
       // Reopen
       writer = new IndexWriter(
           directory,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
               setOpenMode(OpenMode.APPEND).
               setMergePolicy(newLogMergePolicy(100))
       );

Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java Sun Apr 15 14:41:44 2012
@@ -37,7 +37,7 @@ public class TestConsistentFieldNumbers 
   public void testSameFieldNumbersAcrossSegments() throws Exception {
     for (int i = 0; i < 2; i++) {
       Directory dir = newDirectory();
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
 
       Document d1 = new Document();
       d1.add(new Field("f1", "first field", StringField.TYPE_STORED));
@@ -46,7 +46,7 @@ public class TestConsistentFieldNumbers 
 
       if (i == 1) {
         writer.close();
-        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
       } else {
         writer.commit();
       }
@@ -76,7 +76,7 @@ public class TestConsistentFieldNumbers 
       assertEquals("f3", fis2.fieldInfo(2).name);
       assertEquals("f4", fis2.fieldInfo(3).name);
 
-      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
       writer.forceMerge(1);
       writer.close();
 
@@ -100,7 +100,7 @@ public class TestConsistentFieldNumbers 
   public void testAddIndexes() throws Exception {
     Directory dir1 = newDirectory();
     Directory dir2 = newDirectory();
-    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
 
     Document d1 = new Document();
     d1.add(new Field("f1", "first field", TextField.TYPE_STORED));
@@ -108,7 +108,7 @@ public class TestConsistentFieldNumbers 
     writer.addDocument(d1);
 
     writer.close();
-    writer = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+    writer = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
 
     Document d2 = new Document();
     FieldType customType2 = new FieldType(TextField.TYPE_STORED);
@@ -121,7 +121,7 @@ public class TestConsistentFieldNumbers 
 
     writer.close();
 
-    writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+    writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
     writer.addIndexes(dir2);
     writer.close();
 
@@ -140,7 +140,7 @@ public class TestConsistentFieldNumbers 
     assertEquals("f3", fis2.fieldInfo(2).name);
     assertEquals("f4", fis2.fieldInfo(3).name);
 
-    writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+    writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
     writer.forceMerge(1);
     writer.close();
 
@@ -166,7 +166,7 @@ public class TestConsistentFieldNumbers 
       Directory dir = newDirectory();
       {
         IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
+            TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(
             NoMergePolicy.NO_COMPOUND_FILES));
         Document d = new Document();
         d.add(new Field("f1", "d1 first field", TextField.TYPE_STORED));
@@ -184,8 +184,8 @@ public class TestConsistentFieldNumbers 
 
       {
         IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
-            random.nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
+            TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(
+            random().nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
                 : NoMergePolicy.COMPOUND_FILES));
         Document d = new Document();
         d.add(new Field("f1", "d2 first field", TextField.TYPE_STORED));
@@ -206,8 +206,8 @@ public class TestConsistentFieldNumbers 
 
       {
         IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
-            random.nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
+            TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(
+            random().nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
                 : NoMergePolicy.COMPOUND_FILES));
         Document d = new Document();
         d.add(new Field("f1", "d3 first field", TextField.TYPE_STORED));
@@ -233,8 +233,8 @@ public class TestConsistentFieldNumbers 
 
       {
         IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
-            random.nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
+            TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(
+            random().nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
                 : NoMergePolicy.COMPOUND_FILES));
         writer.deleteDocuments(new Term("f1", "d1"));
         // nuke the first segment entirely so that the segment with gaps is
@@ -244,7 +244,7 @@ public class TestConsistentFieldNumbers 
       }
 
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
+          TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(
           new LogByteSizeMergePolicy()).setInfoStream(new FailOnNonBulkMergesInfoStream()));
       writer.forceMerge(1);
       writer.close();
@@ -268,12 +268,12 @@ public class TestConsistentFieldNumbers 
     int[][] docs = new int[NUM_DOCS][4];
     for (int i = 0; i < docs.length; i++) {
       for (int j = 0; j < docs[i].length;j++) {
-        docs[i][j] = random.nextInt(MAX_FIELDS);
+        docs[i][j] = random().nextInt(MAX_FIELDS);
       }
     }
 
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
 
     for (int i = 0; i < NUM_DOCS; i++) {
       Document d = new Document();

Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestCrash.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestCrash.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestCrash.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestCrash.java Sun Apr 15 14:41:44 2012
@@ -65,7 +65,7 @@ public class TestCrash extends LuceneTes
     // This test relies on being able to open a reader before any commit
     // happened, so we must create an initial commit just to allow that, but
     // before any documents were added.
-    IndexWriter writer = initIndex(random, true);
+    IndexWriter writer = initIndex(random(), true);
     MockDirectoryWrapper dir = (MockDirectoryWrapper) writer.getDirectory();
     crash(writer);
     IndexReader reader = IndexReader.open(dir);
@@ -78,11 +78,11 @@ public class TestCrash extends LuceneTes
     // This test relies on being able to open a reader before any commit
     // happened, so we must create an initial commit just to allow that, but
     // before any documents were added.
-    IndexWriter writer = initIndex(random, true);
+    IndexWriter writer = initIndex(random(), true);
     MockDirectoryWrapper dir = (MockDirectoryWrapper) writer.getDirectory();
     dir.setPreventDoubleWrite(false);
     crash(writer);
-    writer = initIndex(random, dir, false);
+    writer = initIndex(random(), dir, false);
     writer.close();
 
     IndexReader reader = IndexReader.open(dir);
@@ -92,10 +92,10 @@ public class TestCrash extends LuceneTes
   }
 
   public void testCrashAfterReopen() throws IOException {
-    IndexWriter writer = initIndex(random, false);
+    IndexWriter writer = initIndex(random(), false);
     MockDirectoryWrapper dir = (MockDirectoryWrapper) writer.getDirectory();
     writer.close();
-    writer = initIndex(random, dir, false);
+    writer = initIndex(random(), dir, false);
     assertEquals(314, writer.maxDoc());
     crash(writer);
 
@@ -116,7 +116,7 @@ public class TestCrash extends LuceneTes
 
   public void testCrashAfterClose() throws IOException {
     
-    IndexWriter writer = initIndex(random, false);
+    IndexWriter writer = initIndex(random(), false);
     MockDirectoryWrapper dir = (MockDirectoryWrapper) writer.getDirectory();
 
     writer.close();
@@ -137,7 +137,7 @@ public class TestCrash extends LuceneTes
 
   public void testCrashAfterCloseNoWait() throws IOException {
     
-    IndexWriter writer = initIndex(random, false);
+    IndexWriter writer = initIndex(random(), false);
     MockDirectoryWrapper dir = (MockDirectoryWrapper) writer.getDirectory();
 
     writer.close(false);

Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestCrashCausesCorruptIndex.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestCrashCausesCorruptIndex.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestCrashCausesCorruptIndex.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestCrashCausesCorruptIndex.java Sun Apr 15 14:41:44 2012
@@ -70,7 +70,7 @@ public class TestCrashCausesCorruptIndex
     // NOTE: cannot use RandomIndexWriter because it
     // sometimes commits:
     IndexWriter indexWriter = new IndexWriter(crashAfterCreateOutput,
-                                              newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+                                              newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
             
     indexWriter.addDocument(getDocument());
     // writes segments_1:
@@ -103,7 +103,7 @@ public class TestCrashCausesCorruptIndex
     // it doesn't know what to do with the created but empty
     // segments_2 file
     IndexWriter indexWriter = new IndexWriter(realDirectory,
-                                              newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+                                              newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
             
     // currently the test fails above.
     // however, to test the fix, the following lines should pass as well.

Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java Sun Apr 15 14:41:44 2012
@@ -57,15 +57,15 @@ public class TestCustomNorms extends Luc
     MockDirectoryWrapper dir = newDirectory();
     dir.setCheckIndexOnClose(false); // can't set sim to checkindex yet
     IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer(random));
+        new MockAnalyzer(random()));
     Similarity provider = new MySimProvider();
     config.setSimilarity(provider);
-    RandomIndexWriter writer = new RandomIndexWriter(random, dir, config);
-    final LineFileDocs docs = new LineFileDocs(random);
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);
+    final LineFileDocs docs = new LineFileDocs(random());
     int num = atLeast(100);
     for (int i = 0; i < num; i++) {
       Document doc = docs.nextDoc();
-      float nextFloat = random.nextFloat();
+      float nextFloat = random().nextFloat();
       Field f = new Field(floatTestField, "" + nextFloat, TextField.TYPE_STORED);
       f.setBoost(nextFloat);
 
@@ -98,16 +98,16 @@ public class TestCustomNorms extends Luc
     MockDirectoryWrapper dir = newDirectory();
     dir.setCheckIndexOnClose(false); // can't set sim to checkindex yet
     IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer(random));
+        new MockAnalyzer(random()));
     Similarity provider = new MySimProvider();
     config.setSimilarity(provider);
-    RandomIndexWriter writer = new RandomIndexWriter(random, dir, config);
-    final LineFileDocs docs = new LineFileDocs(random);
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);
+    final LineFileDocs docs = new LineFileDocs(random());
     int num = atLeast(100);
     try {
       for (int i = 0; i < num; i++) {
         Document doc = docs.nextDoc();
-        float nextFloat = random.nextFloat();
+        float nextFloat = random().nextFloat();
         Field f = new Field(exceptionTestField, "" + nextFloat,
             TextField.TYPE_STORED);
         f.setBoost(nextFloat);
@@ -142,7 +142,7 @@ public class TestCustomNorms extends Luc
       if (floatTestField.equals(field)) {
         return new FloatEncodingBoostSimilarity();
       } else if (exceptionTestField.equals(field)) {
-        return new RandomTypeSimilarity(random);
+        return new RandomTypeSimilarity(random());
       } else {
         return delegate;
       }

Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java Sun Apr 15 14:41:44 2012
@@ -203,7 +203,7 @@ public class TestDeletionPolicy extends 
     Directory dir = newDirectory();
     ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(dir, SECONDS);
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer(random))
+        new MockAnalyzer(random()))
         .setIndexDeletionPolicy(policy);
     MergePolicy mp = conf.getMergePolicy();
     if (mp instanceof LogMergePolicy) {
@@ -223,7 +223,7 @@ public class TestDeletionPolicy extends 
       // past commits
       lastDeleteTime = System.currentTimeMillis();
       conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-          new MockAnalyzer(random)).setOpenMode(
+          new MockAnalyzer(random())).setOpenMode(
           OpenMode.APPEND).setIndexDeletionPolicy(policy);
       mp = conf.getMergePolicy();
       if (mp instanceof LogMergePolicy) {
@@ -310,7 +310,7 @@ public class TestDeletionPolicy extends 
       policy.dir = dir;
 
       IndexWriterConfig conf = newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random))
+          TEST_VERSION_CURRENT, new MockAnalyzer(random()))
           .setIndexDeletionPolicy(policy).setMaxBufferedDocs(10)
           .setMergeScheduler(new SerialMergeScheduler());
       MergePolicy mp = conf.getMergePolicy();
@@ -331,7 +331,7 @@ public class TestDeletionPolicy extends 
       }
       if (needsMerging) {
         conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-                                    new MockAnalyzer(random)).setOpenMode(
+                                    new MockAnalyzer(random())).setOpenMode(
                                                                     OpenMode.APPEND).setIndexDeletionPolicy(policy);
         mp = conf.getMergePolicy();
         if (mp instanceof LogMergePolicy) {
@@ -379,7 +379,7 @@ public class TestDeletionPolicy extends 
           int preCount = dir.listAll().length;
           writer = new IndexWriter(dir, newIndexWriterConfig(
               TEST_VERSION_CURRENT,
-              new MockAnalyzer(random)).setOpenMode(
+              new MockAnalyzer(random())).setOpenMode(
               OpenMode.APPEND).setIndexDeletionPolicy(policy));
           writer.close();
           int postCount = dir.listAll().length;
@@ -403,7 +403,7 @@ public class TestDeletionPolicy extends 
 
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
             setIndexDeletionPolicy(policy).
             setMaxBufferedDocs(2).
             setMergePolicy(newLogMergePolicy(10))
@@ -425,7 +425,7 @@ public class TestDeletionPolicy extends 
     assertTrue(lastCommit != null);
 
     // Now add 1 doc and merge
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(policy));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(policy));
     addDoc(writer);
     assertEquals(11, writer.numDocs());
     writer.forceMerge(1);
@@ -434,7 +434,7 @@ public class TestDeletionPolicy extends 
     assertEquals(6, DirectoryReader.listCommits(dir).size());
 
     // Now open writer on the commit just before merge:
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
         .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
     assertEquals(10, writer.numDocs());
 
@@ -447,7 +447,7 @@ public class TestDeletionPolicy extends 
     assertEquals(11, r.numDocs());
     r.close();
 
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
         .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
     assertEquals(10, writer.numDocs());
     // Commits the rollback:
@@ -464,7 +464,7 @@ public class TestDeletionPolicy extends 
     r.close();
 
     // Re-merge
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(policy));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(policy));
     writer.forceMerge(1);
     writer.close();
 
@@ -475,7 +475,7 @@ public class TestDeletionPolicy extends 
 
     // Now open writer on the commit just before merging,
     // but this time keeping only the last commit:
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexCommit(lastCommit));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexCommit(lastCommit));
     assertEquals(10, writer.numDocs());
     
     // Reader still sees fully merged index, because writer
@@ -511,7 +511,7 @@ public class TestDeletionPolicy extends 
       Directory dir = newDirectory();
 
       IndexWriterConfig conf = newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random))
+          TEST_VERSION_CURRENT, new MockAnalyzer(random()))
           .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
           .setMaxBufferedDocs(10);
       MergePolicy mp = conf.getMergePolicy();
@@ -524,7 +524,7 @@ public class TestDeletionPolicy extends 
       }
       writer.close();
 
-      conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
+      conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
           .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
       mp = conf.getMergePolicy();
       if (mp instanceof LogMergePolicy) {
@@ -564,7 +564,7 @@ public class TestDeletionPolicy extends 
 
       for(int j=0;j<N+1;j++) {
         IndexWriterConfig conf = newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer(random))
+            TEST_VERSION_CURRENT, new MockAnalyzer(random()))
             .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
             .setMaxBufferedDocs(10);
         MergePolicy mp = conf.getMergePolicy();
@@ -625,7 +625,7 @@ public class TestDeletionPolicy extends 
 
       Directory dir = newDirectory();
       IndexWriterConfig conf = newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random))
+          TEST_VERSION_CURRENT, new MockAnalyzer(random()))
           .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
           .setMaxBufferedDocs(10);
       MergePolicy mp = conf.getMergePolicy();
@@ -640,7 +640,7 @@ public class TestDeletionPolicy extends 
       for(int i=0;i<N+1;i++) {
 
         conf = newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer(random))
+            TEST_VERSION_CURRENT, new MockAnalyzer(random()))
             .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy)
             .setMaxBufferedDocs(10);
         mp = conf.getMergePolicy();
@@ -653,7 +653,7 @@ public class TestDeletionPolicy extends 
         }
         // this is a commit
         writer.close();
-        conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
+        conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
           .setIndexDeletionPolicy(policy)
           .setMergePolicy(NoMergePolicy.COMPOUND_FILES);
         writer = new IndexWriter(dir, conf);
@@ -667,7 +667,7 @@ public class TestDeletionPolicy extends 
         reader.close();
 
         writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer(random))
+            TEST_VERSION_CURRENT, new MockAnalyzer(random()))
             .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy));
         // This will not commit: there are no changes
         // pending because we opened for "create":

Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java Sun Apr 15 14:41:44 2012
@@ -56,8 +56,8 @@ public class TestDirectoryReader extends
     Document doc2 = new Document();
     DocHelper.setupDoc(doc1);
     DocHelper.setupDoc(doc2);
-    DocHelper.writeDoc(random, dir, doc1);
-    DocHelper.writeDoc(random, dir, doc2);
+    DocHelper.writeDoc(random(), dir, doc1);
+    DocHelper.writeDoc(random(), dir, doc2);
     DirectoryReader reader = DirectoryReader.open(dir);
     assertTrue(reader != null);
     assertTrue(reader instanceof StandardDirectoryReader);
@@ -79,11 +79,11 @@ public class TestDirectoryReader extends
         
   public void testMultiTermDocs() throws IOException {
     Directory ramDir1=newDirectory();
-    addDoc(random, ramDir1, "test foo", true);
+    addDoc(random(), ramDir1, "test foo", true);
     Directory ramDir2=newDirectory();
-    addDoc(random, ramDir2, "test blah", true);
+    addDoc(random(), ramDir2, "test blah", true);
     Directory ramDir3=newDirectory();
-    addDoc(random, ramDir3, "test wow", true);
+    addDoc(random(), ramDir3, "test wow", true);
 
     IndexReader[] readers1 = new IndexReader[]{DirectoryReader.open(ramDir1), DirectoryReader.open(ramDir3)};
     IndexReader[] readers2 = new IndexReader[]{DirectoryReader.open(ramDir1), DirectoryReader.open(ramDir2), DirectoryReader.open(ramDir3)};
@@ -93,7 +93,7 @@ public class TestDirectoryReader extends
     // test mixing up TermDocs and TermEnums from different readers.
     TermsEnum te2 = MultiFields.getTerms(mr2, "body").iterator(null);
     te2.seekCeil(new BytesRef("wow"));
-    DocsEnum td = _TestUtil.docs(random, mr2,
+    DocsEnum td = _TestUtil.docs(random(), mr2,
                                  "body",
                                  te2.term(),
                                  MultiFields.getLiveDocs(mr2),
@@ -102,7 +102,7 @@ public class TestDirectoryReader extends
 
     TermsEnum te3 = MultiFields.getTerms(mr3, "body").iterator(null);
     te3.seekCeil(new BytesRef("wow"));
-    td = _TestUtil.docs(random, te3, MultiFields.getLiveDocs(mr3),
+    td = _TestUtil.docs(random(), te3, MultiFields.getLiveDocs(mr3),
                         td,
                         false);
     
@@ -139,7 +139,7 @@ public class TestDirectoryReader extends
   public void testIsCurrent() throws Exception {
     Directory d = newDirectory();
     IndexWriter writer = new IndexWriter(d, newIndexWriterConfig( 
-      TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+      TEST_VERSION_CURRENT, new MockAnalyzer(random())));
     addDocumentWithFields(writer);
     writer.close();
     // set up reader:
@@ -147,13 +147,13 @@ public class TestDirectoryReader extends
     assertTrue(reader.isCurrent());
     // modify index by adding another document:
     writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
+        new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
     addDocumentWithFields(writer);
     writer.close();
     assertFalse(reader.isCurrent());
     // re-create index:
     writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
+        new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
     addDocumentWithFields(writer);
     writer.close();
     assertFalse(reader.isCurrent());
@@ -170,7 +170,7 @@ public class TestDirectoryReader extends
       // set up writer
       IndexWriter writer = new IndexWriter(
           d,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
       );
 
       Document doc = new Document();
@@ -196,7 +196,7 @@ public class TestDirectoryReader extends
       // add more documents
       writer = new IndexWriter(
           d,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
               setOpenMode(OpenMode.APPEND).
               setMergePolicy(newLogMergePolicy())
       );
@@ -314,7 +314,7 @@ public void testTermVectors() throws Exc
   // set up writer
   IndexWriter writer = new IndexWriter(
       d,
-      newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+      newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
           setMergePolicy(newLogMergePolicy())
   );
   // want to get some more segments here
@@ -346,12 +346,12 @@ public void testTermVectors() throws Exc
   d.close();
 }
 
-static void assertTermDocsCount(String msg,
+void assertTermDocsCount(String msg,
                                    IndexReader reader,
                                    Term term,
                                    int expected)
   throws IOException {
-  DocsEnum tdocs = _TestUtil.docs(random, reader,
+  DocsEnum tdocs = _TestUtil.docs(random(), reader,
                                   term.field(),
                                   new BytesRef(term.text()),
                                   MultiFields.getLiveDocs(reader),
@@ -371,7 +371,7 @@ static void assertTermDocsCount(String m
       Directory dir = newDirectory();
       byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
       
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
       
       for (int i = 0; i < 10; i++) {
         addDoc(writer, "document number " + (i + 1));
@@ -380,7 +380,7 @@ static void assertTermDocsCount(String m
         addDocumentWithTermVectorFields(writer);
       }
       writer.close();
-      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
+      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
       Document doc = new Document();
       doc.add(new StoredField("bin1", bin));
       doc.add(new TextField("junk", "junk text"));
@@ -402,7 +402,7 @@ static void assertTermDocsCount(String m
       // force merge
 
 
-      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
+      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
       writer.forceMerge(1);
       writer.close();
       reader = DirectoryReader.open(dir);
@@ -440,7 +440,7 @@ public void testFilesOpenClose() throws 
       // Create initial data set
       File dirFile = _TestUtil.getTempDir("TestIndexReader.testFilesOpenClose");
       Directory dir = newFSDirectory(dirFile);
-      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
       addDoc(writer, "test");
       writer.close();
       dir.close();
@@ -450,7 +450,7 @@ public void testFilesOpenClose() throws 
       dir = newFSDirectory(dirFile);
 
       // Now create the data set again, just as before
-      writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
+      writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
       addDoc(writer, "test");
       writer.close();
       dir.close();
@@ -654,7 +654,7 @@ public void testFilesOpenClose() throws 
     // set up writer
     IndexWriter writer = new IndexWriter(
         d,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
             setMaxBufferedDocs(2).
             setMergePolicy(newLogMergePolicy(10))
     );
@@ -674,7 +674,7 @@ public void testFilesOpenClose() throws 
     // Change the index
     writer = new IndexWriter(
         d,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
             setOpenMode(OpenMode.APPEND).
             setMaxBufferedDocs(2).
             setMergePolicy(newLogMergePolicy(10))
@@ -690,7 +690,7 @@ public void testFilesOpenClose() throws 
     r2.close();
 
     writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-      new MockAnalyzer(random))
+      new MockAnalyzer(random()))
       .setOpenMode(OpenMode.APPEND));
     writer.forceMerge(1);
     writer.close();
@@ -735,7 +735,7 @@ public void testFilesOpenClose() throws 
     Directory dir = newDirectory();
     
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(random))
+        TEST_VERSION_CURRENT, new MockAnalyzer(random()))
         .setMaxBufferedDocs(2));
     writer.addDocument(createDocument("a"));
     writer.addDocument(createDocument("a"));
@@ -762,7 +762,7 @@ public void testFilesOpenClose() throws 
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
             setMergePolicy(newLogMergePolicy(10))
     );
     Document doc = new Document();
@@ -797,7 +797,7 @@ public void testFilesOpenClose() throws 
   // LUCENE-1586: getUniqueTermCount
   public void testUniqueTermCount() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
     Document doc = new Document();
     doc.add(newField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", TextField.TYPE_UNSTORED));
     doc.add(newField("number", "0 1 2 3 4 5 6 7 8 9", TextField.TYPE_UNSTORED));
@@ -826,7 +826,7 @@ public void testFilesOpenClose() throws 
   // LUCENE-1609: don't load terms index
   public void testNoTermsIndex() throws Throwable {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setCodec(_TestUtil.alwaysPostingsFormat(new Lucene40PostingsFormat())));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(_TestUtil.alwaysPostingsFormat(new Lucene40PostingsFormat())));
     Document doc = new Document();
     doc.add(newField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", TextField.TYPE_UNSTORED));
     doc.add(newField("number", "0 1 2 3 4 5 6 7 8 9", TextField.TYPE_UNSTORED));
@@ -845,7 +845,7 @@ public void testFilesOpenClose() throws 
     assertEquals(-1, ((SegmentReader) r.getSequentialSubReaders()[0]).getTermInfosIndexDivisor());
     writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
             setCodec(_TestUtil.alwaysPostingsFormat(new Lucene40PostingsFormat())).
             setMergePolicy(newLogMergePolicy(10))
     );
@@ -875,7 +875,7 @@ public void testFilesOpenClose() throws 
   public void testPrepareCommitIsCurrent() throws Throwable {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( 
-        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random())));
     writer.commit();
     Document doc = new Document();
     writer.addDocument(doc);
@@ -920,7 +920,7 @@ public void testFilesOpenClose() throws 
   // LUCENE-2812
   public void testIndexExists() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
     writer.addDocument(new Document());
     writer.prepareCommit();
     assertFalse(DirectoryReader.indexExists(dir));
@@ -934,7 +934,7 @@ public void testFilesOpenClose() throws 
   // dict cache
   public void testTotalTermFreqCached() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
     Document d = new Document();
     d.add(newField("f", "a a b", TextField.TYPE_UNSTORED));
     writer.addDocument(d);
@@ -955,7 +955,7 @@ public void testFilesOpenClose() throws 
   // LUCENE-2474
   public void testReaderFinishedListener() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
     ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(3);
     writer.addDocument(new Document());
     writer.commit();
@@ -988,7 +988,7 @@ public void testFilesOpenClose() throws 
   
   public void testOOBDocID() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
     writer.addDocument(new Document());
     DirectoryReader r = writer.getReader();
     writer.close();
@@ -1005,7 +1005,7 @@ public void testFilesOpenClose() throws 
   
   public void testTryIncRef() throws CorruptIndexException, LockObtainFailedException, IOException {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
     writer.addDocument(new Document());
     writer.commit();
     DirectoryReader r = DirectoryReader.open(dir);
@@ -1019,7 +1019,7 @@ public void testFilesOpenClose() throws 
   
   public void testStressTryIncRef() throws CorruptIndexException, LockObtainFailedException, IOException, InterruptedException {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
     writer.addDocument(new Document());
     writer.commit();
     DirectoryReader r = DirectoryReader.open(dir);
@@ -1027,7 +1027,7 @@ public void testFilesOpenClose() throws 
     
     IncThread[] threads = new IncThread[numThreads];
     for (int i = 0; i < threads.length; i++) {
-      threads[i] = new IncThread(r, random);
+      threads[i] = new IncThread(r, random());
       threads[i].start();
     }
     Thread.sleep(100);
@@ -1071,7 +1071,7 @@ public void testFilesOpenClose() throws 
   
   public void testLoadCertainFields() throws Exception {
     Directory dir = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random, dir);
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
     doc.add(newField("field1", "foobar", StringField.TYPE_STORED));
     doc.add(newField("field2", "foobaz", StringField.TYPE_STORED));

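Throughout the hunks above the change is mechanical: every read of the shared random field is replaced by a call to the random() accessor (with assertTermDocsCount also dropping its static modifier). A rough sketch of what such an accessor can look like follows; the ThreadLocal and the fixed seed are assumptions made for illustration only, not the actual LuceneTestCase implementation.

    import java.util.Random;

    // Illustrative sketch only: a per-thread Random behind a static accessor, assumed to
    // resemble in shape (not in implementation) the random() method the tests now call.
    public class RandomAccessorSketch {

      // One Random per thread; the fixed seed here is a placeholder, not Lucene's seeding.
      private static final ThreadLocal<Random> PER_THREAD = new ThreadLocal<Random>() {
        @Override
        protected Random initialValue() {
          return new Random(42L);
        }
      };

      // Call sites ask for random() instead of reading a shared field.
      public static Random random() {
        return PER_THREAD.get();
      }

      public static void main(String[] args) {
        // Same usage shape as the hunks above: pass random() wherever a Random is needed.
        byte b = (byte) random().nextInt(256);
        System.out.println("sampled byte: " + b);
      }
    }
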
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java Sun Apr 15 14:41:44 2012
@@ -35,14 +35,10 @@ import org.apache.lucene.document.FieldT
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.similarities.DefaultSimilarity;
-import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
 
@@ -51,7 +47,7 @@ public class TestDirectoryReaderReopen e
   public void testReopen() throws Exception {
     final Directory dir1 = newDirectory();
     
-    createIndex(random, dir1, false);
+    createIndex(random(), dir1, false);
     performDefaultTests(new TestReopen() {
 
       @Override
@@ -69,7 +65,7 @@ public class TestDirectoryReaderReopen e
     
     final Directory dir2 = newDirectory();
     
-    createIndex(random, dir2, true);
+    createIndex(random(), dir2, true);
     performDefaultTests(new TestReopen() {
 
       @Override
@@ -93,12 +89,12 @@ public class TestDirectoryReaderReopen e
   // try this once with reopen once recreate, on both RAMDir and FSDir.
   public void testCommitReopen () throws IOException {
     Directory dir = newDirectory();
-    doTestReopenWithCommit(random, dir, true);
+    doTestReopenWithCommit(random(), dir, true);
     dir.close();
   }
   public void testCommitRecreate () throws IOException {
     Directory dir = newDirectory();
-    doTestReopenWithCommit(random, dir, false);
+    doTestReopenWithCommit(random(), dir, false);
     dir.close();
   }
 
@@ -222,9 +218,9 @@ public class TestDirectoryReaderReopen e
   public void testThreadSafety() throws Exception {
     final Directory dir = newDirectory();
     // NOTE: this also controls the number of threads!
-    final int n = _TestUtil.nextInt(random, 20, 40);
+    final int n = _TestUtil.nextInt(random(), 20, 40);
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random())));
     for (int i = 0; i < n; i++) {
       writer.addDocument(createDocument(i, 3));
     }
@@ -235,7 +231,7 @@ public class TestDirectoryReaderReopen e
       @Override
       protected void modifyIndex(int i) throws IOException {
        IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
-         TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+         TEST_VERSION_CURRENT, new MockAnalyzer(random())));
        modifier.addDocument(createDocument(n + i, 6));
        modifier.close();
       }
@@ -249,7 +245,6 @@ public class TestDirectoryReaderReopen e
     final List<ReaderCouple> readers = Collections.synchronizedList(new ArrayList<ReaderCouple>());
     DirectoryReader firstReader = DirectoryReader.open(dir);
     DirectoryReader reader = firstReader;
-    final Random rnd = random;
     
     ReaderThread[] threads = new ReaderThread[n];
     final Set<DirectoryReader> readersToClose = Collections.synchronizedSet(new HashSet<DirectoryReader>());
@@ -273,6 +268,7 @@ public class TestDirectoryReaderReopen e
           
           @Override
           public void run() throws Exception {
+            Random rnd = LuceneTestCase.random();
             while (!stopped) {
               if (index % 2 == 0) {
                 // refresh reader synchronized
@@ -301,7 +297,7 @@ public class TestDirectoryReaderReopen e
                 }
               }
               synchronized(this) {
-                wait(_TestUtil.nextInt(random, 1, 100));
+                wait(_TestUtil.nextInt(random(), 1, 100));
               }
             }
           }
@@ -311,6 +307,7 @@ public class TestDirectoryReaderReopen e
         task = new ReaderThreadTask() {
           @Override
           public void run() throws Exception {
+            Random rnd = LuceneTestCase.random();
             while (!stopped) {
               int numReaders = readers.size();
               if (numReaders > 0) {
@@ -319,7 +316,7 @@ public class TestDirectoryReaderReopen e
               }
               
               synchronized(this) {
-                wait(_TestUtil.nextInt(random, 1, 100));
+                wait(_TestUtil.nextInt(random(), 1, 100));
               }
             }
           }
@@ -507,20 +504,20 @@ public class TestDirectoryReaderReopen e
         if (VERBOSE) {
           System.out.println("TEST: modify index");
         }
-        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
         w.deleteDocuments(new Term("field2", "a11"));
         w.deleteDocuments(new Term("field2", "b30"));
         w.close();
         break;
       }
       case 1: {
-        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
         w.forceMerge(1);
         w.close();
         break;
       }
       case 2: {
-        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
         w.addDocument(createDocument(101, 4));
         w.forceMerge(1);
         w.addDocument(createDocument(102, 4));
@@ -529,7 +526,7 @@ public class TestDirectoryReaderReopen e
         break;
       }
       case 3: {
-        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
         w.addDocument(createDocument(101, 4));
         w.close();
         break;
@@ -587,7 +584,7 @@ public class TestDirectoryReaderReopen e
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
             setIndexDeletionPolicy(new KeepAllCommits()).
             setMaxBufferedDocs(-1).
             setMergePolicy(newLogMergePolicy(10))

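In the threaded reopen test above, the captured "final Random rnd = random" is removed and each ReaderThreadTask now calls LuceneTestCase.random() from inside its own run(), so every worker draws randomness on the thread that consumes it. The sketch below shows that shape in isolation; ThreadLocalRandom is used here only as a stand-in for the framework's per-thread source.

    import java.util.Random;
    import java.util.concurrent.ThreadLocalRandom;

    // Sketch of obtaining randomness inside the worker thread instead of capturing a
    // shared Random in the enclosing scope.
    public class PerThreadRandomSketch {
      public static void main(String[] args) throws InterruptedException {
        Thread[] workers = new Thread[4];
        for (int i = 0; i < workers.length; i++) {
          workers[i] = new Thread(new Runnable() {
            @Override
            public void run() {
              // Fetch the Random on the thread that uses it, as the reworked
              // ReaderThreadTask.run() does with LuceneTestCase.random().
              Random rnd = ThreadLocalRandom.current();
              int pause = 1 + rnd.nextInt(100); // same range as nextInt(random(), 1, 100)
              try {
                Thread.sleep(pause);
              } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
              }
            }
          });
          workers[i].start();
        }
        for (Thread t : workers) {
          t.join();
        }
      }
    }
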
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDoc.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDoc.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDoc.java Sun Apr 15 14:41:44 2012
@@ -110,7 +110,7 @@ public class TestDoc extends LuceneTestC
       Directory directory = newFSDirectory(indexDir, null);
       IndexWriter writer = new IndexWriter(
           directory,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
               setOpenMode(OpenMode.CREATE).
               setMaxBufferedDocs(-1).
               setMergePolicy(newLogMergePolicy(10))
@@ -145,7 +145,7 @@ public class TestDoc extends LuceneTestC
       directory = newFSDirectory(indexDir, null);
       writer = new IndexWriter(
           directory,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
               setOpenMode(OpenMode.CREATE).
               setMaxBufferedDocs(-1).
               setMergePolicy(newLogMergePolicy(10))
@@ -189,7 +189,7 @@ public class TestDoc extends LuceneTestC
 
    private SegmentInfo merge(Directory dir, SegmentInfo si1, SegmentInfo si2, String merged, boolean useCompoundFile)
    throws Exception {
-      IOContext context = newIOContext(random);
+      IOContext context = newIOContext(random());
       SegmentReader r1 = new SegmentReader(si1, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, context);
       SegmentReader r2 = new SegmentReader(si2, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, context);
 
@@ -206,7 +206,7 @@ public class TestDoc extends LuceneTestC
                                                false, codec, fieldInfos);
       
       if (useCompoundFile) {
-        Collection<String> filesToDelete = IndexWriter.createCompoundFile(dir, merged + ".cfs", MergeState.CheckAbort.NONE, info, newIOContext(random));
+        Collection<String> filesToDelete = IndexWriter.createCompoundFile(dir, merged + ".cfs", MergeState.CheckAbort.NONE, info, newIOContext(random()));
         info.setUseCompoundFile(true);
         for (final String fileToDelete : filesToDelete) 
           si1.dir.deleteFile(fileToDelete);
@@ -218,7 +218,7 @@ public class TestDoc extends LuceneTestC
 
    private void printSegment(PrintWriter out, SegmentInfo si)
    throws Exception {
-      SegmentReader reader = new SegmentReader(si, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
+      SegmentReader reader = new SegmentReader(si, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
 
       for (int i = 0; i < reader.numDocs(); i++)
         out.println(reader.document(i));

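The TestDoc hunks pass random() into newIOContext, which is assumed to derive its I/O settings from the supplied Random so a run stays reproducible from the test seed. The generic sketch below shows that factory pattern with a hypothetical Config type; it does not reproduce Lucene's IOContext.

    import java.util.Random;

    // Generic sketch of a factory that derives a configuration from a caller-supplied
    // Random, in the spirit of newIOContext(random()) above. Config and its field are
    // hypothetical stand-ins.
    public class RandomConfigFactory {

      static final class Config {
        final int bufferSize;
        Config(int bufferSize) { this.bufferSize = bufferSize; }
      }

      // The caller passes the Random explicitly, so the choice repeats whenever the
      // same Random (and seed) is supplied.
      static Config newConfig(Random r) {
        int[] sizes = {1 << 10, 1 << 12, 1 << 14};
        return new Config(sizes[r.nextInt(sizes.length)]);
      }

      public static void main(String[] args) {
        Config c = newConfig(new Random(0L));
        System.out.println("buffer size: " + c.bufferSize);
      }
    }
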
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocCount.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocCount.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocCount.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocCount.java Sun Apr 15 14:41:44 2012
@@ -34,7 +34,7 @@ public class TestDocCount extends Lucene
     assumeFalse("PreFlex codec does not support docCount statistic!", 
         "Lucene3x".equals(Codec.getDefault().getName()));
     Directory dir = newDirectory();
-    RandomIndexWriter iw = new RandomIndexWriter(random, dir);
+    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
     int numDocs = atLeast(100);
     for (int i = 0; i < numDocs; i++) {
       iw.addDocument(doc());
@@ -52,9 +52,9 @@ public class TestDocCount extends Lucene
   
   private Document doc() {
     Document doc = new Document();
-    int numFields = _TestUtil.nextInt(random, 1, 10);
+    int numFields = _TestUtil.nextInt(random(), 1, 10);
     for (int i = 0; i < numFields; i++) {
-      doc.add(newField("" + _TestUtil.nextInt(random, 'a', 'z'), "" + _TestUtil.nextInt(random, 'a', 'z'), StringField.TYPE_UNSTORED));
+      doc.add(newField("" + _TestUtil.nextInt(random(), 'a', 'z'), "" + _TestUtil.nextInt(random(), 'a', 'z'), StringField.TYPE_UNSTORED));
     }
     return doc;
   }
@@ -75,7 +75,7 @@ public class TestDocCount extends Lucene
       FixedBitSet visited = new FixedBitSet(ir.maxDoc());
       TermsEnum te = terms.iterator(null);
       while (te.next() != null) {
-        DocsEnum de = _TestUtil.docs(random, te, null, null, false);
+        DocsEnum de = _TestUtil.docs(random(), te, null, null, false);
         while (de.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
           visited.set(de.docID());
         }

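TestDocCount builds documents from single-letter field names and values via _TestUtil.nextInt(random(), 'a', 'z'). The helper below sketches an inclusive bounded nextInt in that shape; the inclusive-bounds behaviour is an assumption made for this illustration.

    import java.util.Random;

    // Sketch of an inclusive bounded-int helper in the shape of
    // _TestUtil.nextInt(Random, start, end), with both endpoints included (assumed).
    public class BoundedRandomSketch {

      static int nextInt(Random r, int start, int end) {
        // end is included, hence the +1 on the range.
        return start + r.nextInt(end - start + 1);
      }

      public static void main(String[] args) {
        Random r = new Random(7L);
        int numFields = nextInt(r, 1, 10);            // as in TestDocCount.doc()
        for (int i = 0; i < numFields; i++) {
          char name  = (char) nextInt(r, 'a', 'z');   // single-letter field name
          char value = (char) nextInt(r, 'a', 'z');   // single-letter value
          System.out.println("field " + name + " = " + value);
        }
      }
    }
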
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocTermOrds.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocTermOrds.java?rev=1326351&r1=1326350&r2=1326351&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocTermOrds.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDocTermOrds.java Sun Apr 15 14:41:44 2012
@@ -50,7 +50,7 @@ public class TestDocTermOrds extends Luc
 
   public void testSimple() throws Exception {
     Directory dir = newDirectory();
-    final RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
+    final RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
     Document doc = new Document();
     Field field = newField("field", "", TextField.TYPE_UNSTORED);
     doc.add(field);
@@ -96,7 +96,7 @@ public class TestDocTermOrds extends Luc
     final int NUM_TERMS = atLeast(20);
     final Set<BytesRef> terms = new HashSet<BytesRef>();
     while(terms.size() < NUM_TERMS) {
-      final String s = _TestUtil.randomRealisticUnicodeString(random);
+      final String s = _TestUtil.randomRealisticUnicodeString(random());
       //final String s = _TestUtil.randomSimpleString(random);
       if (s.length() > 0) {
         terms.add(new BytesRef(s));
@@ -107,16 +107,16 @@ public class TestDocTermOrds extends Luc
     
     final int NUM_DOCS = atLeast(100);
 
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
 
     // Sometimes swap in codec that impls ord():
-    if (random.nextInt(10) == 7) {
+    if (random().nextInt(10) == 7) {
       // Make sure terms index has ords:
       Codec codec = _TestUtil.alwaysPostingsFormat(PostingsFormat.forName("Lucene40WithOrds"));
       conf.setCodec(codec);
     }
     
-    final RandomIndexWriter w = new RandomIndexWriter(random, dir, conf);
+    final RandomIndexWriter w = new RandomIndexWriter(random(), dir, conf);
 
     final int[][] idToOrds = new int[NUM_DOCS][];
     final Set<Integer> ordsForDocSet = new HashSet<Integer>();
@@ -126,9 +126,9 @@ public class TestDocTermOrds extends Luc
 
       doc.add(new IntField("id", id));
       
-      final int termCount = _TestUtil.nextInt(random, 0, 20*RANDOM_MULTIPLIER);
+      final int termCount = _TestUtil.nextInt(random(), 0, 20*RANDOM_MULTIPLIER);
       while(ordsForDocSet.size() < termCount) {
-        ordsForDocSet.add(random.nextInt(termsArray.length));
+        ordsForDocSet.add(random().nextInt(termsArray.length));
       }
       final int[] ordsForDoc = new int[termCount];
       int upto = 0;
@@ -181,12 +181,12 @@ public class TestDocTermOrds extends Luc
     MockDirectoryWrapper dir = newDirectory();
 
     final Set<String> prefixes = new HashSet<String>();
-    final int numPrefix = _TestUtil.nextInt(random, 2, 7);
+    final int numPrefix = _TestUtil.nextInt(random(), 2, 7);
     if (VERBOSE) {
       System.out.println("TEST: use " + numPrefix + " prefixes");
     }
     while(prefixes.size() < numPrefix) {
-      prefixes.add(_TestUtil.randomRealisticUnicodeString(random));
+      prefixes.add(_TestUtil.randomRealisticUnicodeString(random()));
       //prefixes.add(_TestUtil.randomSimpleString(random));
     }
     final String[] prefixesArray = prefixes.toArray(new String[prefixes.size()]);
@@ -194,7 +194,7 @@ public class TestDocTermOrds extends Luc
     final int NUM_TERMS = atLeast(20);
     final Set<BytesRef> terms = new HashSet<BytesRef>();
     while(terms.size() < NUM_TERMS) {
-      final String s = prefixesArray[random.nextInt(prefixesArray.length)] + _TestUtil.randomRealisticUnicodeString(random);
+      final String s = prefixesArray[random().nextInt(prefixesArray.length)] + _TestUtil.randomRealisticUnicodeString(random());
       //final String s = prefixesArray[random.nextInt(prefixesArray.length)] + _TestUtil.randomSimpleString(random);
       if (s.length() > 0) {
         terms.add(new BytesRef(s));
@@ -205,15 +205,15 @@ public class TestDocTermOrds extends Luc
     
     final int NUM_DOCS = atLeast(100);
 
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
 
     // Sometimes swap in codec that impls ord():
-    if (random.nextInt(10) == 7) {
+    if (random().nextInt(10) == 7) {
       Codec codec = _TestUtil.alwaysPostingsFormat(PostingsFormat.forName("Lucene40WithOrds"));
       conf.setCodec(codec);
     }
     
-    final RandomIndexWriter w = new RandomIndexWriter(random, dir, conf);
+    final RandomIndexWriter w = new RandomIndexWriter(random(), dir, conf);
 
     final int[][] idToOrds = new int[NUM_DOCS][];
     final Set<Integer> ordsForDocSet = new HashSet<Integer>();
@@ -223,9 +223,9 @@ public class TestDocTermOrds extends Luc
 
       doc.add(new IntField("id", id));
       
-      final int termCount = _TestUtil.nextInt(random, 0, 20*RANDOM_MULTIPLIER);
+      final int termCount = _TestUtil.nextInt(random(), 0, 20*RANDOM_MULTIPLIER);
       while(ordsForDocSet.size() < termCount) {
-        ordsForDocSet.add(random.nextInt(termsArray.length));
+        ordsForDocSet.add(random().nextInt(termsArray.length));
       }
       final int[] ordsForDoc = new int[termCount];
       int upto = 0;
@@ -302,7 +302,7 @@ public class TestDocTermOrds extends Luc
                                             "field",
                                             prefixRef,
                                             Integer.MAX_VALUE,
-                                            _TestUtil.nextInt(random, 2, 10));
+                                            _TestUtil.nextInt(random(), 2, 10));
                                             
 
     final int[] docIDToID = FieldCache.DEFAULT.getInts(r, "id", false);