Posted to commits@lucene.apache.org by si...@apache.org on 2011/04/13 13:22:31 UTC

svn commit: r1091748 [2/8] - in /lucene/dev/branches/realtime_search: ./ dev-tools/idea/lucene/contrib/ant/ dev-tools/idea/lucene/contrib/db/bdb-je/ dev-tools/idea/lucene/contrib/db/bdb/ dev-tools/idea/lucene/contrib/demo/ dev-tools/idea/lucene/contrib...
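
Note on the recurring change in the hunks below: every MockAnalyzer construction now receives the test's random (the java.util.Random supplied by LuceneTestCase), so the analyzer's randomized tokenization choices follow the test seed and a failing run can be reproduced. A minimal sketch of the pattern, assuming the LuceneTestCase test framework; the class and test method names here are invented for illustration and are not part of this commit:

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.MockAnalyzer;
    import org.apache.lucene.analysis.MockTokenizer;
    import org.apache.lucene.util.LuceneTestCase;

    public class MockAnalyzerSeedingSketch extends LuceneTestCase {
      public void testSeededAnalyzer() throws Exception {
        // Before this commit: new MockAnalyzer(MockTokenizer.WHITESPACE, false)
        // After: the per-test Random is threaded into every MockAnalyzer,
        // tying its randomized behavior to the reproducible test seed.
        Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
        assertNotNull(a);
      }
    }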

Modified: lucene/dev/branches/realtime_search/lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java Wed Apr 13 11:22:24 2011
@@ -90,7 +90,7 @@ public class HighlighterTest extends Bas
   Directory ramDir;
   public IndexSearcher searcher = null;
   int numHighlights = 0;
-  final Analyzer analyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
+  final Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
   TopDocs hits;
 
   String[] texts = {
@@ -101,7 +101,7 @@ public class HighlighterTest extends Bas
       "wordx wordy wordz wordx wordy wordx worda wordb wordy wordc", "y z x y z a b", "lets is a the lets is a the lets is a the lets" };
 
   public void testQueryScorerHits() throws Exception {
-    Analyzer analyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+    Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
     query = qp.parse("\"very long\"");
     searcher = new IndexSearcher(ramDir, true);
@@ -133,7 +133,7 @@ public class HighlighterTest extends Bas
 
     String s1 = "I call our world Flatland, not because we call it so,";
 
-    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
+    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
 
     // Verify that a query against the default field results in text being
     // highlighted
@@ -165,7 +165,7 @@ public class HighlighterTest extends Bas
    */
   private static String highlightField(Query query, String fieldName, String text)
       throws IOException, InvalidTokenOffsetsException {
-    TokenStream tokenStream = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true).tokenStream(fieldName, new StringReader(text));
+    TokenStream tokenStream = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true).tokenStream(fieldName, new StringReader(text));
     // Assuming "<B>", "</B>" used to highlight
     SimpleHTMLFormatter formatter = new SimpleHTMLFormatter();
     QueryScorer scorer = new QueryScorer(query, fieldName, FIELD_NAME);
@@ -210,7 +210,7 @@ public class HighlighterTest extends Bas
     String f2c = f2 + ":";
     String q = "(" + f1c + ph1 + " OR " + f2c + ph1 + ") AND (" + f1c + ph2
         + " OR " + f2c + ph2 + ")";
-    Analyzer analyzer = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, f1, analyzer);
     Query query = qp.parse(q);
 
@@ -1134,13 +1134,13 @@ public class HighlighterTest extends Bas
           sb.append("stoppedtoken");
         }
         SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
-        Highlighter hg = getHighlighter(query, "data", new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true).tokenStream(
+        Highlighter hg = getHighlighter(query, "data", new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true).tokenStream(
             "data", new StringReader(sb.toString())), fm);// new Highlighter(fm,
         // new
         // QueryTermScorer(query));
         hg.setTextFragmenter(new NullFragmenter());
         hg.setMaxDocCharsToAnalyze(100);
-        match = hg.getBestFragment(new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true), "data", sb.toString());
+        match = hg.getBestFragment(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true), "data", sb.toString());
         assertTrue("Matched text should be no more than 100 chars in length ", match.length() < hg
             .getMaxDocCharsToAnalyze());
 
@@ -1151,7 +1151,7 @@ public class HighlighterTest extends Bas
         // + whitespace)
         sb.append(" ");
         sb.append(goodWord);
-        match = hg.getBestFragment(new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true), "data", sb.toString());
+        match = hg.getBestFragment(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true), "data", sb.toString());
         assertTrue("Matched text should be no more than 100 chars in length ", match.length() < hg
             .getMaxDocCharsToAnalyze());
       }
@@ -1170,10 +1170,10 @@ public class HighlighterTest extends Bas
 
         String text = "this is a text with searchterm in it";
         SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
-        Highlighter hg = getHighlighter(query, "text", new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true).tokenStream("text", new StringReader(text)), fm);
+        Highlighter hg = getHighlighter(query, "text", new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true).tokenStream("text", new StringReader(text)), fm);
         hg.setTextFragmenter(new NullFragmenter());
         hg.setMaxDocCharsToAnalyze(36);
-        String match = hg.getBestFragment(new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true), "text", text);
+        String match = hg.getBestFragment(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true), "text", text);
         assertTrue(
             "Matched text should contain remainder of text after highlighted query ",
             match.endsWith("in it"));
@@ -1191,7 +1191,7 @@ public class HighlighterTest extends Bas
         // test to show how rewritten query can still be used
         if (searcher != null) searcher.close();
         searcher = new IndexSearcher(ramDir, true);
-        Analyzer analyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
+        Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
 
         QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
         Query query = parser.parse("JF? or Kenned*");
@@ -1446,64 +1446,64 @@ public class HighlighterTest extends Bas
         Highlighter highlighter;
         String result;
 
-        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("foo");
+        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("foo");
         highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
         result = highlighter.getBestFragments(getTS2(), s, 3, "...");
         assertEquals("Hi-Speed10 <B>foo</B>", result);
 
-        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("10");
+        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("10");
         highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
         result = highlighter.getBestFragments(getTS2(), s, 3, "...");
         assertEquals("Hi-Speed<B>10</B> foo", result);
 
-        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hi");
+        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hi");
         highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
         result = highlighter.getBestFragments(getTS2(), s, 3, "...");
         assertEquals("<B>Hi</B>-Speed10 foo", result);
 
-        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("speed");
+        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("speed");
         highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
         result = highlighter.getBestFragments(getTS2(), s, 3, "...");
         assertEquals("Hi-<B>Speed</B>10 foo", result);
 
-        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hispeed");
+        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hispeed");
         highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
         result = highlighter.getBestFragments(getTS2(), s, 3, "...");
         assertEquals("<B>Hi-Speed</B>10 foo", result);
 
-        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hi speed");
+        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hi speed");
         highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
         result = highlighter.getBestFragments(getTS2(), s, 3, "...");
         assertEquals("<B>Hi-Speed</B>10 foo", result);
 
         // ///////////////// same tests, just put the bigger overlapping token
         // first
-        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("foo");
+        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("foo");
         highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
         result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
         assertEquals("Hi-Speed10 <B>foo</B>", result);
 
-        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("10");
+        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("10");
         highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
         result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
         assertEquals("Hi-Speed<B>10</B> foo", result);
 
-        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hi");
+        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hi");
         highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
         result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
         assertEquals("<B>Hi</B>-Speed10 foo", result);
 
-        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("speed");
+        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("speed");
         highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
         result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
         assertEquals("Hi-<B>Speed</B>10 foo", result);
 
-        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hispeed");
+        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hispeed");
         highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
         result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
         assertEquals("<B>Hi-Speed</B>10 foo", result);
 
-        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hi speed");
+        query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hi speed");
         highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
         result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
         assertEquals("<B>Hi-Speed</B>10 foo", result);
@@ -1514,7 +1514,7 @@ public class HighlighterTest extends Bas
   }
   
   private Directory dir;
-  private Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+  private Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
   
   public void testWeightedTermsWithDeletes() throws IOException, ParseException, InvalidTokenOffsetsException {
     makeIndex();
@@ -1529,7 +1529,7 @@ public class HighlighterTest extends Bas
   }
   
   private void makeIndex() throws IOException {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
     writer.addDocument( doc( "t_text1", "more random words for second field del" ) );
     writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
@@ -1539,7 +1539,7 @@ public class HighlighterTest extends Bas
   }
   
   private void deleteDocument() throws IOException {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setOpenMode(OpenMode.APPEND));
     writer.deleteDocuments( new Term( "t_text1", "del" ) );
     // To see negative idf, keep comment the following line
     //writer.optimize();
@@ -1644,7 +1644,7 @@ public class HighlighterTest extends Bas
     dir = newDirectory();
     ramDir = newDirectory();
     IndexWriter writer = new IndexWriter(ramDir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)));
     for (String text : texts) {
       addDoc(writer, text);
     }

Modified: lucene/dev/branches/realtime_search/lucene/contrib/highlighter/src/test/org/apache/lucene/search/vectorhighlight/AbstractTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/highlighter/src/test/org/apache/lucene/search/vectorhighlight/AbstractTestCase.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/highlighter/src/test/org/apache/lucene/search/vectorhighlight/AbstractTestCase.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/highlighter/src/test/org/apache/lucene/search/vectorhighlight/AbstractTestCase.java Wed Apr 13 11:22:24 2011
@@ -87,9 +87,9 @@ public abstract class AbstractTestCase e
   @Override
   public void setUp() throws Exception {
     super.setUp();
-    analyzerW = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    analyzerW = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     analyzerB = new BigramAnalyzer();
-    analyzerK = new MockAnalyzer(MockTokenizer.KEYWORD, false);
+    analyzerK = new MockAnalyzer(random, MockTokenizer.KEYWORD, false);
     paW = new QueryParser(TEST_VERSION_CURRENT,  F, analyzerW );
     paB = new QueryParser(TEST_VERSION_CURRENT,  F, analyzerB );
     dir = newDirectory();

Modified: lucene/dev/branches/realtime_search/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java Wed Apr 13 11:22:24 2011
@@ -59,7 +59,7 @@ public class TestEmptyIndex extends Luce
 
     // make sure a Directory acts the same
     Directory d = newDirectory();
-    new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())).close();
+    new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
     r = IndexReader.open(d, false);
     testNorms(r);
     r.close();
@@ -84,7 +84,7 @@ public class TestEmptyIndex extends Luce
 
     // make sure a Directory acts the same
     Directory d = newDirectory();
-    new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())).close();
+    new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
     r = IndexReader.open(d, false);
     termsEnumTest(r);
     r.close();

Modified: lucene/dev/branches/realtime_search/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java Wed Apr 13 11:22:24 2011
@@ -21,6 +21,7 @@ import java.util.Arrays;
 import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Random;
 
 import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenStream;
@@ -65,7 +66,7 @@ public class TestIndicesEquals extends L
 
     // create dir data
     IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     
     for (int i = 0; i < 20; i++) {
       Document document = new Document();
@@ -88,10 +89,13 @@ public class TestIndicesEquals extends L
 
     Directory dir = newDirectory();
     InstantiatedIndex ii = new InstantiatedIndex();
-
+    
+    // we need to pass the "same" random to both, so they surely index the same payload data.
+    long seed = random.nextLong();
+    
     // create dir data
     IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(
-                                                                        TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+                                                                        TEST_VERSION_CURRENT, new MockAnalyzer(new Random(seed))).setMergePolicy(newLogMergePolicy()));
     indexWriter.setInfoStream(VERBOSE ? System.out : null);
     if (VERBOSE) {
       System.out.println("TEST: make test index");
@@ -104,7 +108,7 @@ public class TestIndicesEquals extends L
     indexWriter.close();
 
     // test ii writer
-    InstantiatedIndexWriter instantiatedIndexWriter = ii.indexWriterFactory(new MockAnalyzer(), true);
+    InstantiatedIndexWriter instantiatedIndexWriter = ii.indexWriterFactory(new MockAnalyzer(new Random(seed)), true);
     for (int i = 0; i < 500; i++) {
       Document document = new Document();
       assembleDocument(document, i);
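
The hunk above passes new Random(seed) with the same seed to both the IndexWriter's analyzer and the InstantiatedIndexWriter's analyzer so that both index identical randomized payload data. A standalone illustration of why that works (not part of the commit; class name is invented): two Random instances constructed from the same seed emit the same sequence.

    import java.util.Random;

    public class SameSeedSketch {
      public static void main(String[] args) {
        long seed = System.nanoTime();          // stand-in for random.nextLong()
        Random r1 = new Random(seed);
        Random r2 = new Random(seed);
        for (int i = 0; i < 5; i++) {
          // Identically seeded Randoms produce identical sequences.
          assert r1.nextInt() == r2.nextInt();
        }
        System.out.println("both Randoms agree for seed " + seed);
      }
    }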

Modified: lucene/dev/branches/realtime_search/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java Wed Apr 13 11:22:24 2011
@@ -34,17 +34,17 @@ public class TestUnoptimizedReaderOnCons
 
   public void test() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     addDocument(iw, "Hello, world!");
     addDocument(iw, "All work and no play makes jack a dull boy");
     iw.close();
 
-    iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     addDocument(iw, "Hello, tellus!");
     addDocument(iw, "All work and no play makes danny a dull boy");
     iw.close();
 
-    iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     addDocument(iw, "Hello, earth!");
     addDocument(iw, "All work and no play makes wendy a dull girl");
     iw.close();

Modified: lucene/dev/branches/realtime_search/lucene/contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java Wed Apr 13 11:22:24 2011
@@ -143,9 +143,9 @@ public class MemoryIndexTest extends Bas
    */
   private Analyzer randomAnalyzer() {
     switch(random.nextInt(3)) {
-      case 0: return new MockAnalyzer(MockTokenizer.SIMPLE, true);
-      case 1: return new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
-      default: return new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+      case 0: return new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
+      case 1: return new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
+      default: return new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     }
   }
   

Modified: lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java Wed Apr 13 11:22:24 2011
@@ -61,7 +61,7 @@ public class TestFieldNormModifier exten
     super.setUp();
     store = newDirectory();
     IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(
-                                                                     TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+                                                                     TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     
     for (int i = 0; i < NUM_DOCS; i++) {
       Document d = new Document();

Modified: lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java Wed Apr 13 11:22:24 2011
@@ -39,7 +39,7 @@ public class TestIndexSplitter extends L
     mergePolicy.setNoCFSRatio(1);
     IndexWriter iw = new IndexWriter(
         fsDir,
-        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.CREATE).
             setMergePolicy(mergePolicy)
     );

Modified: lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java Wed Apr 13 11:22:24 2011
@@ -32,7 +32,7 @@ public class TestMultiPassIndexSplitter 
   public void setUp() throws Exception {
     super.setUp();
     dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     Document doc;
     for (int i = 0; i < NUM_DOCS; i++) {
       doc = new Document();

Modified: lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java Wed Apr 13 11:22:24 2011
@@ -25,7 +25,7 @@ public class TestTermVectorAccessor exte
 
   public void test() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 
     Document doc;
 

Modified: lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/codecs/appending/TestAppendingCodec.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/codecs/appending/TestAppendingCodec.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/codecs/appending/TestAppendingCodec.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/index/codecs/appending/TestAppendingCodec.java Wed Apr 13 11:22:24 2011
@@ -134,7 +134,7 @@ public class TestAppendingCodec extends 
 
   public void testCodec() throws Exception {
     Directory dir = new AppendingRAMDirectory(random, new RAMDirectory());
-    IndexWriterConfig cfg = new IndexWriterConfig(Version.LUCENE_40, new MockAnalyzer());
+    IndexWriterConfig cfg = new IndexWriterConfig(Version.LUCENE_40, new MockAnalyzer(random));
     
     cfg.setCodecProvider(new AppendingCodecProvider());
     ((TieredMergePolicy)cfg.getMergePolicy()).setUseCompoundFile(false);

Modified: lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java Wed Apr 13 11:22:24 2011
@@ -40,7 +40,7 @@ public class TestHighFreqTerms extends L
   public static void setUpClass() throws Exception {
     dir = newDirectory();
     writer = new IndexWriter(dir, newIndexWriterConfig(random,
-       TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))
+       TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false))
        .setMaxBufferedDocs(2));
     writer.setInfoStream(VERBOSE ? System.out : null);
     indexDocs(writer);

Modified: lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java Wed Apr 13 11:22:24 2011
@@ -66,7 +66,7 @@ public class TestLengthNormModifier exte
       super.setUp();
       store = newDirectory();
 	IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(
-                                                                         TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+                                                                         TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
 	
 	for (int i = 0; i < NUM_DOCS; i++) {
 	    Document d = new Document();

Modified: lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java Wed Apr 13 11:22:24 2011
@@ -39,7 +39,7 @@ public class BooleanFilterTest extends L
 	public void setUp() throws Exception {
 	  super.setUp();
 		directory = newDirectory();
-		RandomIndexWriter writer = new RandomIndexWriter(random, directory, new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+		RandomIndexWriter writer = new RandomIndexWriter(random, directory, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
 		
 		//Add series of docs with filterable fields : acces rights, prices, dates and "in-stock" flags
 		addDoc(writer, "admin guest", "010", "20040101","Y");

Modified: lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java Wed Apr 13 11:22:24 2011
@@ -43,7 +43,7 @@ public class DuplicateFilterTest extends
 	public void setUp() throws Exception {
     super.setUp();
 		directory = newDirectory();
-		RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+		RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
 		
 		//Add series of docs with filterable fields : url, text and dates  flags
 		addDoc(writer, "http://lucene.apache.org", "lucene 1.4.3 available", "20040101");

Modified: lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java Wed Apr 13 11:22:24 2011
@@ -34,13 +34,13 @@ public class FuzzyLikeThisQueryTest exte
 	private Directory directory;
 	private IndexSearcher searcher;
 	private IndexReader reader;
-	private Analyzer analyzer=new MockAnalyzer();
+	private Analyzer analyzer=new MockAnalyzer(random);
 
 	@Override
 	public void setUp() throws Exception	{
 	  super.setUp();
 		directory = newDirectory();
-		RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+		RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
 		
 		//Add series of docs with misspelt names
 		addDoc(writer, "jonathon smythe","1");
@@ -121,7 +121,7 @@ public class FuzzyLikeThisQueryTest exte
 	}
 	
 	public void testFuzzyLikeThisQueryEquals() {
-	  Analyzer analyzer = new MockAnalyzer();
+	  Analyzer analyzer = new MockAnalyzer(random);
     FuzzyLikeThisQuery fltq1 = new FuzzyLikeThisQuery(10, analyzer);
     fltq1.addTerms("javi", "subject", 0.5f, 2);
     FuzzyLikeThisQuery fltq2 = new FuzzyLikeThisQuery(10, analyzer);

Modified: lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java Wed Apr 13 11:22:24 2011
@@ -56,7 +56,7 @@ public class TestSpanRegexQuery extends 
   public void testSpanRegex() throws Exception {
     Directory directory = newDirectory();
     IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     // doc.add(newField("field", "the quick brown fox jumps over the lazy dog",
     // Field.Store.NO, Field.Index.ANALYZED));
@@ -97,14 +97,14 @@ public class TestSpanRegexQuery extends 
 
     // creating first index writer
     IndexWriter writerA = new IndexWriter(indexStoreA, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
     writerA.addDocument(lDoc);
     writerA.optimize();
     writerA.close();
 
     // creating second index writer
     IndexWriter writerB = new IndexWriter(indexStoreB, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
     writerB.addDocument(lDoc2);
     writerB.optimize();
     writerB.close();

Modified: lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java Wed Apr 13 11:22:24 2011
@@ -74,7 +74,7 @@ public class TestMoreLikeThis extends Lu
     Map<String,Float> originalValues = getOriginalValues();
     
     MoreLikeThis mlt = new MoreLikeThis(reader);
-    mlt.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    mlt.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     mlt.setMinDocFreq(1);
     mlt.setMinTermFreq(1);
     mlt.setMinWordLen(1);
@@ -109,7 +109,7 @@ public class TestMoreLikeThis extends Lu
   private Map<String,Float> getOriginalValues() throws IOException {
     Map<String,Float> originalValues = new HashMap<String,Float>();
     MoreLikeThis mlt = new MoreLikeThis(reader);
-    mlt.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    mlt.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     mlt.setMinDocFreq(1);
     mlt.setMinTermFreq(1);
     mlt.setMinWordLen(1);

Modified: lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java Wed Apr 13 11:22:24 2011
@@ -34,7 +34,7 @@ import org.apache.lucene.util.LuceneTest
 
 public class TestComplexPhraseQuery extends LuceneTestCase {
   Directory rd;
-  Analyzer analyzer = new MockAnalyzer();
+  Analyzer analyzer = new MockAnalyzer(random);
 
   DocData docsContent[] = { new DocData("john smith", "1"),
       new DocData("johathon smith", "2"),

Modified: lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/ext/TestExtendableQueryParser.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/ext/TestExtendableQueryParser.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/ext/TestExtendableQueryParser.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/ext/TestExtendableQueryParser.java Wed Apr 13 11:22:24 2011
@@ -43,7 +43,7 @@ public class TestExtendableQueryParser e
   public QueryParser getParser(Analyzer a, Extensions extensions)
       throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     QueryParser qp = extensions == null ? new ExtendableQueryParser(
         TEST_VERSION_CURRENT, "field", a) : new ExtendableQueryParser(
         TEST_VERSION_CURRENT, "field", a, extensions);

Modified: lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/precedence/TestPrecedenceQueryParser.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/precedence/TestPrecedenceQueryParser.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/precedence/TestPrecedenceQueryParser.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/precedence/TestPrecedenceQueryParser.java Wed Apr 13 11:22:24 2011
@@ -125,7 +125,7 @@ public class TestPrecedenceQueryParser e
 
   public PrecedenceQueryParser getParser(Analyzer a) throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     PrecedenceQueryParser qp = new PrecedenceQueryParser();
     qp.setAnalyzer(a);
     qp.setDefaultOperator(Operator.OR);
@@ -171,7 +171,7 @@ public class TestPrecedenceQueryParser e
 
   public Query getQueryDOA(String query, Analyzer a) throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     PrecedenceQueryParser qp = new PrecedenceQueryParser();
     qp.setAnalyzer(a);
     qp.setDefaultOperator(Operator.AND);
@@ -232,7 +232,7 @@ public class TestPrecedenceQueryParser e
         "+(title:dog title:cat) -author:\"bob dole\"");
 
     PrecedenceQueryParser qp = new PrecedenceQueryParser();
-    qp.setAnalyzer(new MockAnalyzer());
+    qp.setAnalyzer(new MockAnalyzer(random));
     // make sure OR is the default:
     assertEquals(Operator.OR, qp.getDefaultOperator());
     qp.setDefaultOperator(Operator.AND);
@@ -246,7 +246,7 @@ public class TestPrecedenceQueryParser e
   }
 
   public void testPunct() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     assertQueryEquals("a&b", a, "a&b");
     assertQueryEquals("a&&b", a, "a&&b");
     assertQueryEquals(".NET", a, ".NET");
@@ -266,7 +266,7 @@ public class TestPrecedenceQueryParser e
     assertQueryEquals("term 1.0 1 2", null, "term");
     assertQueryEquals("term term1 term2", null, "term term term");
 
-    Analyzer a = new MockAnalyzer();
+    Analyzer a = new MockAnalyzer(random);
     assertQueryEquals("3", a, "3");
     assertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2");
     assertQueryEquals("term term1 term2", a, "term term1 term2");
@@ -405,7 +405,7 @@ public class TestPrecedenceQueryParser e
     final String defaultField = "default";
     final String monthField = "month";
     final String hourField = "hour";
-    PrecedenceQueryParser qp = new PrecedenceQueryParser(new MockAnalyzer());
+    PrecedenceQueryParser qp = new PrecedenceQueryParser(new MockAnalyzer(random));
 
     Map<CharSequence, DateTools.Resolution> fieldMap = new HashMap<CharSequence,DateTools.Resolution>();
     // set a field specific date resolution
@@ -467,7 +467,7 @@ public class TestPrecedenceQueryParser e
   }
 
   public void testEscaped() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
 
     assertQueryEquals("a\\-b:c", a, "a-b:c");
     assertQueryEquals("a\\+b:c", a, "a+b:c");
@@ -533,7 +533,7 @@ public class TestPrecedenceQueryParser e
 
   public void testBoost() throws Exception {
     CharacterRunAutomaton stopSet = new CharacterRunAutomaton(BasicAutomata.makeString("on"));
-    Analyzer oneStopAnalyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, stopSet, true);
+    Analyzer oneStopAnalyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopSet, true);
 
     PrecedenceQueryParser qp = new PrecedenceQueryParser();
     qp.setAnalyzer(oneStopAnalyzer);
@@ -548,7 +548,7 @@ public class TestPrecedenceQueryParser e
     q = qp.parse("\"on\"^1.0", "field");
     assertNotNull(q);
 
-    q = getParser(new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)).parse("the^3",
+    q = getParser(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)).parse("the^3",
         "field");
     assertNotNull(q);
   }
@@ -564,7 +564,7 @@ public class TestPrecedenceQueryParser e
   public void testBooleanQuery() throws Exception {
     BooleanQuery.setMaxClauseCount(2);
     try {
-      getParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("one two three", "field");
+      getParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("one two three", "field");
       fail("ParseException expected due to too many boolean clauses");
     } catch (QueryNodeException expected) {
       // too many boolean clauses, so ParseException is expected
@@ -573,7 +573,7 @@ public class TestPrecedenceQueryParser e
   
   // LUCENE-792
   public void testNOT() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     assertQueryEquals("NOT foo AND bar", a, "-foo +bar");
   }
 
@@ -582,7 +582,7 @@ public class TestPrecedenceQueryParser e
    * issue has been corrected.
    */
   public void testPrecedence() throws Exception {
-    PrecedenceQueryParser parser = getParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    PrecedenceQueryParser parser = getParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     Query query1 = parser.parse("A AND B OR C AND D", "field");
     Query query2 = parser.parse("(A AND B) OR (C AND D)", "field");
     assertEquals(query1, query2);

Modified: lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java Wed Apr 13 11:22:24 2011
@@ -80,7 +80,7 @@ public class TestMultiFieldQPHelper exte
     String[] fields = { "b", "t" };
     StandardQueryParser mfqp = new StandardQueryParser();
     mfqp.setMultiFields(fields);
-    mfqp.setAnalyzer(new MockAnalyzer());
+    mfqp.setAnalyzer(new MockAnalyzer(random));
 
     Query q = mfqp.parse("one", null);
     assertEquals("b:one t:one", q.toString());
@@ -150,7 +150,7 @@ public class TestMultiFieldQPHelper exte
     StandardQueryParser mfqp = new StandardQueryParser();
     mfqp.setMultiFields(fields);
     mfqp.setFieldsBoost(boosts);
-    mfqp.setAnalyzer(new MockAnalyzer());
+    mfqp.setAnalyzer(new MockAnalyzer(random));
 
     // Check for simple
     Query q = mfqp.parse("one", null);
@@ -178,24 +178,24 @@ public class TestMultiFieldQPHelper exte
   public void testStaticMethod1() throws QueryNodeException {
     String[] fields = { "b", "t" };
     String[] queries = { "one", "two" };
-    Query q = QueryParserUtil.parse(queries, fields, new MockAnalyzer());
+    Query q = QueryParserUtil.parse(queries, fields, new MockAnalyzer(random));
     assertEquals("b:one t:two", q.toString());
 
     String[] queries2 = { "+one", "+two" };
-    q = QueryParserUtil.parse(queries2, fields, new MockAnalyzer());
+    q = QueryParserUtil.parse(queries2, fields, new MockAnalyzer(random));
     assertEquals("(+b:one) (+t:two)", q.toString());
 
     String[] queries3 = { "one", "+two" };
-    q = QueryParserUtil.parse(queries3, fields, new MockAnalyzer());
+    q = QueryParserUtil.parse(queries3, fields, new MockAnalyzer(random));
     assertEquals("b:one (+t:two)", q.toString());
 
     String[] queries4 = { "one +more", "+two" };
-    q = QueryParserUtil.parse(queries4, fields, new MockAnalyzer());
+    q = QueryParserUtil.parse(queries4, fields, new MockAnalyzer(random));
     assertEquals("(b:one +b:more) (+t:two)", q.toString());
 
     String[] queries5 = { "blah" };
     try {
-      q = QueryParserUtil.parse(queries5, fields, new MockAnalyzer());
+      q = QueryParserUtil.parse(queries5, fields, new MockAnalyzer(random));
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception, array length differs
@@ -219,15 +219,15 @@ public class TestMultiFieldQPHelper exte
     BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
         BooleanClause.Occur.MUST_NOT };
     Query q = QueryParserUtil.parse("one", fields, flags,
-        new MockAnalyzer());
+        new MockAnalyzer(random));
     assertEquals("+b:one -t:one", q.toString());
 
-    q = QueryParserUtil.parse("one two", fields, flags, new MockAnalyzer());
+    q = QueryParserUtil.parse("one two", fields, flags, new MockAnalyzer(random));
     assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
 
     try {
       BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
-      q = QueryParserUtil.parse("blah", fields, flags2, new MockAnalyzer());
+      q = QueryParserUtil.parse("blah", fields, flags2, new MockAnalyzer(random));
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception, array length differs
@@ -240,19 +240,19 @@ public class TestMultiFieldQPHelper exte
         BooleanClause.Occur.MUST_NOT };
     StandardQueryParser parser = new StandardQueryParser();
     parser.setMultiFields(fields);
-    parser.setAnalyzer(new MockAnalyzer());
+    parser.setAnalyzer(new MockAnalyzer(random));
 
     Query q = QueryParserUtil.parse("one", fields, flags,
-        new MockAnalyzer());// , fields, flags, new
+        new MockAnalyzer(random));// , fields, flags, new
     // MockAnalyzer());
     assertEquals("+b:one -t:one", q.toString());
 
-    q = QueryParserUtil.parse("one two", fields, flags, new MockAnalyzer());
+    q = QueryParserUtil.parse("one two", fields, flags, new MockAnalyzer(random));
     assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
 
     try {
       BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
-      q = QueryParserUtil.parse("blah", fields, flags2, new MockAnalyzer());
+      q = QueryParserUtil.parse("blah", fields, flags2, new MockAnalyzer(random));
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception, array length differs
@@ -265,13 +265,13 @@ public class TestMultiFieldQPHelper exte
     BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
         BooleanClause.Occur.MUST_NOT, BooleanClause.Occur.SHOULD };
     Query q = QueryParserUtil.parse(queries, fields, flags,
-        new MockAnalyzer());
+        new MockAnalyzer(random));
     assertEquals("+f1:one -f2:two f3:three", q.toString());
 
     try {
       BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
       q = QueryParserUtil
-          .parse(queries, fields, flags2, new MockAnalyzer());
+          .parse(queries, fields, flags2, new MockAnalyzer(random));
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception, array length differs
@@ -284,13 +284,13 @@ public class TestMultiFieldQPHelper exte
     BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
         BooleanClause.Occur.MUST_NOT };
     Query q = QueryParserUtil.parse(queries, fields, flags,
-        new MockAnalyzer());
+        new MockAnalyzer(random));
     assertEquals("+b:one -t:two", q.toString());
 
     try {
       BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
       q = QueryParserUtil
-          .parse(queries, fields, flags2, new MockAnalyzer());
+          .parse(queries, fields, flags2, new MockAnalyzer(random));
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception, array length differs
@@ -316,7 +316,7 @@ public class TestMultiFieldQPHelper exte
   }
 
   public void testStopWordSearching() throws Exception {
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
     Directory ramDir = newDirectory();
     IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
     Document doc = new Document();
@@ -342,7 +342,7 @@ public class TestMultiFieldQPHelper exte
    * Return empty tokens for field "f1".
    */
   private static final class AnalyzerReturningNull extends Analyzer {
-    MockAnalyzer stdAnalyzer = new MockAnalyzer();
+    MockAnalyzer stdAnalyzer = new MockAnalyzer(random);
 
     public AnalyzerReturningNull() {
     }

Modified: lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java Wed Apr 13 11:22:24 2011
@@ -191,7 +191,7 @@ public class TestQPHelper extends Lucene
 
   public StandardQueryParser getParser(Analyzer a) throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     StandardQueryParser qp = new StandardQueryParser();
     qp.setAnalyzer(a);
 
@@ -281,7 +281,7 @@ public class TestQPHelper extends Lucene
 
   public Query getQueryDOA(String query, Analyzer a) throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     StandardQueryParser qp = new StandardQueryParser();
     qp.setAnalyzer(a);
     qp.setDefaultOperator(Operator.AND);
@@ -301,7 +301,7 @@ public class TestQPHelper extends Lucene
   }
 
   public void testConstantScoreAutoRewrite() throws Exception {
-    StandardQueryParser qp = new StandardQueryParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    StandardQueryParser qp = new StandardQueryParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     Query q = qp.parse("foo*bar", "field");
     assertTrue(q instanceof WildcardQuery);
     assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((MultiTermQuery) q).getRewriteMethod());
@@ -410,9 +410,9 @@ public class TestQPHelper extends Lucene
   public void testSimple() throws Exception {
     assertQueryEquals("\"term germ\"~2", null, "\"term germ\"~2");
     assertQueryEquals("term term term", null, "term term term");
-    assertQueryEquals("t�rm term term", new MockAnalyzer(MockTokenizer.WHITESPACE, false),
+    assertQueryEquals("t�rm term term", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false),
         "t�rm term term");
-    assertQueryEquals("�mlaut", new MockAnalyzer(MockTokenizer.WHITESPACE, false), "�mlaut");
+    assertQueryEquals("�mlaut", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false), "�mlaut");
 
     // FIXME: change MockAnalyzer to not extend CharTokenizer for this test
     //assertQueryEquals("\"\"", new KeywordAnalyzer(), "");
@@ -470,7 +470,7 @@ public class TestQPHelper extends Lucene
   }
 
   public void testPunct() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     assertQueryEquals("a&b", a, "a&b");
     assertQueryEquals("a&&b", a, "a&&b");
     assertQueryEquals(".NET", a, ".NET");
@@ -491,7 +491,7 @@ public class TestQPHelper extends Lucene
     assertQueryEquals("term 1.0 1 2", null, "term");
     assertQueryEquals("term term1 term2", null, "term term term");
 
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     assertQueryEquals("3", a, "3");
     assertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2");
     assertQueryEquals("term term1 term2", a, "term term1 term2");
@@ -726,7 +726,7 @@ public class TestQPHelper extends Lucene
   }
 
   public void testEscaped() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
 
     /*
      * assertQueryEquals("\\[brackets", a, "\\[brackets");
@@ -825,7 +825,7 @@ public class TestQPHelper extends Lucene
   }
 
   public void testQueryStringEscaping() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
 
     assertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c");
     assertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c");
@@ -866,7 +866,7 @@ public class TestQPHelper extends Lucene
   @Ignore("contrib queryparser shouldn't escape wildcard terms")
   public void testEscapedWildcard() throws Exception {
     StandardQueryParser qp = new StandardQueryParser();
-    qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
 
     WildcardQuery q = new WildcardQuery(new Term("field", "foo\\?ba?r"));
     assertEquals(q, qp.parse("foo\\?ba?r", "field"));
@@ -904,7 +904,7 @@ public class TestQPHelper extends Lucene
 
   public void testBoost() throws Exception {
     CharacterRunAutomaton stopSet = new CharacterRunAutomaton(BasicAutomata.makeString("on"));
-    Analyzer oneStopAnalyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, stopSet, true);
+    Analyzer oneStopAnalyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopSet, true);
     StandardQueryParser qp = new StandardQueryParser();
     qp.setAnalyzer(oneStopAnalyzer);
 
@@ -920,7 +920,7 @@ public class TestQPHelper extends Lucene
     assertNotNull(q);
 
     StandardQueryParser qp2 = new StandardQueryParser();
-    qp2.setAnalyzer(new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
+    qp2.setAnalyzer(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
 
     q = qp2.parse("the^3", "field");
     // "the" is a stop word so the result is an empty query:
@@ -950,7 +950,7 @@ public class TestQPHelper extends Lucene
 
   public void testCustomQueryParserWildcard() {
     try {
-      new QPTestParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("a?t", "contents");
+      new QPTestParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("a?t", "contents");
       fail("Wildcard queries should not be allowed");
     } catch (QueryNodeException expected) {
       // expected exception
@@ -959,7 +959,7 @@ public class TestQPHelper extends Lucene
 
   public void testCustomQueryParserFuzzy() throws Exception {
     try {
-      new QPTestParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("xunit~", "contents");
+      new QPTestParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("xunit~", "contents");
       fail("Fuzzy queries should not be allowed");
     } catch (QueryNodeException expected) {
       // expected exception
@@ -970,7 +970,7 @@ public class TestQPHelper extends Lucene
     BooleanQuery.setMaxClauseCount(2);
     try {
       StandardQueryParser qp = new StandardQueryParser();
-      qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+      qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
 
       qp.parse("one two three", "field");
       fail("ParseException expected due to too many boolean clauses");
@@ -984,7 +984,7 @@ public class TestQPHelper extends Lucene
    */
   public void testPrecedence() throws Exception {
     StandardQueryParser qp = new StandardQueryParser();
-    qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
 
     Query query1 = qp.parse("A AND B OR C AND D", "field");
     Query query2 = qp.parse("+A +B +C +D", "field");
@@ -995,7 +995,7 @@ public class TestQPHelper extends Lucene
 // Todo: Convert from DateField to DateUtil
 //  public void testLocalDateFormat() throws IOException, QueryNodeException {
 //    Directory ramDir = newDirectory();
-//    IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+//    IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
 //    addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
 //    addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
 //    iw.close();
@@ -1116,7 +1116,7 @@ public class TestQPHelper extends Lucene
   public void testStopwords() throws Exception {
     StandardQueryParser qp = new StandardQueryParser();
     CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|foo").toAutomaton());
-    qp.setAnalyzer(new MockAnalyzer(MockTokenizer.SIMPLE, true, stopSet, true));
+    qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopSet, true));
 
     Query result = qp.parse("a:the OR a:foo", "a");
     assertNotNull("result is null and it shouldn't be", result);
@@ -1140,7 +1140,7 @@ public class TestQPHelper extends Lucene
   public void testPositionIncrement() throws Exception {
     StandardQueryParser qp = new StandardQueryParser();
     qp.setAnalyzer(
-        new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
+        new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
 
     qp.setEnablePositionIncrements(true);
 
@@ -1161,7 +1161,7 @@ public class TestQPHelper extends Lucene
 
   public void testMatchAllDocs() throws Exception {
     StandardQueryParser qp = new StandardQueryParser();
-    qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
 
     assertEquals(new MatchAllDocsQuery(), qp.parse("*:*", "field"));
     assertEquals(new MatchAllDocsQuery(), qp.parse("(*:*)", "field"));
@@ -1173,7 +1173,7 @@ public class TestQPHelper extends Lucene
   private void assertHits(int expected, String query, IndexSearcher is)
       throws IOException, QueryNodeException {
     StandardQueryParser qp = new StandardQueryParser();
-    qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     qp.setLocale(Locale.ENGLISH);
 
     Query q = qp.parse(query, "date");

Modified: lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/surround/query/SingleFieldTestDb.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/surround/query/SingleFieldTestDb.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/surround/query/SingleFieldTestDb.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/surround/query/SingleFieldTestDb.java Wed Apr 13 11:22:24 2011
@@ -41,7 +41,7 @@ public class SingleFieldTestDb {
       fieldName = fName;
       IndexWriter writer = new IndexWriter(db, new IndexWriterConfig(
           Version.LUCENE_CURRENT,
-          new MockAnalyzer()));
+          new MockAnalyzer(random)));
       for (int j = 0; j < docs.length; j++) {
         Document d = new Document();
         d.add(new Field(fieldName, docs[j], Field.Store.NO, Field.Index.ANALYZED));

Modified: lucene/dev/branches/realtime_search/lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestCartesian.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestCartesian.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestCartesian.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestCartesian.java Wed Apr 13 11:22:24 2011
@@ -71,7 +71,7 @@ public class TestCartesian extends Lucen
     super.setUp();
     directory = newDirectory();
 
-    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     
     setUpPlotter( 2, 15);
     

Modified: lucene/dev/branches/realtime_search/lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestDistance.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestDistance.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestDistance.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestDistance.java Wed Apr 13 11:22:24 2011
@@ -47,7 +47,7 @@ public class TestDistance extends Lucene
   public void setUp() throws Exception {
     super.setUp();
     directory = newDirectory();
-    writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     addData(writer);
     
   }

Modified: lucene/dev/branches/realtime_search/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestDirectSpellChecker.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestDirectSpellChecker.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestDirectSpellChecker.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestDirectSpellChecker.java Wed Apr 13 11:22:24 2011
@@ -35,7 +35,7 @@ public class TestDirectSpellChecker exte
     spellChecker.setMinQueryLength(0);
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir, 
-        new MockAnalyzer(MockTokenizer.SIMPLE, true));
+        new MockAnalyzer(random, MockTokenizer.SIMPLE, true));
 
     for (int i = 0; i < 20; i++) {
       Document doc = new Document();
@@ -93,7 +93,7 @@ public class TestDirectSpellChecker exte
   public void testOptions() throws Exception {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir, 
-        new MockAnalyzer(MockTokenizer.SIMPLE, true));
+        new MockAnalyzer(random, MockTokenizer.SIMPLE, true));
 
     Document doc = new Document();
     doc.add(newField("text", "foobar", Field.Store.NO, Field.Index.ANALYZED));

Modified: lucene/dev/branches/realtime_search/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestLuceneDictionary.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestLuceneDictionary.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestLuceneDictionary.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestLuceneDictionary.java Wed Apr 13 11:22:24 2011
@@ -46,7 +46,7 @@ public class TestLuceneDictionary extend
   public void setUp() throws Exception {
     super.setUp();
     store = newDirectory();
-    IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+    IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
 
     Document doc;
 

Modified: lucene/dev/branches/realtime_search/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java Wed Apr 13 11:22:24 2011
@@ -54,7 +54,7 @@ public class TestSpellChecker extends Lu
     //create a user index
     userindex = newDirectory();
     IndexWriter writer = new IndexWriter(userindex, new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 
     for (int i = 0; i < 1000; i++) {
       Document doc = new Document();

Modified: lucene/dev/branches/realtime_search/lucene/contrib/wordnet/src/test/org/apache/lucene/wordnet/TestWordnet.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/wordnet/src/test/org/apache/lucene/wordnet/TestWordnet.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/wordnet/src/test/org/apache/lucene/wordnet/TestWordnet.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/wordnet/src/test/org/apache/lucene/wordnet/TestWordnet.java Wed Apr 13 11:22:24 2011
@@ -63,7 +63,7 @@ public class TestWordnet extends LuceneT
   
   private void assertExpandsTo(String term, String expected[]) throws IOException {
     Query expandedQuery = SynExpand.expand(term, searcher, new 
-        MockAnalyzer(), "field", 1F);
+        MockAnalyzer(random), "field", 1F);
     BooleanQuery expectedQuery = new BooleanQuery();
     for (String t : expected)
       expectedQuery.add(new TermQuery(new Term("field", t)), 

Modified: lucene/dev/branches/realtime_search/lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java Wed Apr 13 11:22:24 2011
@@ -49,7 +49,7 @@ public class TestParser extends LuceneTe
 	@BeforeClass
 	public static void beforeClass() throws Exception {
 	  // TODO: rewrite test (this needs to set QueryParser.enablePositionIncrements, too, for work with CURRENT):
-	  Analyzer analyzer=new MockAnalyzer(MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET, false); 
+	  Analyzer analyzer=new MockAnalyzer(random, MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET, false); 
     //initialize the parser
 	  builder=new CorePlusExtensionsParser("contents",analyzer);
 		

Modified: lucene/dev/branches/realtime_search/lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestQueryTemplateManager.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestQueryTemplateManager.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestQueryTemplateManager.java (original)
+++ lucene/dev/branches/realtime_search/lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestQueryTemplateManager.java Wed Apr 13 11:22:24 2011
@@ -44,7 +44,7 @@ import org.xml.sax.SAXException;
 public class TestQueryTemplateManager extends LuceneTestCase {
 
 	CoreParser builder;
-	Analyzer analyzer=new MockAnalyzer();
+	Analyzer analyzer=new MockAnalyzer(random);
 	private IndexSearcher searcher;
 	private Directory dir;
 	

Modified: lucene/dev/branches/realtime_search/lucene/src/java/org/apache/lucene/index/CheckIndex.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/java/org/apache/lucene/index/CheckIndex.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/java/org/apache/lucene/index/CheckIndex.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/java/org/apache/lucene/index/CheckIndex.java Wed Apr 13 11:22:24 2011
@@ -661,10 +661,13 @@ public class CheckIndex {
           status.termCount++;
 
           final DocsEnum docs2;
+          final boolean hasPositions;
           if (postings != null) {
             docs2 = postings;
+            hasPositions = true;
           } else {
             docs2 = docs;
+            hasPositions = false;
           }
 
           int lastDoc = -1;
@@ -736,22 +739,60 @@ public class CheckIndex {
 
           // Test skipping
           if (docFreq >= 16) {
-            for(int idx=0;idx<7;idx++) {
-              final int skipDocID = (int) (((idx+1)*(long) maxDoc)/8);
-              docs = terms.docs(delDocs, docs);
-              final int docID = docs.advance(skipDocID);
-              if (docID == DocsEnum.NO_MORE_DOCS) {
-                break;
-              } else {
-                if (docID < skipDocID) {
-                  throw new RuntimeException("term " + term + ": advance(docID=" + skipDocID + ") returned docID=" + docID);
-                }
-                final int nextDocID = docs.nextDoc();
-                if (nextDocID == DocsEnum.NO_MORE_DOCS) {
+            if (hasPositions) {
+              for(int idx=0;idx<7;idx++) {
+                final int skipDocID = (int) (((idx+1)*(long) maxDoc)/8);
+                postings = terms.docsAndPositions(delDocs, postings);
+                final int docID = postings.advance(skipDocID);
+                if (docID == DocsEnum.NO_MORE_DOCS) {
                   break;
+                } else {
+                  if (docID < skipDocID) {
+                    throw new RuntimeException("term " + term + ": advance(docID=" + skipDocID + ") returned docID=" + docID);
+                  }
+                  final int freq = postings.freq();
+                  if (freq <= 0) {
+                    throw new RuntimeException("termFreq " + freq + " is out of bounds");
+                  }
+                  int lastPosition = -1;
+                  for(int posUpto=0;posUpto<freq;posUpto++) {
+                    final int pos = postings.nextPosition();
+                    if (pos < 0) {
+                      throw new RuntimeException("position " + pos + " is out of bounds");
+                    }
+                    if (pos <= lastPosition) {
+                      throw new RuntimeException("position " + pos + " is <= lastPosition " + lastPosition);
+                    }
+                    lastPosition = pos;
+                  } 
+
+                  final int nextDocID = postings.nextDoc();
+                  if (nextDocID == DocsEnum.NO_MORE_DOCS) {
+                    break;
+                  }
+                  if (nextDocID <= docID) {
+                    throw new RuntimeException("term " + term + ": advance(docID=" + skipDocID + "), then .next() returned docID=" + nextDocID + " vs prev docID=" + docID);
+                  }
                 }
-                if (nextDocID <= docID) {
-                  throw new RuntimeException("term " + term + ": advance(docID=" + skipDocID + "), then .next() returned docID=" + nextDocID + " vs prev docID=" + docID);
+              }
+            } else {
+              for(int idx=0;idx<7;idx++) {
+                final int skipDocID = (int) (((idx+1)*(long) maxDoc)/8);
+                docs = terms.docs(delDocs, docs);
+                final int docID = docs.advance(skipDocID);
+                if (docID == DocsEnum.NO_MORE_DOCS) {
+                  break;
+                } else {
+                  if (docID < skipDocID) {
+                    throw new RuntimeException("term " + term + ": advance(docID=" + skipDocID + ") returned docID=" + docID);
+                  }
+                  final int nextDocID = docs.nextDoc();
+                  if (nextDocID == DocsEnum.NO_MORE_DOCS) {
+                    break;
+                  }
+                  if (nextDocID <= docID) {
+                    throw new RuntimeException("term " + term + ": advance(docID=" + skipDocID + "), then .next() returned docID=" + nextDocID + " vs prev docID=" + docID);
+                  }
                 }
               }
             }
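
The new branch above extends CheckIndex's skipping test: when the term has positions, each advance() target is followed by a freq()/nextPosition() scan that requires a positive freq and strictly increasing, non-negative positions before nextDoc() is checked. A condensed sketch of that per-target check, assuming a DocsAndPositionsEnum cursor like the "postings" enum used above (the helper class, method name and signature are illustrative, not part of the patch):

    import java.io.IOException;
    import org.apache.lucene.index.DocsAndPositionsEnum;
    import org.apache.lucene.index.DocsEnum;
    import org.apache.lucene.util.BytesRef;

    final class SkipCheckSketch {
      static void checkSkipWithPositions(DocsAndPositionsEnum postings,
                                         BytesRef term, int skipDocID) throws IOException {
        final int docID = postings.advance(skipDocID);
        if (docID == DocsEnum.NO_MORE_DOCS) {
          return;                                    // enum exhausted: nothing left to verify
        }
        if (docID < skipDocID) {
          throw new RuntimeException("term " + term + ": advance(docID=" + skipDocID
              + ") returned docID=" + docID);
        }
        final int freq = postings.freq();
        if (freq <= 0) {
          throw new RuntimeException("termFreq " + freq + " is out of bounds");
        }
        int lastPosition = -1;
        for (int posUpto = 0; posUpto < freq; posUpto++) {
          final int pos = postings.nextPosition();   // must be >= 0 and strictly increasing
          if (pos < 0 || pos <= lastPosition) {
            throw new RuntimeException("position " + pos + " is out of bounds (last="
                + lastPosition + ")");
          }
          lastPosition = pos;
        }
        final int nextDocID = postings.nextDoc();    // the next doc must come after the advanced doc
        if (nextDocID != DocsEnum.NO_MORE_DOCS && nextDocID <= docID) {
          throw new RuntimeException("term " + term + ": advance(docID=" + skipDocID
              + "), then .next() returned docID=" + nextDocID + " vs prev docID=" + docID);
        }
      }
    }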

Modified: lucene/dev/branches/realtime_search/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexReader.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexReader.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexReader.java Wed Apr 13 11:22:24 2011
@@ -18,6 +18,9 @@ package org.apache.lucene.index.codecs;
  */
 
 import java.io.IOException;
+import java.io.FileOutputStream;   // for toDot
+import java.io.OutputStreamWriter; // for toDot
+import java.io.Writer;             // for toDot
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -34,6 +37,7 @@ import org.apache.lucene.util.automaton.
 import org.apache.lucene.util.automaton.fst.BytesRefFSTEnum;
 import org.apache.lucene.util.automaton.fst.FST;
 import org.apache.lucene.util.automaton.fst.PositiveIntOutputs;
+import org.apache.lucene.util.automaton.fst.Util; // for toDot
 
 /** See {@link VariableGapTermsIndexWriter}
  * 
@@ -52,11 +56,13 @@ public class VariableGapTermsIndexReader
   // start of the field info data
   protected long dirOffset;
 
+  final String segment;
+
   public VariableGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, String segment, int indexDivisor, String codecId)
     throws IOException {
 
     in = dir.openInput(IndexFileNames.segmentFileName(segment, codecId, VariableGapTermsIndexWriter.TERMS_INDEX_EXTENSION));
-    
+    this.segment = segment;
     boolean success = false;
 
     try {
@@ -176,6 +182,14 @@ public class VariableGapTermsIndexReader
         fst = new FST<Long>(clone, fstOutputs);
         clone.close();
 
+        /*
+        final String dotFileName = segment + "_" + fieldInfo.name + ".dot";
+        Writer w = new OutputStreamWriter(new FileOutputStream(dotFileName));
+        Util.toDot(fst, w, false, false);
+        System.out.println("FST INDEX: SAVED to " + dotFileName);
+        w.close();
+        */
+
         if (indexDivisor > 1) {
           // subsample
           final PositiveIntOutputs outputs = PositiveIntOutputs.getSingleton(true);
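
The imports marked "for toDot" and the commented-out block above are a debugging hook: when enabled, the freshly loaded index FST for each field is written out as a GraphViz .dot file via Util.toDot. A minimal sketch of that usage as a standalone helper, assuming the FST and Util classes imported above (the helper class and method are illustrative, and the GraphViz rendering hint is an assumption about how the .dot output would typically be viewed):

    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.OutputStreamWriter;
    import java.io.Writer;
    import org.apache.lucene.util.automaton.fst.FST;
    import org.apache.lucene.util.automaton.fst.Util;

    final class FstDotDumpSketch {
      /** Writes the FST as GraphViz .dot; render with e.g. "dot -Tpng file.dot -o fst.png". */
      static <T> void dump(FST<T> fst, String segment, String fieldName) throws IOException {
        final String dotFileName = segment + "_" + fieldName + ".dot";
        final Writer w = new OutputStreamWriter(new FileOutputStream(dotFileName));
        try {
          Util.toDot(fst, w, false, false);   // same boolean flags as the commented-out call
          System.out.println("FST INDEX: SAVED to " + dotFileName);
        } finally {
          w.close();
        }
      }
    }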

Modified: lucene/dev/branches/realtime_search/lucene/src/java/org/apache/lucene/index/codecs/preflex/TermInfosReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/java/org/apache/lucene/index/codecs/preflex/TermInfosReader.java?rev=1091748&r1=1091747&r2=1091748&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/java/org/apache/lucene/index/codecs/preflex/TermInfosReader.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/java/org/apache/lucene/index/codecs/preflex/TermInfosReader.java Wed Apr 13 11:22:24 2011
@@ -310,7 +310,7 @@ public final class TermInfosReader {
         }
       } else {
         assert sameTermInfo(ti, tiOrd, enumerator);
-        assert (int) enumerator.position == tiOrd.termOrd;
+        assert enumerator.position == tiOrd.termOrd;
       }
     } else {
       ti = null;