Posted to commits@lucene.apache.org by rm...@apache.org on 2011/10/01 05:05:07 UTC

svn commit: r1177888 [15/16] - in /lucene/dev/branches/lucene2621: ./ dev-tools/eclipse/ dev-tools/idea/lucene/contrib/ dev-tools/maven/ lucene/ lucene/contrib/ lucene/contrib/demo/src/java/org/apache/lucene/demo/ lucene/contrib/demo/src/java/org/apach...

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestIndonesianStemFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestIndonesianStemFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestIndonesianStemFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestIndonesianStemFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -22,9 +22,9 @@ import java.io.StringReader;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure the Indonesian stem filter factory is working.
@@ -35,7 +35,7 @@ public class TestIndonesianStemFilterFac
    */
   public void testStemming() throws Exception {
     Reader reader = new StringReader("dibukukannya");
-    Tokenizer tokenizer = new WhitespaceTokenizer(DEFAULT_VERSION, reader);
+    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
     IndonesianStemFilterFactory factory = new IndonesianStemFilterFactory();
     Map<String,String> args = new HashMap<String,String>();
     factory.init(args);
@@ -48,7 +48,7 @@ public class TestIndonesianStemFilterFac
    */
   public void testStemmingInflectional() throws Exception {
     Reader reader = new StringReader("dibukukannya");
-    Tokenizer tokenizer = new WhitespaceTokenizer(DEFAULT_VERSION, reader);
+    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
     IndonesianStemFilterFactory factory = new IndonesianStemFilterFactory();
     Map<String,String> args = new HashMap<String,String>();
     args.put("stemDerivational", "false");

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestItalianLightStemFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestItalianLightStemFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestItalianLightStemFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestItalianLightStemFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -20,8 +20,8 @@ package org.apache.solr.analysis;
 import java.io.Reader;
 import java.io.StringReader;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure the Italian light stem factory is working.
@@ -30,7 +30,7 @@ public class TestItalianLightStemFilterF
   public void testStemming() throws Exception {
     Reader reader = new StringReader("ragazzo ragazzi");
     ItalianLightStemFilterFactory factory = new ItalianLightStemFilterFactory();
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, new String[] { "ragazz", "ragazz" });
   }
 }

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestKStemFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestKStemFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestKStemFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestKStemFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -3,8 +3,8 @@ package org.apache.solr.analysis;
 import java.io.Reader;
 import java.io.StringReader;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -30,7 +30,7 @@ public class TestKStemFilterFactory exte
   public void testStemming() throws Exception {
     Reader reader = new StringReader("bricks");
     KStemFilterFactory factory = new KStemFilterFactory();
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, new String[] { "brick" });
   }
 }

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestKeywordMarkerFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestKeywordMarkerFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestKeywordMarkerFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestKeywordMarkerFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -23,8 +23,8 @@ import java.io.StringReader;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 import org.apache.lucene.analysis.en.PorterStemFilter;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.solr.common.ResourceLoader;
@@ -36,7 +36,7 @@ import org.apache.solr.core.SolrResource
 public class TestKeywordMarkerFilterFactory extends BaseTokenTestCase {
   public void testKeywords() throws IOException {
     Reader reader = new StringReader("dogs cats");
-    Tokenizer tokenizer = new WhitespaceTokenizer(DEFAULT_VERSION, reader);
+    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
     KeywordMarkerFilterFactory factory = new KeywordMarkerFilterFactory();
     Map<String,String> args = new HashMap<String,String>(DEFAULT_VERSION_PARAM);
     ResourceLoader loader = new SolrResourceLoader(null, null);
@@ -50,7 +50,7 @@ public class TestKeywordMarkerFilterFact
   
   public void testKeywordsCaseInsensitive() throws IOException {
     Reader reader = new StringReader("dogs cats Cats");
-    Tokenizer tokenizer = new WhitespaceTokenizer(DEFAULT_VERSION, reader);
+    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
     KeywordMarkerFilterFactory factory = new KeywordMarkerFilterFactory();
     Map<String,String> args = new HashMap<String,String>(DEFAULT_VERSION_PARAM);
     ResourceLoader loader = new SolrResourceLoader(null, null);

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestLatvianStemFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestLatvianStemFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestLatvianStemFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestLatvianStemFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -20,8 +20,8 @@ package org.apache.solr.analysis;
 import java.io.Reader;
 import java.io.StringReader;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure the Latvian stem factory is working.
@@ -30,7 +30,7 @@ public class TestLatvianStemFilterFactor
   public void testStemming() throws Exception {
     Reader reader = new StringReader("tirgiem tirgus");
     LatvianStemFilterFactory factory = new LatvianStemFilterFactory();
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, new String[] { "tirg", "tirg" });
   }
 }

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestMultiWordSynonyms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestMultiWordSynonyms.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestMultiWordSynonyms.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestMultiWordSynonyms.java Sat Oct  1 03:04:53 2011
@@ -19,7 +19,6 @@ package org.apache.solr.analysis;
 
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 import org.apache.solr.common.ResourceLoader;
 
 import java.io.ByteArrayInputStream;
@@ -46,7 +45,7 @@ public class TestMultiWordSynonyms exten
     SlowSynonymMap synMap = new SlowSynonymMap(true);
     SlowSynonymFilterFactory.parseRules(rules, synMap, "=>", ",", true, null);
 
-    SlowSynonymFilter ts = new SlowSynonymFilter(new WhitespaceTokenizer(DEFAULT_VERSION, new StringReader("a e")), synMap);
+    SlowSynonymFilter ts = new SlowSynonymFilter(new MockTokenizer(new StringReader("a e"), MockTokenizer.WHITESPACE, false), synMap);
     // This fails because ["e","e"] is the value of the token stream
     assertTokenStreamContents(ts, new String[] { "a", "e" });
   }

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestNGramFilters.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestNGramFilters.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestNGramFilters.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestNGramFilters.java Sat Oct  1 03:04:53 2011
@@ -22,9 +22,9 @@ import java.io.StringReader;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure the NGram filter factories are working.
@@ -64,7 +64,7 @@ public class TestNGramFilters extends Ba
     Map<String,String> args = new HashMap<String,String>();
     NGramFilterFactory factory = new NGramFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, 
         new String[] { "t", "e", "s", "t", "te", "es", "st" });
   }
@@ -78,7 +78,7 @@ public class TestNGramFilters extends Ba
     args.put("maxGramSize", "3");
     NGramFilterFactory factory = new NGramFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, 
         new String[] { "te", "es", "st", "tes", "est" });
   }
@@ -129,7 +129,7 @@ public class TestNGramFilters extends Ba
     Map<String,String> args = new HashMap<String,String>();
     EdgeNGramFilterFactory factory = new EdgeNGramFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, 
         new String[] { "t" });
   }
@@ -143,7 +143,7 @@ public class TestNGramFilters extends Ba
     args.put("maxGramSize", "2");
     EdgeNGramFilterFactory factory = new EdgeNGramFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, 
         new String[] { "t", "te" });
   }
@@ -156,7 +156,7 @@ public class TestNGramFilters extends Ba
     args.put("side", "back");
     EdgeNGramFilterFactory factory = new EdgeNGramFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, 
         new String[] { "y" });
   }

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPatternReplaceCharFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPatternReplaceCharFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPatternReplaceCharFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPatternReplaceCharFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -24,8 +24,8 @@ import java.util.Map;
 
 import org.apache.lucene.analysis.CharReader;
 import org.apache.lucene.analysis.CharStream;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure this factory is working
@@ -44,7 +44,7 @@ public class TestPatternReplaceCharFilte
     factory.init(args);
     CharStream cs = factory.create(
           CharReader.get( new StringReader( BLOCK ) ) );
-    TokenStream ts = new WhitespaceTokenizer(DEFAULT_VERSION, cs );
+    TokenStream ts = new MockTokenizer(cs, MockTokenizer.WHITESPACE, false);
     assertTokenStreamContents(ts,
         new String[] { "this", "is", "test." },
         new int[] { 0, 5, 8 },
@@ -61,8 +61,11 @@ public class TestPatternReplaceCharFilte
     factory.init(args);
     CharStream cs = factory.create(
           CharReader.get( new StringReader( BLOCK ) ) );
-    TokenStream ts = new WhitespaceTokenizer(DEFAULT_VERSION, cs );
+    TokenStream ts = new MockTokenizer(cs, MockTokenizer.WHITESPACE, false);
+    ts.reset();
     assertFalse(ts.incrementToken());
+    ts.end();
+    ts.close();
   }
   
   // 012345678
@@ -77,7 +80,7 @@ public class TestPatternReplaceCharFilte
     factory.init(args);
     CharStream cs = factory.create(
           CharReader.get( new StringReader( BLOCK ) ) );
-    TokenStream ts = new WhitespaceTokenizer(DEFAULT_VERSION, cs );
+    TokenStream ts = new MockTokenizer(cs, MockTokenizer.WHITESPACE, false);
     assertTokenStreamContents(ts,
         new String[] { "aa#bb#cc" },
         new int[] { 0 },
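
The explicit reset()/end()/close() calls added in the second hunk above are needed because MockTokenizer enforces the TokenStream consumer contract that assertTokenStreamContents otherwise handles internally; a bare assertFalse(ts.incrementToken()) would trip its state machine. A sketch of that contract when draining any TokenStream ts by hand:

    // Consumer contract checked by MockTokenizer:
    CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
    ts.reset();                  // mandatory before the first incrementToken()
    while (ts.incrementToken()) {
      // read per-token attributes here, e.g. term.toString()
    }
    ts.end();                    // end-of-stream work, e.g. final offset
    ts.close();                  // releases the underlying Reader

(CharTermAttribute is org.apache.lucene.analysis.tokenattributes.CharTermAttribute.)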

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPatternReplaceFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPatternReplaceFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPatternReplaceFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPatternReplaceFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -17,8 +17,8 @@
 
 package org.apache.solr.analysis;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 import java.io.StringReader;
 import java.util.HashMap;
@@ -37,7 +37,7 @@ public class TestPatternReplaceFilterFac
     args.put("replacement", "-");
     factory.init(args);
     TokenStream ts = factory.create
-            (new WhitespaceTokenizer(DEFAULT_VERSION, new StringReader(input)));
+            (new MockTokenizer(new StringReader(input), MockTokenizer.WHITESPACE, false));
                    
     assertTokenStreamContents(ts, 
         new String[] { "-foo-foo-foo-", "-", "c-" });

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPersianNormalizationFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPersianNormalizationFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPersianNormalizationFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPersianNormalizationFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -20,9 +20,9 @@ package org.apache.solr.analysis;
 import java.io.Reader;
 import java.io.StringReader;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure the Persian normalization factory is working.
@@ -33,7 +33,7 @@ public class TestPersianNormalizationFil
    */
   public void testNormalization() throws Exception {
     Reader reader = new StringReader("های");
-    Tokenizer tokenizer = new WhitespaceTokenizer(DEFAULT_VERSION, reader);
+    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
     PersianNormalizationFilterFactory factory = new PersianNormalizationFilterFactory();
     TokenStream stream = factory.create(tokenizer);
     assertTokenStreamContents(stream, new String[] { "هاي" });

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPhoneticFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPhoneticFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPhoneticFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPhoneticFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -22,9 +22,9 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.commons.codec.language.Metaphone;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 
 /**
@@ -89,8 +89,7 @@ public class TestPhoneticFilterFactory e
   
   static void assertAlgorithm(String algName, String inject, String input,
       String[] expected) throws Exception {
-    Tokenizer tokenizer = new WhitespaceTokenizer(DEFAULT_VERSION,
-        new StringReader(input));
+    Tokenizer tokenizer = new MockTokenizer(new StringReader(input), MockTokenizer.WHITESPACE, false);
     Map<String,String> args = new HashMap<String,String>();
     args.put("encoder", algName);
     args.put("inject", inject);

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPorterStemFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPorterStemFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPorterStemFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPorterStemFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -20,9 +20,9 @@ package org.apache.solr.analysis;
 import java.io.Reader;
 import java.io.StringReader;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure the Porter stem filter factory is working.
@@ -33,7 +33,7 @@ public class TestPorterStemFilterFactory
    */
   public void testStemming() throws Exception {
     Reader reader = new StringReader("dogs");
-    Tokenizer tokenizer = new WhitespaceTokenizer(DEFAULT_VERSION, reader);
+    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
     PorterStemFilterFactory factory = new PorterStemFilterFactory();
     TokenStream stream = factory.create(tokenizer);
     assertTokenStreamContents(stream, new String[] { "dog" });

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPortugueseLightStemFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPortugueseLightStemFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPortugueseLightStemFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPortugueseLightStemFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -20,8 +20,8 @@ package org.apache.solr.analysis;
 import java.io.Reader;
 import java.io.StringReader;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure the Portuguese Light stem factory is working.
@@ -30,7 +30,7 @@ public class TestPortugueseLightStemFilt
   public void testStemming() throws Exception {
     Reader reader = new StringReader("evidentemente");
     PortugueseLightStemFilterFactory factory = new PortugueseLightStemFilterFactory();
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, new String[] { "evident" });
   }
 }

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPortugueseMinimalStemFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPortugueseMinimalStemFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPortugueseMinimalStemFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPortugueseMinimalStemFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -20,8 +20,8 @@ package org.apache.solr.analysis;
 import java.io.Reader;
 import java.io.StringReader;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure the Portuguese Minimal stem factory is working.
@@ -30,7 +30,7 @@ public class TestPortugueseMinimalStemFi
   public void testStemming() throws Exception {
     Reader reader = new StringReader("questões");
     PortugueseMinimalStemFilterFactory factory = new PortugueseMinimalStemFilterFactory();
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, new String[] { "questão" });
   }
 }

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPortugueseStemFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPortugueseStemFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPortugueseStemFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestPortugueseStemFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -20,8 +20,8 @@ package org.apache.solr.analysis;
 import java.io.Reader;
 import java.io.StringReader;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure the Portuguese stem factory is working.
@@ -30,7 +30,7 @@ public class TestPortugueseStemFilterFac
   public void testStemming() throws Exception {
     Reader reader = new StringReader("maluquice");
     PortugueseStemFilterFactory factory = new PortugueseStemFilterFactory();
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, new String[] { "maluc" });
   }
 }

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestReverseStringFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestReverseStringFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestReverseStringFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestReverseStringFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -20,9 +20,9 @@ package org.apache.solr.analysis;
 import java.io.Reader;
 import java.io.StringReader;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure the Reverse string filter factory is working.
@@ -33,7 +33,7 @@ public class TestReverseStringFilterFact
    */
   public void testReversing() throws Exception {
     Reader reader = new StringReader("simple test");
-    Tokenizer tokenizer = new WhitespaceTokenizer(DEFAULT_VERSION, reader);
+    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
     ReverseStringFilterFactory factory = new ReverseStringFilterFactory();
     factory.init(DEFAULT_VERSION_PARAM);
     TokenStream stream = factory.create(tokenizer);

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestReversedWildcardFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestReversedWildcardFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestReversedWildcardFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestReversedWildcardFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -25,8 +25,8 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 import org.apache.lucene.search.AutomatonQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.automaton.Automaton;
@@ -66,7 +66,7 @@ public class TestReversedWildcardFilterF
     String text = "simple text";
     args.put("withOriginal", "true");
     factory.init(args);
-    TokenStream input = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, new StringReader(text)));
+    TokenStream input = factory.create(new MockTokenizer(new StringReader(text), MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(input, 
         new String[] { "\u0001elpmis", "simple", "\u0001txet", "text" },
         new int[] { 1, 0, 1, 0 });
@@ -74,7 +74,7 @@ public class TestReversedWildcardFilterF
     // now without original tokens
     args.put("withOriginal", "false");
     factory.init(args);
-    input = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, new StringReader(text)));
+    input = factory.create(new MockTokenizer(new StringReader(text), MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(input,
         new String[] { "\u0001elpmis", "\u0001txet" },
         new int[] { 1, 1 });

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestRussianLightStemFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestRussianLightStemFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestRussianLightStemFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestRussianLightStemFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -20,8 +20,8 @@ package org.apache.solr.analysis;
 import java.io.Reader;
 import java.io.StringReader;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure the Russian light stem factory is working.
@@ -30,7 +30,7 @@ public class TestRussianLightStemFilterF
   public void testStemming() throws Exception {
     Reader reader = new StringReader("журналы");
     RussianLightStemFilterFactory factory = new RussianLightStemFilterFactory();
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, new String[] { "журнал" });
   }
 }

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestShingleFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestShingleFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestShingleFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestShingleFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -22,8 +22,8 @@ import java.io.StringReader;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure the Shingle filter factory works.
@@ -37,7 +37,7 @@ public class TestShingleFilterFactory ex
     Map<String,String> args = new HashMap<String,String>();
     ShingleFilterFactory factory = new ShingleFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, new String[] {"this", "this is", "is",
         "is a", "a", "a test", "test"});
   }
@@ -51,7 +51,7 @@ public class TestShingleFilterFactory ex
     args.put("outputUnigrams", "false");
     ShingleFilterFactory factory = new ShingleFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream,
         new String[] {"this is", "is a", "a test"});
   }
@@ -65,7 +65,7 @@ public class TestShingleFilterFactory ex
     args.put("maxShingleSize", "3");
     ShingleFilterFactory factory = new ShingleFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, 
         new String[] {"this", "this is", "this is a", "is",
         "is a", "is a test", "a", "a test", "test"});
@@ -81,7 +81,7 @@ public class TestShingleFilterFactory ex
     args.put("maxShingleSize", "4");
     ShingleFilterFactory factory = new ShingleFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, 
         new String[] { "this", "this is a", "this is a test",
         "is", "is a test", "a", "test" });
@@ -98,7 +98,7 @@ public class TestShingleFilterFactory ex
     args.put("outputUnigrams", "false");
     ShingleFilterFactory factory = new ShingleFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, 
         new String[] { "this is a", "this is a test", "is a test" });
   }
@@ -113,7 +113,7 @@ public class TestShingleFilterFactory ex
     args.put("maxShingleSize", "3");
     ShingleFilterFactory factory = new ShingleFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, 
          new String[] { "this", "this is a", "is", "is a test", "a", "test" });
   }
@@ -129,7 +129,7 @@ public class TestShingleFilterFactory ex
     args.put("outputUnigrams", "false");
     ShingleFilterFactory factory = new ShingleFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream,
         new String[] { "this is a", "is a test" });
   }
@@ -143,7 +143,7 @@ public class TestShingleFilterFactory ex
     args.put("tokenSeparator", "=BLAH=");
     ShingleFilterFactory factory = new ShingleFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, 
         new String[] { "this", "this=BLAH=is", "is", "is=BLAH=a", 
         "a", "a=BLAH=test", "test" });
@@ -159,7 +159,7 @@ public class TestShingleFilterFactory ex
     args.put("outputUnigrams", "false");
     ShingleFilterFactory factory = new ShingleFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, 
         new String[] { "this=BLAH=is", "is=BLAH=a", "a=BLAH=test" });
   }
@@ -173,7 +173,7 @@ public class TestShingleFilterFactory ex
     args.put("tokenSeparator", "");
     ShingleFilterFactory factory = new ShingleFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, 
         new String[] { "this", "thisis", "is", "isa", "a", "atest", "test" });
   }
@@ -190,7 +190,7 @@ public class TestShingleFilterFactory ex
     args.put("tokenSeparator", "=BLAH=");
     ShingleFilterFactory factory = new ShingleFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, 
         new String[] { "this", "this=BLAH=is=BLAH=a", 
         "this=BLAH=is=BLAH=a=BLAH=test", "is", 
@@ -211,7 +211,7 @@ public class TestShingleFilterFactory ex
     args.put("outputUnigrams", "false");
     ShingleFilterFactory factory = new ShingleFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, 
         new String[] { "this=BLAH=is=BLAH=a", "this=BLAH=is=BLAH=a=BLAH=test", 
         "is=BLAH=a=BLAH=test", });
@@ -232,7 +232,7 @@ public class TestShingleFilterFactory ex
     args.put("outputUnigramsIfNoShingles", "true");
     ShingleFilterFactory factory = new ShingleFilterFactory();
     factory.init(args);
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, new String[] { "test" });
   }
 }

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestSpanishLightStemFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestSpanishLightStemFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestSpanishLightStemFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestSpanishLightStemFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -20,8 +20,8 @@ package org.apache.solr.analysis;
 import java.io.Reader;
 import java.io.StringReader;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure the Spanish Light stem factory is working.
@@ -30,7 +30,7 @@ public class TestSpanishLightStemFilterF
   public void testStemming() throws Exception {
     Reader reader = new StringReader("sociedades");
     SpanishLightStemFilterFactory factory = new SpanishLightStemFilterFactory();
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, new String[] { "sociedad" });
   }
 }

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestStandardFactories.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestStandardFactories.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestStandardFactories.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestStandardFactories.java Sat Oct  1 03:04:53 2011
@@ -22,9 +22,9 @@ import java.io.StringReader;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure the standard lucene factories are working.
@@ -158,7 +158,7 @@ public class TestStandardFactories exten
    */
   public void testASCIIFolding() throws Exception {
     Reader reader = new StringReader("Česká");
-    Tokenizer tokenizer = new WhitespaceTokenizer(DEFAULT_VERSION, reader);
+    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
     ASCIIFoldingFilterFactory factory = new ASCIIFoldingFilterFactory();
     factory.init(DEFAULT_VERSION_PARAM);
     TokenStream stream = factory.create(tokenizer);

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestStemmerOverrideFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestStemmerOverrideFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestStemmerOverrideFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestStemmerOverrideFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -23,8 +23,8 @@ import java.io.StringReader;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 import org.apache.lucene.analysis.en.PorterStemFilter;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.solr.common.ResourceLoader;
@@ -37,7 +37,7 @@ public class TestStemmerOverrideFilterFa
   public void testKeywords() throws IOException {
     // our stemdict stems dogs to 'cat'
     Reader reader = new StringReader("testing dogs");
-    Tokenizer tokenizer = new WhitespaceTokenizer(DEFAULT_VERSION, reader);
+    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
     StemmerOverrideFilterFactory factory = new StemmerOverrideFilterFactory();
     Map<String,String> args = new HashMap<String,String>(DEFAULT_VERSION_PARAM);
     ResourceLoader loader = new SolrResourceLoader(null, null);
@@ -51,7 +51,7 @@ public class TestStemmerOverrideFilterFa
   
   public void testKeywordsCaseInsensitive() throws IOException {
     Reader reader = new StringReader("testing DoGs");
-    Tokenizer tokenizer = new WhitespaceTokenizer(DEFAULT_VERSION, reader);
+    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
     StemmerOverrideFilterFactory factory = new StemmerOverrideFilterFactory();
     Map<String,String> args = new HashMap<String,String>(DEFAULT_VERSION_PARAM);
     ResourceLoader loader = new SolrResourceLoader(null, null);

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestSwedishLightStemFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestSwedishLightStemFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestSwedishLightStemFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestSwedishLightStemFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -20,8 +20,8 @@ package org.apache.solr.analysis;
 import java.io.Reader;
 import java.io.StringReader;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure the Swedish Light stem factory is working.
@@ -30,7 +30,7 @@ public class TestSwedishLightStemFilterF
   public void testStemming() throws Exception {
     Reader reader = new StringReader("äpplen äpple");
     SwedishLightStemFilterFactory factory = new SwedishLightStemFilterFactory();
-    TokenStream stream = factory.create(new WhitespaceTokenizer(DEFAULT_VERSION, reader));
+    TokenStream stream = factory.create(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
     assertTokenStreamContents(stream, new String[] { "äppl", "äppl" });
   }
 }

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestThaiWordFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestThaiWordFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestThaiWordFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestThaiWordFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -20,9 +20,9 @@ package org.apache.solr.analysis;
 import java.io.Reader;
 import java.io.StringReader;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 import org.apache.lucene.analysis.th.ThaiWordFilter;
 
 /**
@@ -35,7 +35,7 @@ public class TestThaiWordFilterFactory e
   public void testWordBreak() throws Exception {
     assumeTrue("JRE does not support Thai dictionary-based BreakIterator", ThaiWordFilter.DBBI_AVAILABLE);
     Reader reader = new StringReader("การที่ได้ต้องแสดงว่างานดี");
-    Tokenizer tokenizer = new WhitespaceTokenizer(DEFAULT_VERSION, reader);
+    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
     ThaiWordFilterFactory factory = new ThaiWordFilterFactory();
     factory.init(DEFAULT_VERSION_PARAM);
     TokenStream stream = factory.create(tokenizer);

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestTrimFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestTrimFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestTrimFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestTrimFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -21,8 +21,8 @@ import java.io.StringReader;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.core.KeywordTokenizer;
 
 /**
  * Simple tests to ensure this factory is working
@@ -33,7 +33,7 @@ public class TestTrimFilterFactory exten
     Map<String,String> args = new HashMap<String,String>();
     args.put("updateOffsets", "false");
     factory.init(args);
-    TokenStream ts = factory.create(new KeywordTokenizer(new StringReader("trim me    ")));
+    TokenStream ts = factory.create(new MockTokenizer(new StringReader("trim me    "), MockTokenizer.KEYWORD, false));
     assertTokenStreamContents(ts, new String[] { "trim me" });
   }
 }
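
Here MockTokenizer.KEYWORD stands in for KeywordTokenizer: the KEYWORD automaton emits the entire input as a single token, so the trailing spaces survive long enough for the trim filter to have something to do. A small sketch contrasting the two automata (the token values shown follow from the automaton definitions and are illustrative, not output of this test):

    Reader r1 = new StringReader("trim me    ");
    // KEYWORD: whole input as one token -> { "trim me    " }
    TokenStream keyword = new MockTokenizer(r1, MockTokenizer.KEYWORD, false);

    Reader r2 = new StringReader("trim me    ");
    // WHITESPACE: split on whitespace -> { "trim", "me" } (nothing left to trim)
    TokenStream whitespace = new MockTokenizer(r2, MockTokenizer.WHITESPACE, false);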

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestTurkishLowerCaseFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestTurkishLowerCaseFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestTurkishLowerCaseFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestTurkishLowerCaseFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -20,9 +20,9 @@ package org.apache.solr.analysis;
 import java.io.Reader;
 import java.io.StringReader;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Simple tests to ensure the Turkish lowercase filter factory is working.
@@ -33,7 +33,7 @@ public class TestTurkishLowerCaseFilterF
    */
   public void testCasing() throws Exception {
     Reader reader = new StringReader("AĞACI");
-    Tokenizer tokenizer = new WhitespaceTokenizer(DEFAULT_VERSION, reader);
+    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
     TurkishLowerCaseFilterFactory factory = new TurkishLowerCaseFilterFactory();
     TokenStream stream = factory.create(tokenizer);
     assertTokenStreamContents(stream, new String[] { "ağacı" });

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestWordDelimiterFilterFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestWordDelimiterFilterFactory.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestWordDelimiterFilterFactory.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/analysis/TestWordDelimiterFilterFactory.java Sat Oct  1 03:04:53 2011
@@ -21,8 +21,8 @@ import java.io.StringReader;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.ResourceLoader;
 import org.apache.solr.core.SolrResourceLoader;
@@ -210,12 +210,12 @@ public class TestWordDelimiterFilterFact
     factoryDefault.inform(loader);
     
     TokenStream ts = factoryDefault.create(
-        new WhitespaceTokenizer(BaseTokenTestCase.DEFAULT_VERSION, new StringReader(testText)));
+        new MockTokenizer(new StringReader(testText), MockTokenizer.WHITESPACE, false));
     BaseTokenTestCase.assertTokenStreamContents(ts, 
         new String[] { "I", "borrowed", "5", "400", "00", "540000", "at", "25", "interest", "rate", "interestrate" });
 
     ts = factoryDefault.create(
-        new WhitespaceTokenizer(BaseTokenTestCase.DEFAULT_VERSION, new StringReader("foo\u200Dbar")));
+        new MockTokenizer(new StringReader("foo\u200Dbar"), MockTokenizer.WHITESPACE, false));
     BaseTokenTestCase.assertTokenStreamContents(ts, 
         new String[] { "foo", "bar", "foobar" });
 
@@ -228,13 +228,13 @@ public class TestWordDelimiterFilterFact
     factoryCustom.inform(loader);
     
     ts = factoryCustom.create(
-        new WhitespaceTokenizer(BaseTokenTestCase.DEFAULT_VERSION, new StringReader(testText)));
+        new MockTokenizer(new StringReader(testText), MockTokenizer.WHITESPACE, false));
     BaseTokenTestCase.assertTokenStreamContents(ts, 
         new String[] { "I", "borrowed", "$5,400.00", "at", "25%", "interest", "rate", "interestrate" });
     
     /* test custom behavior with a char > 0x7F, because we had to make a larger byte[] */
     ts = factoryCustom.create(
-        new WhitespaceTokenizer(BaseTokenTestCase.DEFAULT_VERSION, new StringReader("foo\u200Dbar")));
+        new MockTokenizer(new StringReader("foo\u200Dbar"), MockTokenizer.WHITESPACE, false));
     BaseTokenTestCase.assertTokenStreamContents(ts, 
         new String[] { "foo\u200Dbar" });
   }

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/cloud/BasicZkTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/cloud/BasicZkTest.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/cloud/BasicZkTest.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/cloud/BasicZkTest.java Sat Oct  1 03:04:53 2011
@@ -19,6 +19,7 @@ package org.apache.solr.cloud;
 
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.LogMergePolicy;
+import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.update.DirectUpdateHandler2;
@@ -139,7 +140,9 @@ public class BasicZkTest extends Abstrac
  
     // we set the solrconfig to nothing, so this reload should fail
     try {
+      SolrTestCaseJ4.ignoreException("SAXParseException");
       h.getCoreContainer().reload(h.getCore().getName());
+      SolrTestCaseJ4.resetExceptionIgnores();
       fail("The reloaded SolrCore did not pick up configs from zookeeper");
     } catch(SAXParseException e) {
       

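SolrTestCaseJ4.ignoreException(pattern) registers a regex so the expected error is not reported as a failure by the test framework's log checks; resetExceptionIgnores() clears all registrations. A minimal sketch of the intended usage, reusing the reload call from the hunk above (the try/finally placement of the reset is an assumption for robustness, not what this commit does):

    SolrTestCaseJ4.ignoreException("SAXParseException"); // expected error, don't flag it
    try {
      h.getCoreContainer().reload(h.getCore().getName());
      fail("The reloaded SolrCore did not pick up configs from zookeeper");
    } catch (SAXParseException e) {
      // expected: the solrconfig was set to nothing
    } finally {
      SolrTestCaseJ4.resetExceptionIgnores(); // always restore normal log checking
    }
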
Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/cloud/CloudStateUpdateTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/cloud/CloudStateUpdateTest.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/cloud/CloudStateUpdateTest.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/cloud/CloudStateUpdateTest.java Sat Oct  1 03:04:53 2011
@@ -18,6 +18,7 @@ package org.apache.solr.cloud;
  */
 
 import java.io.File;
+import java.io.IOException;
 import java.util.Map;
 import java.util.Set;
 
@@ -26,12 +27,14 @@ import org.apache.solr.common.cloud.Clou
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.CoreContainer.Initializer;
 import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrCore;
 import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.KeeperException;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -46,6 +49,17 @@ public class CloudStateUpdateTest extend
 
   private static final boolean VERBOSE = false;
 
+  private static final String URL1 = "http://localhost:3133/solr/core0";
+  private static final String URL2 = "http://localhost:3123/solr/core1";
+  private static final String URL3 = "http://localhost:3133/solr/core1";
+  private static final String URL4 = "http://localhost:3123/solr/core4";
+  private static final String SHARD1 = "localhost:3123_solr_core1";
+  private static final String SHARD2 = "localhost:3123_solr_core2";
+  private static final String SHARD3 = "localhost:3123_solr_core3";
+  private static final String SHARD4 = "localhost:3123_solr_core4";
+  
+  private static final int TIMEOUT = 10000;
+
   protected ZkTestServer zkServer;
 
   protected String zkDir;
@@ -123,6 +137,70 @@ public class CloudStateUpdateTest extend
     log.info("####SETUP_END " + getName());
     
   }
+  
+  @Test
+  public void testIncrementalUpdate() throws Exception {
+    System.setProperty("CLOUD_UPDATE_DELAY", "1");
+    String zkDir = dataDir.getAbsolutePath() + File.separator
+        + "zookeeper/server1/data";
+    ZkTestServer server = null;
+    SolrZkClient zkClient = null;
+    ZkController zkController = null;
+    
+    server = new ZkTestServer(zkDir);
+    server.run();
+    try {
+      AbstractZkTestCase.tryCleanSolrZkNode(server.getZkHost());
+      AbstractZkTestCase.makeSolrZkNode(server.getZkHost());
+      
+      zkClient = new SolrZkClient(server.getZkAddress(), TIMEOUT);
+      String shardsPath1 = "/collections/collection1/shards/shardid1";
+      String shardsPath2 = "/collections/collection1/shards/shardid2";
+      zkClient.makePath(shardsPath1);
+      zkClient.makePath(shardsPath2);
+      
+      addShardToZk(zkClient, shardsPath1, SHARD1, URL1);
+      addShardToZk(zkClient, shardsPath1, SHARD2, URL2);
+      addShardToZk(zkClient, shardsPath2, SHARD3, URL3);
+      
+      removeShardFromZk(server.getZkAddress(), zkClient, shardsPath1);
+      
+      zkController = new ZkController(server.getZkAddress(), TIMEOUT, 1000,
+          "localhost", "8983", "solr");
+      
+      zkController.getZkStateReader().updateCloudState(true);
+      CloudState cloudInfo = zkController.getCloudState();
+      Map<String,Slice> slices = cloudInfo.getSlices("collection1");
+      assertFalse(slices.containsKey("shardid1"));
+      
+      zkClient.makePath(shardsPath1);
+      addShardToZk(zkClient, shardsPath1, SHARD1, URL1);
+      
+      zkController.getZkStateReader().updateCloudState(true);
+      cloudInfo = zkController.getCloudState();
+      slices = cloudInfo.getSlices("collection1");
+      assertTrue(slices.containsKey("shardid1"));
+      
+      updateUrl(zkClient, shardsPath1, SHARD1, "fake");
+      
+      addShardToZk(zkClient, shardsPath2, SHARD4, URL4);
+      
+      zkController.getZkStateReader().updateCloudState(true);
+      cloudInfo = zkController.getCloudState();
+      String url = cloudInfo.getSlices("collection1").get("shardid1").getShards().get(SHARD1).get("url");
+      
+      // because of the incremental update, we don't expect to see the new
+      // 'fake' url; we should still be using the original url. the correct
+      // way to update the url would be to remove the whole shard node and
+      // re-add it
+      assertEquals(URL1, url);
+      
+    } finally {
+      server.shutdown();
+      if (zkClient != null) zkClient.close();
+      if (zkController != null) zkController.close();
+    }
+  }
 
   @Test
   public void testCoreRegistration() throws Exception {
@@ -237,6 +315,37 @@ public class CloudStateUpdateTest extend
     SolrConfig.severeErrors.clear();
   }
 
+  private void addShardToZk(SolrZkClient zkClient, String shardsPath,
+      String zkNodeName, String url) throws IOException,
+      KeeperException, InterruptedException {
+
+    ZkNodeProps props = new ZkNodeProps();
+    props.put(ZkStateReader.URL_PROP, url);
+    props.put(ZkStateReader.NODE_NAME, zkNodeName);
+    byte[] bytes = props.store();
+
+    zkClient
+        .create(shardsPath + "/" + zkNodeName, bytes, CreateMode.PERSISTENT);
+  }
+  
+  private void updateUrl(SolrZkClient zkClient, String shardsPath,
+      String zkNodeName, String url) throws IOException,
+      KeeperException, InterruptedException {
+
+    ZkNodeProps props = new ZkNodeProps();
+    props.put(ZkStateReader.URL_PROP, url);
+    props.put(ZkStateReader.NODE_NAME, zkNodeName);
+    byte[] bytes = props.store();
+
+    zkClient
+        .setData(shardsPath + "/" + zkNodeName, bytes);
+  }
+  
+  private void removeShardFromZk(String zkHost, SolrZkClient zkClient, String shardsPath) throws Exception {
+
+    AbstractZkTestCase.tryCleanPath(zkHost, shardsPath);
+  }
+  
   private void printLayout(String zkHost) throws Exception {
     SolrZkClient zkClient = new SolrZkClient(
         zkHost, AbstractZkTestCase.TIMEOUT);

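The addShardToZk/updateUrl/removeShardFromZk helpers above all revolve around the same ZkNodeProps serialization. Pulled together as one standalone sketch (the path, node name, and url are illustrative; it assumes a connected SolrZkClient as in the test):

    import org.apache.solr.common.cloud.SolrZkClient;
    import org.apache.solr.common.cloud.ZkNodeProps;
    import org.apache.solr.common.cloud.ZkStateReader;
    import org.apache.zookeeper.CreateMode;

    public class ShardRegistrationSketch {
      // registers one shard node under an existing shards path, mirroring
      // addShardToZk() above; the caller supplies a connected client
      static void register(SolrZkClient zkClient) throws Exception {
        ZkNodeProps props = new ZkNodeProps();
        props.put(ZkStateReader.URL_PROP, "http://localhost:3123/solr/core1");
        props.put(ZkStateReader.NODE_NAME, "localhost:3123_solr_core1");
        byte[] bytes = props.store(); // serialized payload for the znode
        zkClient.create(
            "/collections/collection1/shards/shardid1/localhost:3123_solr_core1",
            bytes, CreateMode.PERSISTENT);
      }
    }

As the comment in testIncrementalUpdate() notes, a url change only becomes visible by removing the shard node and re-adding it this way; a bare setData(), as in updateUrl(), is not picked up by the incremental cloud-state update.
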
Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java Sat Oct  1 03:04:53 2011
@@ -186,7 +186,7 @@ public class ZkControllerTest extends So
       AbstractZkTestCase.makeSolrZkNode(server.getZkHost());
 
       zkController = new ZkController(server.getZkAddress(),
-          TIMEOUT, 1000, "localhost", "8983", "/solr");
+          TIMEOUT, 10000, "localhost", "8983", "/solr");
 
       zkController.uploadToZK(getFile("solr/conf"),
           ZkController.CONFIGS_ZKNODE + "/config1");

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/cloud/ZkTestServer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/cloud/ZkTestServer.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/cloud/ZkTestServer.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/cloud/ZkTestServer.java Sat Oct  1 03:04:53 2011
@@ -35,6 +35,7 @@ import org.apache.solr.SolrTestCaseJ4;
 import org.apache.zookeeper.jmx.ManagedUtil;
 import org.apache.zookeeper.server.NIOServerCnxn;
 import org.apache.zookeeper.server.ServerConfig;
+import org.apache.zookeeper.server.ZKDatabase;
 import org.apache.zookeeper.server.ZooKeeperServer;
 import org.apache.zookeeper.server.SessionTracker.Session;
 import org.apache.zookeeper.server.persistence.FileTxnSnapLog;
@@ -108,9 +109,16 @@ public class ZkTestServer {
      */
     protected void shutdown() throws IOException {
       zooKeeperServer.shutdown();
-      zooKeeperServer.getZKDatabase().close();
-      waitForServerDown(getZkHost() + ":" + getPort(), 5000);
-      cnxnFactory.shutdown();
+      ZKDatabase zkDb = zooKeeperServer.getZKDatabase();
+      if (zkDb != null) {
+        zkDb.close();
+      }
+      if (cnxnFactory != null && cnxnFactory.getLocalPort() != 0) {
+        waitForServerDown(getZkHost() + ":" + getPort(), 5000);
+      }
+      if (cnxnFactory != null) {
+        cnxnFactory.shutdown();
+      }
     }
 
     public int getLocalPort() {

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/core/TestArbitraryIndexDir.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/core/TestArbitraryIndexDir.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/core/TestArbitraryIndexDir.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/core/TestArbitraryIndexDir.java Sat Oct  1 03:04:53 2011
@@ -106,8 +106,8 @@ public class TestArbitraryIndexDir exten
         new IndexWriterConfig(Version.LUCENE_40, new StandardAnalyzer(Version.LUCENE_40))
     );
     Document doc = new Document();
-    doc.add(new Field("id", TextField.TYPE_STORED, "2"));
-    doc.add(new Field("name", TextField.TYPE_STORED, "name2"));
+    doc.add(new Field("id", "2", TextField.TYPE_STORED));
+    doc.add(new Field("name", "name2", TextField.TYPE_STORED));
     iw.addDocument(doc);
     iw.commit();
     iw.close();

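The Field constructor fix above (and the identical ones in TestSort.java and IndexBasedSpellCheckerTest.java further down) tracks the trunk argument order: name, then value, then FieldType. A minimal sketch (the class name is illustrative):

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.TextField;

    public class FieldCtorSketch {
      public static void main(String[] args) {
        Document doc = new Document();
        // name, value, FieldType, matching the constructor used in this
        // commit; the old (name, type, value) order is what gets replaced
        doc.add(new Field("id", "2", TextField.TYPE_STORED));
        doc.add(new Field("name", "name2", TextField.TYPE_STORED));
        System.out.println(doc);
      }
    }
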
Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java Sat Oct  1 03:04:53 2011
@@ -17,6 +17,7 @@
 
 package org.apache.solr.handler;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.solr.client.solrj.request.DocumentAnalysisRequest;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.SolrInputField;
@@ -252,8 +253,8 @@ public class DocumentAnalysisRequestHand
     NamedList<NamedList<Object>> whitetokResult = documentResult.get("whitetok");
     assertNotNull("an analysis for the 'whitetok' field should be returned", whitetokResult);
     queryResult = whitetokResult.get("query");
-    tokenList = (List<NamedList>) queryResult.get("org.apache.lucene.analysis.core.WhitespaceTokenizer");
-    assertNotNull("Expecting the 'WhitespaceTokenizer' to be applied on the query for the 'whitetok' field", tokenList);
+    tokenList = (List<NamedList>) queryResult.get(MockTokenizer.class.getName());
+    assertNotNull("Expecting the 'MockTokenizer' to be applied on the query for the 'whitetok' field", tokenList);
     assertEquals("Query has only one token", 1, tokenList.size());
     assertToken(tokenList.get(0), new TokenInfo("JUMPING", null, "word", 0, 7, 1, new int[]{1}, null, false));
     indexResult = whitetokResult.get("index");

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java Sat Oct  1 03:04:53 2011
@@ -17,6 +17,7 @@
 
 package org.apache.solr.handler;
 
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.core.KeywordTokenizer;
 import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 import org.apache.solr.common.params.AnalysisParams;
@@ -261,10 +262,10 @@ public class FieldAnalysisRequestHandler
 
     indexPart = whitetok.get("index");
     assertNotNull("expecting an index token analysis for field 'whitetok'", indexPart);
-    assertEquals("expecting only WhitespaceTokenizer to be applied", 1, indexPart.size());
-    tokenList = indexPart.get(WhitespaceTokenizer.class.getName());
-    assertNotNull("expecting only WhitespaceTokenizer to be applied", tokenList);
-    assertEquals("expecting WhitespaceTokenizer to produce 10 tokens", 10, tokenList.size());
+    assertEquals("expecting only MockTokenizer to be applied", 1, indexPart.size());
+    tokenList = indexPart.get(MockTokenizer.class.getName());
+    assertNotNull("expecting only MockTokenizer to be applied", tokenList);
+    assertEquals("expecting MockTokenizer to produce 10 tokens", 10, tokenList.size());
     assertToken(tokenList.get(0), new TokenInfo("the", null, "word", 0, 3, 1, new int[]{1}, null, false));
     assertToken(tokenList.get(1), new TokenInfo("quick", null, "word", 4, 9, 2, new int[]{2}, null, false));
     assertToken(tokenList.get(2), new TokenInfo("red", null, "word", 10, 13, 3, new int[]{3}, null, false));
@@ -278,10 +279,10 @@ public class FieldAnalysisRequestHandler
 
     queryPart = whitetok.get("query");
     assertNotNull("expecting a query token analysis for field 'whitetok'", queryPart);
-    assertEquals("expecting only WhitespaceTokenizer to be applied", 1, queryPart.size());
-    tokenList = queryPart.get(WhitespaceTokenizer.class.getName());
-    assertNotNull("expecting only WhitespaceTokenizer to be applied", tokenList);
-    assertEquals("expecting WhitespaceTokenizer to produce 2 tokens", 2, tokenList.size());
+    assertEquals("expecting only MockTokenizer to be applied", 1, queryPart.size());
+    tokenList = queryPart.get(MockTokenizer.class.getName());
+    assertNotNull("expecting only MockTokenizer to be applied", tokenList);
+    assertEquals("expecting MockTokenizer to produce 2 tokens", 2, tokenList.size());
     assertToken(tokenList.get(0), new TokenInfo("fox", null, "word", 0, 3, 1, new int[]{1}, null, false));
     assertToken(tokenList.get(1), new TokenInfo("brown", null, "word", 4, 9, 2, new int[]{2}, null, false));
 
@@ -290,18 +291,18 @@ public class FieldAnalysisRequestHandler
 
     indexPart = keywordtok.get("index");
     assertNotNull("expecting an index token analysis for field 'keywordtok'", indexPart);
-    assertEquals("expecting only KeywordTokenizer to be applied", 1, indexPart.size());
-    tokenList = indexPart.get(KeywordTokenizer.class.getName());
-    assertNotNull("expecting only KeywordTokenizer to be applied", tokenList);
-    assertEquals("expecting KeywordTokenizer to produce 1 token", 1, tokenList.size());
+    assertEquals("expecting only MockTokenizer to be applied", 1, indexPart.size());
+    tokenList = indexPart.get(MockTokenizer.class.getName());
+    assertNotNull("expecting only MockTokenizer to be applied", tokenList);
+    assertEquals("expecting MockTokenizer to produce 1 token", 1, tokenList.size());
     assertToken(tokenList.get(0), new TokenInfo("the quick red fox jumped over the lazy brown dogs", null, "word", 0, 49, 1, new int[]{1}, null, false));
 
     queryPart = keywordtok.get("query");
     assertNotNull("expecting a query token analysis for field 'keywordtok'", queryPart);
-    assertEquals("expecting only KeywordTokenizer to be applied", 1, queryPart.size());
-    tokenList = queryPart.get(KeywordTokenizer.class.getName());
-    assertNotNull("expecting only KeywordTokenizer to be applied", tokenList);
-    assertEquals("expecting KeywordTokenizer to produce 1 token", 1, tokenList.size());
+    assertEquals("expecting only MockTokenizer to be applied", 1, queryPart.size());
+    tokenList = queryPart.get(MockTokenizer.class.getName());
+    assertNotNull("expecting only MockTokenizer to be applied", tokenList);
+    assertEquals("expecting MockTokenizer to produce 1 token", 1, tokenList.size());
     assertToken(tokenList.get(0), new TokenInfo("fox brown", null, "word", 0, 9, 1, new int[]{1}, null, false));
 
   }
@@ -328,8 +329,8 @@ public class FieldAnalysisRequestHandler
     assertEquals("  whátëvêr  ", indexPart.get("org.apache.lucene.analysis.charfilter.HTMLStripCharFilter"));
     assertEquals("  whatever  ", indexPart.get("org.apache.lucene.analysis.charfilter.MappingCharFilter"));
 
-    List<NamedList> tokenList = (List<NamedList>)indexPart.get("org.apache.lucene.analysis.core.WhitespaceTokenizer");
-    assertNotNull("Expecting WhitespaceTokenizer analysis breakdown", tokenList);
+    List<NamedList> tokenList = (List<NamedList>)indexPart.get(MockTokenizer.class.getName());
+    assertNotNull("Expecting MockTokenizer analysis breakdown", tokenList);
     assertEquals(tokenList.size(), 1);
     assertToken(tokenList.get(0), new TokenInfo("whatever", null, "word", 12, 20, 1, new int[]{1}, null, false));
   }
@@ -353,8 +354,8 @@ public class FieldAnalysisRequestHandler
     NamedList<List<NamedList>> indexPart = textType.get("index");
     assertNotNull("expecting an index token analysis for field type 'skutype1'", indexPart);
 
-    List<NamedList> tokenList = indexPart.get("org.apache.lucene.analysis.core.WhitespaceTokenizer");
-    assertNotNull("Expcting WhitespaceTokenizer analysis breakdown", tokenList);
+    List<NamedList> tokenList = indexPart.get(MockTokenizer.class.getName());
+    assertNotNull("Expecting MockTokenizer analysis breakdown", tokenList);
     assertEquals(4, tokenList.size());
     assertToken(tokenList.get(0), new TokenInfo("hi,", null, "word", 0, 3, 1, new int[]{1}, null, false));
     assertToken(tokenList.get(1), new TokenInfo("3456-12", null, "word", 4, 11, 2, new int[]{2}, null, false));

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java Sat Oct  1 03:04:53 2011
@@ -155,12 +155,17 @@ public class HighlighterTest extends Sol
   public void testTermOffsetsTokenStream() throws Exception {
     String[] multivalued = { "a b c d", "e f g", "h", "i j k l m n" };
     Analyzer a1 = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
+    TokenStream tokenStream = a1.tokenStream("", new StringReader("a b c d e f g h i j k l m n"));
+    tokenStream.reset();
+
     TermOffsetsTokenStream tots = new TermOffsetsTokenStream(
-        a1.tokenStream( "", new StringReader( "a b c d e f g h i j k l m n" ) ) );
+        tokenStream);
     for( String v : multivalued ){
       TokenStream ts1 = tots.getMultiValuedTokenStream( v.length() );
       Analyzer a2 = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
-      TokenStream ts2 = a2.tokenStream( "", new StringReader( v ) );
+      TokenStream ts2 = a2.tokenStream("", new StringReader(v));
+      ts2.reset();
+
       while (ts1.incrementToken()) {
         assertTrue(ts2.incrementToken());
         assertEquals(ts1, ts2);

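The reset() calls added above reflect the TokenStream consumer contract: reset() before the first incrementToken(), then end() and close() when finished. A standalone illustration (the sample text is made up; the Version constant and the trunk import path for WhitespaceAnalyzer are assumptions, mirroring SimpleQueryConverter below):

    import java.io.IOException;
    import java.io.StringReader;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.util.Version;

    public class ResetContractSketch {
      public static void main(String[] args) throws IOException {
        TokenStream ts = new WhitespaceAnalyzer(Version.LUCENE_40)
            .tokenStream("", new StringReader("a b c"));
        CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
        ts.reset(); // mandatory before the first incrementToken()
        while (ts.incrementToken()) {
          System.out.println(term);
        }
        ts.end();   // records the final offset state
        ts.close();
      }
    }
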
Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/schema/IndexSchemaTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/schema/IndexSchemaTest.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/schema/IndexSchemaTest.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/schema/IndexSchemaTest.java Sat Oct  1 03:04:53 2011
@@ -17,27 +17,26 @@
 
 package org.apache.solr.schema;
 
-import java.util.HashMap;
-import java.util.Map;
-
+import org.apache.lucene.search.similarities.SimilarityProvider;
 import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.MapSolrParams;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.search.similarities.MockConfigurableSimilarityProvider;
-import org.apache.lucene.search.similarities.SimilarityProvider;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import java.util.HashMap;
+import java.util.Map;
+
 
 public class IndexSchemaTest extends SolrTestCaseJ4 {
   @BeforeClass
   public static void beforeClass() throws Exception {
     initCore("solrconfig.xml","schema.xml");
-  }    
+  }
 
   /**
    * This test assumes the schema includes:
@@ -45,22 +44,22 @@ public class IndexSchemaTest extends Sol
    * <dynamicField name="*_dynamic" type="string" indexed="true" stored="true"/>
    */
   @Test
-  public void testDynamicCopy() 
+  public void testDynamicCopy()
   {
     SolrCore core = h.getCore();
     assertU(adoc("id", "10", "title", "test", "aaa_dynamic", "aaa"));
     assertU(commit());
-    
+
     Map<String,String> args = new HashMap<String, String>();
     args.put( CommonParams.Q, "title:test" );
     args.put( "indent", "true" );
     SolrQueryRequest req = new LocalSolrQueryRequest( core, new MapSolrParams( args) );
-    
+
     assertQ("Make sure they got in", req
             ,"//*[@numFound='1']"
             ,"//result/doc[1]/int[@name='id'][.='10']"
             );
-    
+
     args = new HashMap<String, String>();
     args.put( CommonParams.Q, "aaa_dynamic:aaa" );
     args.put( "indent", "true" );
@@ -80,46 +79,15 @@ public class IndexSchemaTest extends Sol
             );
     clearIndex();
   }
-  
-  @Test
-  public void testRuntimeFieldCreation()
-  {
-    // any field manipulation needs to happen when you know the core will not 
-    // be accepting any requests.  Typically this is done within the inform() 
-    // method.  Since this is a single threaded test, we can change the fields
-    // willi-nilly
 
+  @Test
+  public void testSimilarityProviderFactory() {
     SolrCore core = h.getCore();
-    IndexSchema schema = core.getSchema();
-    final String fieldName = "runtimefield";
-    SchemaField sf = new SchemaField( fieldName, schema.getFieldTypes().get( "string" ) );
-    schema.getFields().put( fieldName, sf );
-    
-    // also register a new copy field (from our new field)
-    schema.registerCopyField( fieldName, "dynamic_runtime" );
-    schema.refreshAnalyzers();
-    
-    assertU(adoc("id", "10", "title", "test", fieldName, "aaa"));
-    assertU(commit());
-
-    SolrQuery query = new SolrQuery( fieldName+":aaa" );
-    query.set( "indent", "true" );
-    SolrQueryRequest req = new LocalSolrQueryRequest( core, query );
-    
-    assertQ("Make sure they got in", req
-            ,"//*[@numFound='1']"
-            ,"//result/doc[1]/int[@name='id'][.='10']"
-            );
-    
-    // Check to see if our copy field made it out safely
-    query.setQuery( "dynamic_runtime:aaa" );
-    assertQ("Make sure they got in", req
-            ,"//*[@numFound='1']"
-            ,"//result/doc[1]/int[@name='id'][.='10']"
-            );
-    clearIndex();
+    SimilarityProvider similarityProvider = core.getSchema().getSimilarityProvider();
+    assertTrue("wrong class", similarityProvider instanceof MockConfigurableSimilarityProvider);
+    assertEquals("is there an echo?", ((MockConfigurableSimilarityProvider)similarityProvider).getPassthrough());
   }
-  
+
   @Test
   public void testIsDynamicField() throws Exception {
     SolrCore core = h.getCore();
@@ -134,6 +102,5 @@ public class IndexSchemaTest extends Sol
     SolrCore core = h.getCore();
     IndexSchema schema = core.getSchema();
     assertFalse(schema.getField("id").multiValued());
-
   }
 }

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/search/TestSort.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/search/TestSort.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/search/TestSort.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/search/TestSort.java Sat Oct  1 03:04:53 2011
@@ -150,8 +150,8 @@ public class TestSort extends SolrTestCa
 
   public void testSort() throws Exception {
     Directory dir = new RAMDirectory();
-    Field f = new Field("f", StringField.TYPE_UNSTORED,"0");
-    Field f2 = new Field("f2", StringField.TYPE_UNSTORED,"0");
+    Field f = new Field("f", "0", StringField.TYPE_UNSTORED);
+    Field f2 = new Field("f2", "0", StringField.TYPE_UNSTORED);
 
     for (int iterCnt = 0; iterCnt<iter; iterCnt++) {
       IndexWriter iw = new IndexWriter(

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java Sat Oct  1 03:04:53 2011
@@ -571,7 +571,9 @@ public class TestFunctionQuery extends S
     dofunc("sqrt(9)", Math.sqrt(9));
     dofunc("cbrt(8)", Math.cbrt(8));
     dofunc("max(0,1)", Math.max(0,1));
+    dofunc("max(10,3,8,7,5,4)", Math.max(Math.max(Math.max(Math.max(Math.max(10,3),8),7),5),4));
     dofunc("min(0,1)", Math.min(0,1));
+    dofunc("min(10,3,8,7,5,4)", Math.min(Math.min(Math.min(Math.min(Math.min(10,3),8),7),5),4));
     dofunc("log(100)", Math.log10(100));
     dofunc("ln(3)", Math.log(3));
     dofunc("exp(1)", Math.exp(1));

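The nested Math.max/Math.min chains above simply fold Java's two-argument methods across the operands to build the expected value for the new multi-argument max()/min() function calls. The same expected values computed with a loop (standalone sketch):

    public class FoldSketch {
      public static void main(String[] args) {
        int[] vals = {10, 3, 8, 7, 5, 4};
        int max = vals[0];
        int min = vals[0];
        for (int v : vals) {
          max = Math.max(max, v); // same result as the nested max(...) chain
          min = Math.min(min, v);
        }
        System.out.println(max + " " + min); // prints 10 3
      }
    }
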
Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java Sat Oct  1 03:04:53 2011
@@ -290,7 +290,7 @@ public class IndexBasedSpellCheckerTest 
     );
     for (int i = 0; i < ALT_DOCS.length; i++) {
       Document doc = new Document();
-      doc.add(new Field("title", TextField.TYPE_STORED, ALT_DOCS[i]));
+      doc.add(new Field("title", ALT_DOCS[i], TextField.TYPE_STORED));
       iw.addDocument(doc);
     }
     iw.optimize();

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/spelling/SimpleQueryConverter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/spelling/SimpleQueryConverter.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/spelling/SimpleQueryConverter.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/spelling/SimpleQueryConverter.java Sat Oct  1 03:04:53 2011
@@ -37,23 +37,25 @@ import java.io.IOException;
  *
  * @since solr 1.3
  **/
-class SimpleQueryConverter extends SpellingQueryConverter{
+class SimpleQueryConverter extends SpellingQueryConverter {
+
   @Override
   public Collection<Token> convert(String origQuery) {
-    Collection<Token> result = new HashSet<Token>();
-    WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_40);
-    TokenStream ts = analyzer.tokenStream("", new StringReader(origQuery));
-    // TODO: support custom attributes
-    CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
-    OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
-    TypeAttribute typeAtt = ts.addAttribute(TypeAttribute.class);
-    FlagsAttribute flagsAtt = ts.addAttribute(FlagsAttribute.class);
-    PayloadAttribute payloadAtt = ts.addAttribute(PayloadAttribute.class);
-    PositionIncrementAttribute posIncAtt = ts.addAttribute(PositionIncrementAttribute.class);
-    
     try {
+      Collection<Token> result = new HashSet<Token>();
+      WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_40);
+      TokenStream ts = analyzer.tokenStream("", new StringReader(origQuery));
+      // TODO: support custom attributes
+      CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
+      OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
+      TypeAttribute typeAtt = ts.addAttribute(TypeAttribute.class);
+      FlagsAttribute flagsAtt = ts.addAttribute(FlagsAttribute.class);
+      PayloadAttribute payloadAtt = ts.addAttribute(PayloadAttribute.class);
+      PositionIncrementAttribute posIncAtt = ts.addAttribute(PositionIncrementAttribute.class);
+
       ts.reset();
-      while (ts.incrementToken()){
+
+      while (ts.incrementToken()) {
         Token tok = new Token();
         tok.copyBuffer(termAtt.buffer(), 0, termAtt.length());
         tok.setOffset(offsetAtt.startOffset(), offsetAtt.endOffset());
@@ -63,9 +65,12 @@ class SimpleQueryConverter extends Spell
         tok.setType(typeAtt.type());
         result.add(tok);
       }
+      ts.end();
+      ts.close();
+      
+      return result;
     } catch (IOException e) {
       throw new RuntimeException(e);
     }
-    return result;
   }
 }

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/update/AutoCommitTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/update/AutoCommitTest.java?rev=1177888&r1=1177887&r2=1177888&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/update/AutoCommitTest.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/update/AutoCommitTest.java Sat Oct  1 03:04:53 2011
@@ -172,7 +172,7 @@ public class AutoCommitTest extends Abst
         adoc("id", "14", "subject", "info" ), null ) );
     handler.handleRequest( req, rsp );
 
-    assertTrue(trigger.waitForNewSearcher(10000));
+    assertTrue(trigger.waitForNewSearcher(15000));
 
     req.setContentStreams( toContentStreams(
         adoc("id", "15", "subject", "info" ), null ) );
@@ -216,7 +216,7 @@ public class AutoCommitTest extends Abst
     assertQ("shouldn't find any", req("id:529") ,"//result[@numFound=0]" );
 
     // Wait longer than the autocommit time
-    assertTrue(trigger.waitForNewSearcher(30000));
+    assertTrue(trigger.waitForNewSearcher(45000));
     trigger.reset();
     req.setContentStreams( toContentStreams(
       adoc("id", "530", "field_t", "what's inside?", "subject", "info"), null ) );
@@ -330,7 +330,7 @@ public class AutoCommitTest extends Abst
     }
     req.close();
     
-    assertTrue(softTrigger.waitForNewSearcher(10000));
+    assertTrue(softTrigger.waitForNewSearcher(30000));
     softTrigger.reset();
     
     assertTrue(trigger.waitForNewSearcher(10000));
@@ -489,11 +489,11 @@ public class AutoCommitTest extends Abst
     assertTrue("expected:>=2 but got " + totalCommits, totalCommits >= 2);
     assertQ("deleted and time has passed", req("id:529") ,"//result[@numFound=0]" );
     
-    // now make the call 5 times really fast and make sure it 
-    // only commits once
+    // now make the call 2 times really fast and make sure id:500
+    // is not visible right away
     req.setContentStreams( toContentStreams(
         adoc("id", "500" ), null ) );
-    for( int i=0;i<5; i++ ) {
+    for( int i=0;i<2; i++ ) {
       handler.handleRequest( req, rsp );
     }
     assertQ("should not be there yet", req("id:500") ,"//result[@numFound=0]" );