Posted to java-commits@lucene.apache.org by bu...@apache.org on 2009/08/02 00:52:35 UTC

svn commit: r799953 [4/4] - in /lucene/java/trunk: ./ contrib/analyzers/common/src/java/org/apache/lucene/analysis/ar/ contrib/analyzers/common/src/java/org/apache/lucene/analysis/br/ contrib/analyzers/common/src/java/org/apache/lucene/analysis/cjk/ co...

Modified: lucene/java/trunk/contrib/wikipedia/src/test/org/apache/lucene/wikipedia/analysis/WikipediaTokenizerTest.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/contrib/wikipedia/src/test/org/apache/lucene/wikipedia/analysis/WikipediaTokenizerTest.java?rev=799953&r1=799952&r2=799953&view=diff
==============================================================================
--- lucene/java/trunk/contrib/wikipedia/src/test/org/apache/lucene/wikipedia/analysis/WikipediaTokenizerTest.java (original)
+++ lucene/java/trunk/contrib/wikipedia/src/test/org/apache/lucene/wikipedia/analysis/WikipediaTokenizerTest.java Sat Aug  1 22:52:32 2009
@@ -19,7 +19,6 @@
 package org.apache.lucene.wikipedia.analysis;
 
 import junit.framework.TestCase;
-import org.apache.lucene.analysis.Token;
 
 import java.io.StringReader;
 import java.io.IOException;
@@ -28,6 +27,12 @@
 import java.util.Set;
 import java.util.HashSet;
 
+import org.apache.lucene.analysis.tokenattributes.FlagsAttribute;
+import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
+import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
+import org.apache.lucene.analysis.tokenattributes.TermAttribute;
+import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
+
 
 /**
  *
@@ -131,23 +136,24 @@
     int numBoldItalics = 0;
     int numCategory = 0;
     int numCitation = 0;
-    final Token reusableToken = new Token();
-    for (Token nextToken = tf.next(reusableToken); nextToken != null; nextToken = tf.next(reusableToken)) {
-      String tokText = nextToken.term();
+    TermAttribute termAtt = (TermAttribute) tf.addAttribute(TermAttribute.class);
+    TypeAttribute typeAtt = (TypeAttribute) tf.addAttribute(TypeAttribute.class);
+    
+    while (tf.incrementToken()) {
+      String tokText = termAtt.term();
       //System.out.println("Text: " + tokText + " Type: " + token.type());
-      assertTrue("nextToken is null and it shouldn't be", nextToken != null);
       String expectedType = (String) tcm.get(tokText);
-      assertTrue("expectedType is null and it shouldn't be for: " + nextToken, expectedType != null);
-      assertTrue(nextToken.type() + " is not equal to " + expectedType + " for " + nextToken, nextToken.type().equals(expectedType) == true);
+      assertTrue("expectedType is null and it shouldn't be for: " + tf.toString(), expectedType != null);
+      assertTrue(typeAtt.type() + " is not equal to " + expectedType + " for " + tf.toString(), typeAtt.type().equals(expectedType) == true);
       count++;
-      if (nextToken.type().equals(WikipediaTokenizer.ITALICS)  == true){
+      if (typeAtt.type().equals(WikipediaTokenizer.ITALICS)  == true){
         numItalics++;
-      } else if (nextToken.type().equals(WikipediaTokenizer.BOLD_ITALICS)  == true){
+      } else if (typeAtt.type().equals(WikipediaTokenizer.BOLD_ITALICS)  == true){
         numBoldItalics++;
-      } else if (nextToken.type().equals(WikipediaTokenizer.CATEGORY)  == true){
+      } else if (typeAtt.type().equals(WikipediaTokenizer.CATEGORY)  == true){
         numCategory++;
       }
-      else if (nextToken.type().equals(WikipediaTokenizer.CITATION)  == true){
+      else if (typeAtt.type().equals(WikipediaTokenizer.CITATION)  == true){
         numCitation++;
       }
     }
@@ -166,106 +172,93 @@
   }
 
   private void checkLinkPhrases(WikipediaTokenizer tf) throws IOException {
-    final Token reusableToken = new Token();
-    Token nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "click", nextToken.term().equals("click") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "link", nextToken.term().equals("link") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "here",
-            nextToken.term().equals("here") == true);
+    TermAttribute termAtt = (TermAttribute) tf.addAttribute(TermAttribute.class);
+    PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) tf.addAttribute(PositionIncrementAttribute.class);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "click", termAtt.term().equals("click") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "link", termAtt.term().equals("link") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "here",
+        termAtt.term().equals("here") == true);
     //The tokens "link" and "here" should be at the same position for phrases to work
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "again",
-            nextToken.term().equals("again") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "click",
-            nextToken.term().equals("click") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "http://lucene.apache.org",
-            nextToken.term().equals("http://lucene.apache.org") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "here",
-            nextToken.term().equals("here") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 0, nextToken.getPositionIncrement() == 0);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "again",
-            nextToken.term().equals("again") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "a",
-            nextToken.term().equals("a") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "b",
-            nextToken.term().equals("b") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "c",
-            nextToken.term().equals("c") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "d",
-            nextToken.term().equals("d") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "again",
+        termAtt.term().equals("again") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "click",
+        termAtt.term().equals("click") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "http://lucene.apache.org",
+        termAtt.term().equals("http://lucene.apache.org") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "here",
+        termAtt.term().equals("here") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 0, posIncrAtt.getPositionIncrement() == 0);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "again",
+        termAtt.term().equals("again") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "a",
+        termAtt.term().equals("a") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "b",
+        termAtt.term().equals("b") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "c",
+        termAtt.term().equals("c") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "d",
+        termAtt.term().equals("d") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
 
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is not null and it should be", nextToken == null);
+    assertFalse(tf.incrementToken());  
   }
 
   public void testLinks() throws Exception {
     String test = "[http://lucene.apache.org/java/docs/index.html#news here] [http://lucene.apache.org/java/docs/index.html?b=c here] [https://lucene.apache.org/java/docs/index.html?b=c here]";
     WikipediaTokenizer tf = new WikipediaTokenizer(new StringReader(test));
-    final Token reusableToken = new Token();
-    Token nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "http://lucene.apache.org/java/docs/index.html#news",
-            nextToken.term().equals("http://lucene.apache.org/java/docs/index.html#news") == true);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.EXTERNAL_LINK_URL, nextToken.type().equals(WikipediaTokenizer.EXTERNAL_LINK_URL) == true);
-    tf.next(reusableToken);//skip here
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "http://lucene.apache.org/java/docs/index.html?b=c",
-            nextToken.term().equals("http://lucene.apache.org/java/docs/index.html?b=c") == true);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.EXTERNAL_LINK_URL, nextToken.type().equals(WikipediaTokenizer.EXTERNAL_LINK_URL) == true);
-    tf.next(reusableToken);//skip here
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "https://lucene.apache.org/java/docs/index.html?b=c",
-            nextToken.term().equals("https://lucene.apache.org/java/docs/index.html?b=c") == true);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.EXTERNAL_LINK_URL, nextToken.type().equals(WikipediaTokenizer.EXTERNAL_LINK_URL) == true);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is not null and it should be", nextToken == null);
-
+    TermAttribute termAtt = (TermAttribute) tf.addAttribute(TermAttribute.class);
+    TypeAttribute typeAtt = (TypeAttribute) tf.addAttribute(TypeAttribute.class);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "http://lucene.apache.org/java/docs/index.html#news",
+        termAtt.term().equals("http://lucene.apache.org/java/docs/index.html#news") == true);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.EXTERNAL_LINK_URL, typeAtt.type().equals(WikipediaTokenizer.EXTERNAL_LINK_URL) == true);
+    tf.incrementToken();//skip here
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "http://lucene.apache.org/java/docs/index.html?b=c",
+        termAtt.term().equals("http://lucene.apache.org/java/docs/index.html?b=c") == true);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.EXTERNAL_LINK_URL, typeAtt.type().equals(WikipediaTokenizer.EXTERNAL_LINK_URL) == true);
+    tf.incrementToken();//skip here
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "https://lucene.apache.org/java/docs/index.html?b=c",
+        termAtt.term().equals("https://lucene.apache.org/java/docs/index.html?b=c") == true);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.EXTERNAL_LINK_URL, typeAtt.type().equals(WikipediaTokenizer.EXTERNAL_LINK_URL) == true);
+    
+    assertTrue(tf.incrementToken());
+    assertFalse(tf.incrementToken());
   }
 
   public void testLucene1133() throws Exception {
@@ -277,72 +270,73 @@
     checkLinkPhrases(tf);
     String test = "[[Category:a b c d]] [[Category:e f g]] [[link here]] [[link there]] ''italics here'' something ''more italics'' [[Category:h   i   j]]";
     tf = new WikipediaTokenizer(new StringReader(test), WikipediaTokenizer.UNTOKENIZED_ONLY, untoks);
-    final Token reusableToken = new Token();
-    Token nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "a b c d",
-            nextToken.term().equals("a b c d") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 11, nextToken.startOffset() == 11);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 18, nextToken.endOffset() == 18);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "e f g",
-            nextToken.term().equals("e f g") == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 32, nextToken.startOffset() == 32);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 37, nextToken.endOffset() == 37);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "link",
-            nextToken.term().equals("link") == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 42, nextToken.startOffset() == 42);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 46, nextToken.endOffset() == 46);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "here",
-            nextToken.term().equals("here") == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 47, nextToken.startOffset() == 47);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 51, nextToken.endOffset() == 51);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "link",
-            nextToken.term().equals("link") == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 56, nextToken.startOffset() == 56);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 60, nextToken.endOffset() == 60);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "there",
-            nextToken.term().equals("there") == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 61, nextToken.startOffset() == 61);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 66, nextToken.endOffset() == 66);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "italics here",
-            nextToken.term().equals("italics here") == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 71, nextToken.startOffset() == 71);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 83, nextToken.endOffset() == 83);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "something",
-            nextToken.term().equals("something") == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 86, nextToken.startOffset() == 86);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 95, nextToken.endOffset() == 95);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "more italics",
-            nextToken.term().equals("more italics") == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 98, nextToken.startOffset() == 98);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 110, nextToken.endOffset() == 110);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "h   i   j",
-            nextToken.term().equals("h   i   j") == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 124, nextToken.startOffset() == 124);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 133, nextToken.endOffset() == 133);
+    TermAttribute termAtt = (TermAttribute) tf.addAttribute(TermAttribute.class);
+    PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) tf.addAttribute(PositionIncrementAttribute.class);
+    OffsetAttribute offsetAtt = (OffsetAttribute) tf.addAttribute(OffsetAttribute.class);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "a b c d",
+        termAtt.term().equals("a b c d") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 11, offsetAtt.startOffset() == 11);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 18, offsetAtt.endOffset() == 18);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "e f g",
+        termAtt.term().equals("e f g") == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 32, offsetAtt.startOffset() == 32);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 37, offsetAtt.endOffset() == 37);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "link",
+        termAtt.term().equals("link") == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 42, offsetAtt.startOffset() == 42);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 46, offsetAtt.endOffset() == 46);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "here",
+        termAtt.term().equals("here") == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 47, offsetAtt.startOffset() == 47);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 51, offsetAtt.endOffset() == 51);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "link",
+        termAtt.term().equals("link") == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 56, offsetAtt.startOffset() == 56);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 60, offsetAtt.endOffset() == 60);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "there",
+        termAtt.term().equals("there") == true);
 
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is not null and it should be", nextToken == null);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 61, offsetAtt.startOffset() == 61);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 66, offsetAtt.endOffset() == 66);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "italics here",
+        termAtt.term().equals("italics here") == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 71, offsetAtt.startOffset() == 71);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 83, offsetAtt.endOffset() == 83);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "something",
+        termAtt.term().equals("something") == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 86, offsetAtt.startOffset() == 86);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 95, offsetAtt.endOffset() == 95);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "more italics",
+        termAtt.term().equals("more italics") == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 98, offsetAtt.startOffset() == 98);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 110, offsetAtt.endOffset() == 110);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "h   i   j",
+        termAtt.term().equals("h   i   j") == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 124, offsetAtt.startOffset() == 124);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 133, offsetAtt.endOffset() == 133);
+
+    assertFalse(tf.incrementToken());
   }
 
   public void testBoth() throws Exception {
@@ -352,225 +346,211 @@
     String test = "[[Category:a b c d]] [[Category:e f g]] [[link here]] [[link there]] ''italics here'' something ''more italics'' [[Category:h   i   j]]";
     //should output all the individual tokens plus the untokenized tokens as well
     WikipediaTokenizer tf = new WikipediaTokenizer(new StringReader(test), WikipediaTokenizer.BOTH, untoks);
-    final Token reusableToken = new Token();
-    Token nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "a b c d",
-            nextToken.term().equals("a b c d") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, nextToken.type().equals(WikipediaTokenizer.CATEGORY) == true);
-    assertTrue(nextToken.getFlags() + " does not equal: " + WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG, nextToken.getFlags() == WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 11, nextToken.startOffset() == 11);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 18, nextToken.endOffset() == 18);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "a",
-            nextToken.term().equals("a") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 0, nextToken.getPositionIncrement() == 0);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, nextToken.type().equals(WikipediaTokenizer.CATEGORY) == true);
-    assertTrue(nextToken.getFlags() + " equals: " + WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG + " and it shouldn't", nextToken.getFlags() != WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 11, nextToken.startOffset() == 11);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 12, nextToken.endOffset() == 12);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "b",
-            nextToken.term().equals("b") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, nextToken.type().equals(WikipediaTokenizer.CATEGORY) == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 13, nextToken.startOffset() == 13);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 14, nextToken.endOffset() == 14);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "c",
-            nextToken.term().equals("c") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, nextToken.type().equals(WikipediaTokenizer.CATEGORY) == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 15, nextToken.startOffset() == 15);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 16, nextToken.endOffset() == 16);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "d",
-            nextToken.term().equals("d") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, nextToken.type().equals(WikipediaTokenizer.CATEGORY) == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 17, nextToken.startOffset() == 17);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 18, nextToken.endOffset() == 18);
-
-
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "e f g",
-            nextToken.term().equals("e f g") == true);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, nextToken.type().equals(WikipediaTokenizer.CATEGORY) == true);
-    assertTrue(nextToken.getFlags() + " does not equal: " + WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG, nextToken.getFlags() == WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 32, nextToken.startOffset() == 32);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 37, nextToken.endOffset() == 37);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "e",
-            nextToken.term().equals("e") == true);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, nextToken.type().equals(WikipediaTokenizer.CATEGORY) == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 0, nextToken.getPositionIncrement() == 0);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 32, nextToken.startOffset() == 32);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 33, nextToken.endOffset() == 33);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "f",
-            nextToken.term().equals("f") == true);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, nextToken.type().equals(WikipediaTokenizer.CATEGORY) == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 34, nextToken.startOffset() == 34);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 35, nextToken.endOffset() == 35);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "g",
-            nextToken.term().equals("g") == true);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, nextToken.type().equals(WikipediaTokenizer.CATEGORY) == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 36, nextToken.startOffset() == 36);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 37, nextToken.endOffset() == 37);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "link",
-            nextToken.term().equals("link") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.INTERNAL_LINK, nextToken.type().equals(WikipediaTokenizer.INTERNAL_LINK) == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 42, nextToken.startOffset() == 42);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 46, nextToken.endOffset() == 46);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "here",
-            nextToken.term().equals("here") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.INTERNAL_LINK, nextToken.type().equals(WikipediaTokenizer.INTERNAL_LINK) == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 47, nextToken.startOffset() == 47);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 51, nextToken.endOffset() == 51);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "link",
-            nextToken.term().equals("link") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 56, nextToken.startOffset() == 56);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.INTERNAL_LINK, nextToken.type().equals(WikipediaTokenizer.INTERNAL_LINK) == true);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 60, nextToken.endOffset() == 60);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "there",
-            nextToken.term().equals("there") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.INTERNAL_LINK, nextToken.type().equals(WikipediaTokenizer.INTERNAL_LINK) == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 61, nextToken.startOffset() == 61);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 66, nextToken.endOffset() == 66);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "italics here",
-            nextToken.term().equals("italics here") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.ITALICS, nextToken.type().equals(WikipediaTokenizer.ITALICS) == true);
-    assertTrue(nextToken.getFlags() + " does not equal: " + WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG, nextToken.getFlags() == WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 71, nextToken.startOffset() == 71);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 83, nextToken.endOffset() == 83);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "italics",
-            nextToken.term().equals("italics") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 0, nextToken.getPositionIncrement() == 0);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.ITALICS, nextToken.type().equals(WikipediaTokenizer.ITALICS) == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 71, nextToken.startOffset() == 71);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 78, nextToken.endOffset() == 78);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "here",
-            nextToken.term().equals("here") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.ITALICS, nextToken.type().equals(WikipediaTokenizer.ITALICS) == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 79, nextToken.startOffset() == 79);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 83, nextToken.endOffset() == 83);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "something",
-            nextToken.term().equals("something") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 86, nextToken.startOffset() == 86);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 95, nextToken.endOffset() == 95);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "more italics",
-            nextToken.term().equals("more italics") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.ITALICS, nextToken.type().equals(WikipediaTokenizer.ITALICS) == true);
-    assertTrue(nextToken.getFlags() + " does not equal: " + WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG, nextToken.getFlags() == WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 98, nextToken.startOffset() == 98);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 110, nextToken.endOffset() == 110);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "more",
-            nextToken.term().equals("more") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 0, nextToken.getPositionIncrement() == 0);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.ITALICS, nextToken.type().equals(WikipediaTokenizer.ITALICS) == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 98, nextToken.startOffset() == 98);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 102, nextToken.endOffset() == 102);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "italics",
-            nextToken.term().equals("italics") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-        assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.ITALICS, nextToken.type().equals(WikipediaTokenizer.ITALICS) == true);
-
-    assertTrue(nextToken.startOffset() + " does not equal: " + 103, nextToken.startOffset() == 103);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 110, nextToken.endOffset() == 110);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "h   i   j",
-            nextToken.term().equals("h   i   j") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, nextToken.type().equals(WikipediaTokenizer.CATEGORY) == true);
-    assertTrue(nextToken.getFlags() + " does not equal: " + WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG, nextToken.getFlags() == WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 124, nextToken.startOffset() == 124);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 133, nextToken.endOffset() == 133);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "h",
-            nextToken.term().equals("h") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 0, nextToken.getPositionIncrement() == 0);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, nextToken.type().equals(WikipediaTokenizer.CATEGORY) == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 124, nextToken.startOffset() == 124);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 125, nextToken.endOffset() == 125);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "i",
-            nextToken.term().equals("i") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, nextToken.type().equals(WikipediaTokenizer.CATEGORY) == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 128, nextToken.startOffset() == 128);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 129, nextToken.endOffset() == 129);
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-    assertTrue(nextToken.term() + " is not equal to " + "j",
-            nextToken.term().equals("j") == true);
-    assertTrue(nextToken.getPositionIncrement() + " does not equal: " + 1, nextToken.getPositionIncrement() == 1);
-    assertTrue(nextToken.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, nextToken.type().equals(WikipediaTokenizer.CATEGORY) == true);
-    assertTrue(nextToken.startOffset() + " does not equal: " + 132, nextToken.startOffset() == 132);
-    assertTrue(nextToken.endOffset() + " does not equal: " + 133, nextToken.endOffset() == 133);
-
-    nextToken = tf.next(reusableToken);
-    assertTrue("nextToken is not null and it should be", nextToken == null);
+    TermAttribute termAtt = (TermAttribute) tf.addAttribute(TermAttribute.class);
+    TypeAttribute typeAtt = (TypeAttribute) tf.addAttribute(TypeAttribute.class);
+    PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) tf.addAttribute(PositionIncrementAttribute.class);
+    OffsetAttribute offsetAtt = (OffsetAttribute) tf.addAttribute(OffsetAttribute.class);
+    FlagsAttribute flagsAtt = (FlagsAttribute) tf.addAttribute(FlagsAttribute.class);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "a b c d",
+            termAtt.term().equals("a b c d") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, typeAtt.type().equals(WikipediaTokenizer.CATEGORY) == true);
+    assertTrue(flagsAtt.getFlags() + " does not equal: " + WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG, flagsAtt.getFlags() == WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 11, offsetAtt.startOffset() == 11);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 18, offsetAtt.endOffset() == 18);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "a",
+            termAtt.term().equals("a") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 0, posIncrAtt.getPositionIncrement() == 0);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, typeAtt.type().equals(WikipediaTokenizer.CATEGORY) == true);
+    assertTrue(flagsAtt.getFlags() + " equals: " + WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG + " and it shouldn't", flagsAtt.getFlags() != WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 11, offsetAtt.startOffset() == 11);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 12, offsetAtt.endOffset() == 12);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "b",
+            termAtt.term().equals("b") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, typeAtt.type().equals(WikipediaTokenizer.CATEGORY) == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 13, offsetAtt.startOffset() == 13);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 14, offsetAtt.endOffset() == 14);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "c",
+            termAtt.term().equals("c") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, typeAtt.type().equals(WikipediaTokenizer.CATEGORY) == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 15, offsetAtt.startOffset() == 15);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 16, offsetAtt.endOffset() == 16);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "d",
+            termAtt.term().equals("d") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, typeAtt.type().equals(WikipediaTokenizer.CATEGORY) == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 17, offsetAtt.startOffset() == 17);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 18, offsetAtt.endOffset() == 18);
+
+
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "e f g",
+            termAtt.term().equals("e f g") == true);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, typeAtt.type().equals(WikipediaTokenizer.CATEGORY) == true);
+    assertTrue(flagsAtt.getFlags() + " does not equal: " + WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG, flagsAtt.getFlags() == WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 32, offsetAtt.startOffset() == 32);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 37, offsetAtt.endOffset() == 37);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "e",
+            termAtt.term().equals("e") == true);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, typeAtt.type().equals(WikipediaTokenizer.CATEGORY) == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 0, posIncrAtt.getPositionIncrement() == 0);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 32, offsetAtt.startOffset() == 32);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 33, offsetAtt.endOffset() == 33);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "f",
+            termAtt.term().equals("f") == true);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, typeAtt.type().equals(WikipediaTokenizer.CATEGORY) == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 34, offsetAtt.startOffset() == 34);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 35, offsetAtt.endOffset() == 35);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "g",
+            termAtt.term().equals("g") == true);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, typeAtt.type().equals(WikipediaTokenizer.CATEGORY) == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 36, offsetAtt.startOffset() == 36);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 37, offsetAtt.endOffset() == 37);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "link",
+            termAtt.term().equals("link") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.INTERNAL_LINK, typeAtt.type().equals(WikipediaTokenizer.INTERNAL_LINK) == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 42, offsetAtt.startOffset() == 42);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 46, offsetAtt.endOffset() == 46);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "here",
+            termAtt.term().equals("here") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.INTERNAL_LINK, typeAtt.type().equals(WikipediaTokenizer.INTERNAL_LINK) == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 47, offsetAtt.startOffset() == 47);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 51, offsetAtt.endOffset() == 51);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "link",
+            termAtt.term().equals("link") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 56, offsetAtt.startOffset() == 56);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.INTERNAL_LINK, typeAtt.type().equals(WikipediaTokenizer.INTERNAL_LINK) == true);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 60, offsetAtt.endOffset() == 60);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "there",
+            termAtt.term().equals("there") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.INTERNAL_LINK, typeAtt.type().equals(WikipediaTokenizer.INTERNAL_LINK) == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 61, offsetAtt.startOffset() == 61);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 66, offsetAtt.endOffset() == 66);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "italics here",
+            termAtt.term().equals("italics here") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.ITALICS, typeAtt.type().equals(WikipediaTokenizer.ITALICS) == true);
+    assertTrue(flagsAtt.getFlags() + " does not equal: " + WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG, flagsAtt.getFlags() == WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 71, offsetAtt.startOffset() == 71);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 83, offsetAtt.endOffset() == 83);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "italics",
+            termAtt.term().equals("italics") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 0, posIncrAtt.getPositionIncrement() == 0);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.ITALICS, typeAtt.type().equals(WikipediaTokenizer.ITALICS) == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 71, offsetAtt.startOffset() == 71);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 78, offsetAtt.endOffset() == 78);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "here",
+            termAtt.term().equals("here") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.ITALICS, typeAtt.type().equals(WikipediaTokenizer.ITALICS) == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 79, offsetAtt.startOffset() == 79);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 83, offsetAtt.endOffset() == 83);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "something",
+            termAtt.term().equals("something") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 86, offsetAtt.startOffset() == 86);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 95, offsetAtt.endOffset() == 95);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "more italics",
+            termAtt.term().equals("more italics") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.ITALICS, typeAtt.type().equals(WikipediaTokenizer.ITALICS) == true);
+    assertTrue(flagsAtt.getFlags() + " does not equal: " + WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG, flagsAtt.getFlags() == WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 98, offsetAtt.startOffset() == 98);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 110, offsetAtt.endOffset() == 110);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "more",
+            termAtt.term().equals("more") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 0, posIncrAtt.getPositionIncrement() == 0);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.ITALICS, typeAtt.type().equals(WikipediaTokenizer.ITALICS) == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 98, offsetAtt.startOffset() == 98);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 102, offsetAtt.endOffset() == 102);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "italics",
+            termAtt.term().equals("italics") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.ITALICS, typeAtt.type().equals(WikipediaTokenizer.ITALICS) == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 103, offsetAtt.startOffset() == 103);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 110, offsetAtt.endOffset() == 110);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "h   i   j",
+            termAtt.term().equals("h   i   j") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, typeAtt.type().equals(WikipediaTokenizer.CATEGORY) == true);
+    assertTrue(flagsAtt.getFlags() + " does not equal: " + WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG, flagsAtt.getFlags() == WikipediaTokenizer.UNTOKENIZED_TOKEN_FLAG);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 124, offsetAtt.startOffset() == 124);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 133, offsetAtt.endOffset() == 133);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "h",
+            termAtt.term().equals("h") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 0, posIncrAtt.getPositionIncrement() == 0);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, typeAtt.type().equals(WikipediaTokenizer.CATEGORY) == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 124, offsetAtt.startOffset() == 124);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 125, offsetAtt.endOffset() == 125);
+
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "i",
+            termAtt.term().equals("i") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, typeAtt.type().equals(WikipediaTokenizer.CATEGORY) == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 128, offsetAtt.startOffset() == 128);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 129, offsetAtt.endOffset() == 129);
+    
+    assertTrue(tf.incrementToken());
+    assertTrue(termAtt.term() + " is not equal to " + "j",
+            termAtt.term().equals("j") == true);
+    assertTrue(posIncrAtt.getPositionIncrement() + " does not equal: " + 1, posIncrAtt.getPositionIncrement() == 1);
+    assertTrue(typeAtt.type() + " is not equal to " + WikipediaTokenizer.CATEGORY, typeAtt.type().equals(WikipediaTokenizer.CATEGORY) == true);
+    assertTrue(offsetAtt.startOffset() + " does not equal: " + 132, offsetAtt.startOffset() == 132);
+    assertTrue(offsetAtt.endOffset() + " does not equal: " + 133, offsetAtt.endOffset() == 133);
 
+    assertFalse(tf.incrementToken());
   }
 }
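
The hunks above and below all converge on the same consumption pattern: register the attributes once, then pull tokens with incrementToken() while the attribute instances are refilled in place. A minimal, self-contained sketch of that pattern (the class name, WhitespaceAnalyzer, and the field/text values are illustrative assumptions, not part of this commit):

    import java.io.StringReader;

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
    import org.apache.lucene.analysis.tokenattributes.TermAttribute;

    public class NewTokenStreamApiSketch {
      public static void main(String[] args) throws Exception {
        Analyzer a = new WhitespaceAnalyzer();
        TokenStream ts = a.tokenStream("body", new StringReader("some sample text"));
        // Acquire the attributes once, up front; the same instances are
        // refilled on every successful call to incrementToken().
        TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
        OffsetAttribute offsetAtt = (OffsetAttribute) ts.addAttribute(OffsetAttribute.class);
        while (ts.incrementToken()) {
          System.out.println(termAtt.term()
              + " [" + offsetAtt.startOffset() + "," + offsetAtt.endOffset() + "]");
        }
        ts.close();
      }
    }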

Modified: lucene/java/trunk/contrib/wordnet/src/java/org/apache/lucene/wordnet/SynExpand.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/contrib/wordnet/src/java/org/apache/lucene/wordnet/SynExpand.java?rev=799953&r1=799952&r2=799953&view=diff
==============================================================================
--- lucene/java/trunk/contrib/wordnet/src/java/org/apache/lucene/wordnet/SynExpand.java (original)
+++ lucene/java/trunk/contrib/wordnet/src/java/org/apache/lucene/wordnet/SynExpand.java Sat Aug  1 22:52:32 2009
@@ -27,9 +27,9 @@
 import java.util.Set;
 
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.analysis.tokenattributes.TermAttribute;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.BooleanClause;
@@ -114,10 +114,10 @@
 
 		// [1] Parse query into separate words so that when we expand we can avoid dups
 		TokenStream ts = a.tokenStream( field, new StringReader( query));
-                
-                final Token reusableToken = new Token();
-		for (Token nextToken = ts.next(reusableToken); nextToken != null; nextToken = ts.next(reusableToken)) {
-			String word = nextToken.term();
+		TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
+		
+		while (ts.incrementToken()) {
+			String word = termAtt.term();
 			if ( already.add( word))
 				top.add( word);
 		}
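
One behavioral detail worth keeping in mind when applying the same conversion elsewhere: the deprecated ts.next(reusableToken) signalled end-of-stream by returning null and was free to hand back a different Token instance than the one passed in, whereas incrementToken() returns a plain boolean and always refills the attribute instances previously obtained from addAttribute(). That is why the converted loops read termAtt.term() directly and need no null check.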

Modified: lucene/java/trunk/contrib/wordnet/src/java/org/apache/lucene/wordnet/SynLookup.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/contrib/wordnet/src/java/org/apache/lucene/wordnet/SynLookup.java?rev=799953&r1=799952&r2=799953&view=diff
==============================================================================
--- lucene/java/trunk/contrib/wordnet/src/java/org/apache/lucene/wordnet/SynLookup.java (original)
+++ lucene/java/trunk/contrib/wordnet/src/java/org/apache/lucene/wordnet/SynLookup.java Sat Aug  1 22:52:32 2009
@@ -27,8 +27,8 @@
 import java.util.Set;
 
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.tokenattributes.TermAttribute;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.BooleanClause;
@@ -101,9 +101,10 @@
 
 		// [1] Parse query into separate words so that when we expand we can avoid dups
 		TokenStream ts = a.tokenStream( field, new StringReader( query));
-                final Token reusableToken = new Token();
-		for (Token nextToken = ts.next(reusableToken); nextToken != null; nextToken = ts.next(reusableToken)) {
-			String word = nextToken.term();
+		TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
+
+		while (ts.incrementToken()) {
+			String word = termAtt.term();
 			if ( already.add( word))
 				top.add( word);
 		}

Modified: lucene/java/trunk/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/LikeThisQueryBuilder.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/LikeThisQueryBuilder.java?rev=799953&r1=799952&r2=799953&view=diff
==============================================================================
--- lucene/java/trunk/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/LikeThisQueryBuilder.java (original)
+++ lucene/java/trunk/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/LikeThisQueryBuilder.java Sat Aug  1 22:52:32 2009
@@ -9,8 +9,8 @@
 import java.util.Set;
 
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.tokenattributes.TermAttribute;
 import org.apache.lucene.search.similar.MoreLikeThisQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.xmlparser.DOMUtils;
@@ -72,14 +72,14 @@
 		if((stopWords!=null)&&(fields!=null))
 		{
 		    stopWordsSet=new HashSet();
-                    final Token reusableToken = new Token();
 		    for (int i = 0; i < fields.length; i++)
             {
                 TokenStream ts = analyzer.tokenStream(fields[i],new StringReader(stopWords));
+                TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
                 try
                 {
-	                for (Token nextToken = ts.next(reusableToken); nextToken != null; nextToken = ts.next(reusableToken)) {
-	                    stopWordsSet.add(nextToken.term());
+	                while(ts.incrementToken()) {
+	                    stopWordsSet.add(termAtt.term());
 	                }
                 }
                 catch(IOException ioe)
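
Because LikeThisQueryBuilder runs this loop once per field, the TermAttribute has to be re-acquired for each field's TokenStream. A self-contained sketch of just that stop-word collection step (the class and method names and WhitespaceAnalyzer are illustrative assumptions; the raw collection types mirror the pre-generics style of the surrounding code):

    import java.io.IOException;
    import java.io.StringReader;
    import java.util.HashSet;
    import java.util.Set;

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.analysis.tokenattributes.TermAttribute;

    public class StopWordCollectionSketch {
      public static Set collect(Analyzer analyzer, String[] fields, String stopWords)
          throws IOException {
        Set stopWordsSet = new HashSet();
        for (int i = 0; i < fields.length; i++) {
          TokenStream ts = analyzer.tokenStream(fields[i], new StringReader(stopWords));
          // Each field gets its own stream, so the attribute is re-acquired here.
          TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
          while (ts.incrementToken()) {
            stopWordsSet.add(termAtt.term());
          }
          ts.close();
        }
        return stopWordsSet;
      }

      public static void main(String[] args) throws IOException {
        System.out.println(collect(new WhitespaceAnalyzer(),
            new String[] {"title", "body"}, "a an the"));
      }
    }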

Modified: lucene/java/trunk/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/SpanOrTermsBuilder.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/SpanOrTermsBuilder.java?rev=799953&r1=799952&r2=799953&view=diff
==============================================================================
--- lucene/java/trunk/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/SpanOrTermsBuilder.java (original)
+++ lucene/java/trunk/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/SpanOrTermsBuilder.java Sat Aug  1 22:52:32 2009
@@ -5,8 +5,8 @@
 import java.util.ArrayList;
 
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.tokenattributes.TermAttribute;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.spans.SpanOrQuery;
 import org.apache.lucene.search.spans.SpanQuery;
@@ -52,9 +52,10 @@
 		{
 			ArrayList clausesList=new ArrayList();
 			TokenStream ts=analyzer.tokenStream(fieldName,new StringReader(value));
-			final Token reusableToken = new Token();
-	                for (Token nextToken = ts.next(reusableToken); nextToken != null; nextToken = ts.next(reusableToken)) {
-			    SpanTermQuery stq=new SpanTermQuery(new Term(fieldName,nextToken.term()));
+			TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
+			
+			while (ts.incrementToken()) {
+			    SpanTermQuery stq=new SpanTermQuery(new Term(fieldName, termAtt.term()));
 			    clausesList.add(stq);
 			}
 			SpanOrQuery soq=new SpanOrQuery((SpanQuery[]) clausesList.toArray(new SpanQuery[clausesList.size()]));
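
The same loop shape as a compilable unit, for the SpanOrTermsBuilder case (the class name, WhitespaceAnalyzer, and the throws clause are illustrative assumptions):

    import java.io.IOException;
    import java.io.StringReader;
    import java.util.ArrayList;

    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.analysis.tokenattributes.TermAttribute;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.spans.SpanOrQuery;
    import org.apache.lucene.search.spans.SpanQuery;
    import org.apache.lucene.search.spans.SpanTermQuery;

    public class SpanOrSketch {
      public static SpanOrQuery build(String fieldName, String value) throws IOException {
        ArrayList clausesList = new ArrayList();
        TokenStream ts = new WhitespaceAnalyzer().tokenStream(fieldName, new StringReader(value));
        TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
        while (ts.incrementToken()) {
          // One SpanTermQuery clause per analyzed token.
          clausesList.add(new SpanTermQuery(new Term(fieldName, termAtt.term())));
        }
        ts.close();
        return new SpanOrQuery((SpanQuery[]) clausesList.toArray(new SpanQuery[clausesList.size()]));
      }
    }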

Modified: lucene/java/trunk/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsFilterBuilder.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsFilterBuilder.java?rev=799953&r1=799952&r2=799953&view=diff
==============================================================================
--- lucene/java/trunk/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsFilterBuilder.java (original)
+++ lucene/java/trunk/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsFilterBuilder.java Sat Aug  1 22:52:32 2009
@@ -4,8 +4,8 @@
 import java.io.StringReader;
 
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.tokenattributes.TermAttribute;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.TermsFilter;
@@ -54,19 +54,19 @@
 		String text = DOMUtils.getNonBlankTextOrFail(e);
 		String fieldName = DOMUtils.getAttributeWithInheritanceOrFail(e, "fieldName");
 		TokenStream ts = analyzer.tokenStream(fieldName, new StringReader(text));
-
+		TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
+
 		try
 		{
-                  final Token reusableToken = new Token();
 			Term term = null;
-	                for (Token nextToken = ts.next(reusableToken); nextToken != null; nextToken = ts.next(reusableToken)) {
+			while (ts.incrementToken()) {
 				if (term == null)
 				{
-					term = new Term(fieldName, nextToken.term());
+					term = new Term(fieldName, termAtt.term());
 				} else
 				{
 //					 create from previous to save fieldName.intern overhead
-					term = term.createTerm(nextToken.term()); 
+					term = term.createTerm(termAtt.term()); 
 				}
 				tf.addTerm(term);
 			}
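
The createTerm() idiom that TermsFilterBuilder (and TermsQueryBuilder below) keeps across this change is easy to miss: constructing a Term interns its field name, so after the first token each subsequent Term is derived from the previous one to reuse the already-interned field. Sketched under the same illustrative assumptions as above:

    import java.io.IOException;
    import java.io.StringReader;

    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.analysis.tokenattributes.TermAttribute;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.TermsFilter;

    public class TermsFilterSketch {
      public static TermsFilter build(String fieldName, String text) throws IOException {
        TermsFilter tf = new TermsFilter();
        TokenStream ts = new WhitespaceAnalyzer().tokenStream(fieldName, new StringReader(text));
        TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
        Term term = null;
        while (ts.incrementToken()) {
          if (term == null) {
            // First token: construct the Term, interning fieldName once.
            term = new Term(fieldName, termAtt.term());
          } else {
            // Later tokens: createTerm() reuses the already-interned field name.
            term = term.createTerm(termAtt.term());
          }
          tf.addTerm(term);
        }
        ts.close();
        return tf;
      }
    }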

Modified: lucene/java/trunk/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsQueryBuilder.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsQueryBuilder.java?rev=799953&r1=799952&r2=799953&view=diff
==============================================================================
--- lucene/java/trunk/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsQueryBuilder.java (original)
+++ lucene/java/trunk/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsQueryBuilder.java Sat Aug  1 22:52:32 2009
@@ -4,8 +4,8 @@
 import java.io.StringReader;
 
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.tokenattributes.TermAttribute;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
@@ -57,16 +57,16 @@
 		TokenStream ts = analyzer.tokenStream(fieldName, new StringReader(text));
 		try
 		{
-                  final Token reusableToken = new Token();
+			TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
 			Term term = null;
-	                for (Token nextToken = ts.next(reusableToken); nextToken != null; nextToken = ts.next(reusableToken)) {
+			while (ts.incrementToken()) {
 				if (term == null)
 				{
-					term = new Term(fieldName, nextToken.term());
+					term = new Term(fieldName, termAtt.term());
 				} else
 				{
 //					 create from previous to save fieldName.intern overhead
-					term = term.createTerm(nextToken.term()); 
+					term = term.createTerm(termAtt.term()); 
 				}
 				bq.add(new BooleanClause(new TermQuery(term),BooleanClause.Occur.SHOULD));
 			}