Posted to commits@lucene.apache.org by rm...@apache.org on 2011/03/21 14:52:16 UTC

svn commit: r1083784 - in /lucene/dev/trunk: lucene/contrib/memory/src/java/org/apache/lucene/index/memory/ lucene/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/ lucene/src/java/org/apache/lucene/analysis/ lucene/src/java/org/a...

Author: rmuir
Date: Mon Mar 21 13:52:15 2011
New Revision: 1083784

URL: http://svn.apache.org/viewvc?rev=1083784&view=rev
Log:
LUCENE-2944: fix BytesRef reuse bugs, TermToBytesRefAttribute owns the bytes like other attributes
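
In short: a consumer now calls getBytesRef() once up-front to obtain the attribute's reusable BytesRef, calls fillBytesRef() for each token, and copies the bytes only if they must survive the next incrementToken(). A minimal consumer-side sketch of that protocol (illustrative only, not part of the patch; the class and method names below are made up):

    import java.io.IOException;
    import java.io.StringReader;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
    import org.apache.lucene.util.BytesRef;

    // hypothetical helper, not part of this commit
    public final class TermCollector {
      public static List<BytesRef> collectTerms(Analyzer analyzer, String field, String text) throws IOException {
        List<BytesRef> terms = new ArrayList<BytesRef>();
        TokenStream stream = analyzer.tokenStream(field, new StringReader(text));
        TermToBytesRefAttribute termAtt = stream.addAttribute(TermToBytesRefAttribute.class);
        BytesRef bytes = termAtt.getBytesRef(); // fetched once; the attribute owns and reuses this buffer
        stream.reset();
        while (stream.incrementToken()) {
          termAtt.fillBytesRef();               // encodes the current term into the shared BytesRef
          terms.add(new BytesRef(bytes));       // deep copy, because the buffer is rewritten on the next token
        }
        stream.close();
        return terms;
      }
    }

The same pattern is applied at each call site below: the per-token "new BytesRef()" allocations are removed, and a copy is made only where the bytes escape the tokenization loop.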

Modified:
    lucene/dev/trunk/lucene/contrib/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
    lucene/dev/trunk/lucene/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/SpanOrTermsBuilder.java
    lucene/dev/trunk/lucene/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsFilterBuilder.java
    lucene/dev/trunk/lucene/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsQueryBuilder.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/analysis/NumericTokenStream.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/analysis/tokenattributes/CharTermAttributeImpl.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/analysis/tokenattributes/TermToBytesRefAttribute.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermsHashPerField.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermsHashPerThread.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/queryParser/QueryParserBase.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/search/QueryTermVector.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/analysis/TestNumericTokenStream.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/Test2BTerms.java
    lucene/dev/trunk/modules/analysis/common/src/java/org/apache/lucene/collation/tokenattributes/CollatedTermAttributeImpl.java
    lucene/dev/trunk/modules/analysis/common/src/test/org/apache/lucene/collation/CollationTestBase.java
    lucene/dev/trunk/modules/analysis/icu/src/java/org/apache/lucene/collation/tokenattributes/ICUCollatedTermAttributeImpl.java
    lucene/dev/trunk/solr/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java
    lucene/dev/trunk/solr/src/webapp/web/admin/analysis.jsp

Modified: lucene/dev/trunk/lucene/contrib/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/contrib/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/contrib/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java (original)
+++ lucene/dev/trunk/lucene/contrib/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java Mon Mar 21 13:52:15 2011
@@ -353,10 +353,10 @@ public class MemoryIndex {
       TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
       PositionIncrementAttribute posIncrAttribute = stream.addAttribute(PositionIncrementAttribute.class);
       OffsetAttribute offsetAtt = stream.addAttribute(OffsetAttribute.class);
-      BytesRef ref = new BytesRef(10);
+      BytesRef ref = termAtt.getBytesRef();
       stream.reset();
       while (stream.incrementToken()) {
-        termAtt.toBytesRef(ref);
+        termAtt.fillBytesRef();
         if (ref.length == 0) continue; // nothing to do
 //        if (DEBUG) System.err.println("token='" + term + "'");
         numTokens++;

Modified: lucene/dev/trunk/lucene/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/SpanOrTermsBuilder.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/SpanOrTermsBuilder.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/SpanOrTermsBuilder.java (original)
+++ lucene/dev/trunk/lucene/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/SpanOrTermsBuilder.java Mon Mar 21 13:52:15 2011
@@ -58,11 +58,10 @@ public class SpanOrTermsBuilder extends 
 			ArrayList<SpanQuery> clausesList=new ArrayList<SpanQuery>();
 			TokenStream ts=analyzer.tokenStream(fieldName,new StringReader(value));
 			TermToBytesRefAttribute termAtt = ts.addAttribute(TermToBytesRefAttribute.class);
-			
+      BytesRef bytes = termAtt.getBytesRef();
 	    while (ts.incrementToken()) {
-	        BytesRef term = new BytesRef();
-	        termAtt.toBytesRef(term);
-			    SpanTermQuery stq=new SpanTermQuery(new Term(fieldName, term));
+	        termAtt.fillBytesRef();
+			    SpanTermQuery stq=new SpanTermQuery(new Term(fieldName, new BytesRef(bytes)));
 			    clausesList.add(stq);
 			}
 			SpanOrQuery soq=new SpanOrQuery(clausesList.toArray(new SpanQuery[clausesList.size()]));

Modified: lucene/dev/trunk/lucene/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsFilterBuilder.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsFilterBuilder.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsFilterBuilder.java (original)
+++ lucene/dev/trunk/lucene/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsFilterBuilder.java Mon Mar 21 13:52:15 2011
@@ -63,16 +63,16 @@ public class TermsFilterBuilder implemen
 		try
 		{
 			Term term = null;
+      BytesRef bytes = termAtt.getBytesRef();
 	      while (ts.incrementToken()) {
-	        BytesRef bytes = new BytesRef();
-	        termAtt.toBytesRef(bytes);
+	        termAtt.fillBytesRef();
 				if (term == null)
 				{
-					term = new Term(fieldName, bytes);
+					term = new Term(fieldName, new BytesRef(bytes));
 				} else
 				{
 //					 create from previous to save fieldName.intern overhead
-					term = term.createTerm(bytes); 
+					term = term.createTerm(new BytesRef(bytes)); 
 				}
 				tf.addTerm(term);
 			}

Modified: lucene/dev/trunk/lucene/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsQueryBuilder.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsQueryBuilder.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsQueryBuilder.java (original)
+++ lucene/dev/trunk/lucene/contrib/xml-query-parser/src/java/org/apache/lucene/xmlparser/builders/TermsQueryBuilder.java Mon Mar 21 13:52:15 2011
@@ -60,16 +60,16 @@ public class TermsQueryBuilder implement
 		{
 		  TermToBytesRefAttribute termAtt = ts.addAttribute(TermToBytesRefAttribute.class);
 			Term term = null;
+      BytesRef bytes = termAtt.getBytesRef();
 			while (ts.incrementToken()) {
-        BytesRef bytes = new BytesRef();
-        termAtt.toBytesRef(bytes);
+        termAtt.fillBytesRef();
 				if (term == null)
 				{
-					term = new Term(fieldName, bytes);
+					term = new Term(fieldName, new BytesRef(bytes));
 				} else
 				{
 //					 create from previous to save fieldName.intern overhead
-					term = term.createTerm(bytes); 
+					term = term.createTerm(new BytesRef(bytes)); 
 				}
 				bq.add(new BooleanClause(new TermQuery(term),BooleanClause.Occur.SHOULD));
 			}

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/analysis/NumericTokenStream.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/analysis/NumericTokenStream.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/analysis/NumericTokenStream.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/analysis/NumericTokenStream.java Mon Mar 21 13:52:15 2011
@@ -142,8 +142,13 @@ public final class NumericTokenStream ex
   public static final class NumericTermAttributeImpl extends AttributeImpl implements NumericTermAttribute,TermToBytesRefAttribute {
     private long value = 0L;
     private int valueSize = 0, shift = 0, precisionStep = 0;
+    private BytesRef bytes = new BytesRef();
+
+    public BytesRef getBytesRef() {
+      return bytes;
+    }
     
-    public int toBytesRef(BytesRef bytes) {
+    public int fillBytesRef() {
       try {
         assert valueSize == 64 || valueSize == 32;
         return (valueSize == 64) ? 
@@ -180,8 +185,7 @@ public final class NumericTokenStream ex
     
     @Override
     public void reflectWith(AttributeReflector reflector) {
-      final BytesRef bytes = new BytesRef();
-      toBytesRef(bytes);
+      fillBytesRef();
       reflector.reflect(TermToBytesRefAttribute.class, "bytes", bytes);
       reflector.reflect(NumericTermAttribute.class, "shift", shift);
       reflector.reflect(NumericTermAttribute.class, "rawValue", getRawValue());

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/analysis/tokenattributes/CharTermAttributeImpl.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/analysis/tokenattributes/CharTermAttributeImpl.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/analysis/tokenattributes/CharTermAttributeImpl.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/analysis/tokenattributes/CharTermAttributeImpl.java Mon Mar 21 13:52:15 2011
@@ -77,8 +77,16 @@ public class CharTermAttributeImpl exten
   }
   
   // *** TermToBytesRefAttribute interface ***
-  public int toBytesRef(BytesRef target) {
-    return UnicodeUtil.UTF16toUTF8WithHash(termBuffer, 0, termLength, target);
+  private BytesRef bytes = new BytesRef(MIN_BUFFER_SIZE);
+
+  @Override
+  public int fillBytesRef() {
+    return UnicodeUtil.UTF16toUTF8WithHash(termBuffer, 0, termLength, bytes);
+  }
+
+  @Override
+  public BytesRef getBytesRef() {
+    return bytes;
   }
   
   // *** CharSequence interface ***
@@ -205,6 +213,7 @@ public class CharTermAttributeImpl exten
     // Do a deep clone
     t.termBuffer = new char[this.termLength];
     System.arraycopy(this.termBuffer, 0, t.termBuffer, 0, this.termLength);
+    t.bytes = new BytesRef(bytes);
     return t;
   }
   
@@ -246,9 +255,8 @@ public class CharTermAttributeImpl exten
   @Override
   public void reflectWith(AttributeReflector reflector) {
     reflector.reflect(CharTermAttribute.class, "term", toString());
-    final BytesRef bytes = new BytesRef();
-    toBytesRef(bytes);
-    reflector.reflect(TermToBytesRefAttribute.class, "bytes", bytes);
+    fillBytesRef();
+    reflector.reflect(TermToBytesRefAttribute.class, "bytes", new BytesRef(bytes));
   }
   
   @Override

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/analysis/tokenattributes/TermToBytesRefAttribute.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/analysis/tokenattributes/TermToBytesRefAttribute.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/analysis/tokenattributes/TermToBytesRefAttribute.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/analysis/tokenattributes/TermToBytesRefAttribute.java Mon Mar 21 13:52:15 2011
@@ -22,18 +22,39 @@ import org.apache.lucene.util.BytesRef;
 
 /**
  * This attribute is requested by TermsHashPerField to index the contents.
- * This attribute has no real state, it should be implemented in addition to
- * {@link CharTermAttribute}, to support indexing the term text as
- * UTF-8 bytes.
+ * This attribute can be used to customize the final byte[] encoding of terms.
+ * <p>
+ * Consumers of this attribute call {@link #getBytesRef()} up-front, and then
+ * invoke {@link #fillBytesRef()} for each term. Example:
+ * <pre class="prettyprint">
+ *   final TermToBytesRefAttribute termAtt = tokenStream.getAttribute(TermToBytesRefAttribute.class);
+ *   final BytesRef bytes = termAtt.getBytesRef();
+ *
+ *   while (tokenStream.incrementToken()) {
+ *
+ *     // you must call termAtt.fillBytesRef() before doing something with the bytes.
+ *     // this encodes the term value (internally it might be a char[], etc) into the bytes.
+ *     int hashCode = termAtt.fillBytesRef();
+ *
+ *     if (isInteresting(bytes)) {
+ *     
+ *       // because the bytes are reused by the attribute (like CharTermAttribute's char[] buffer),
+ *       // you should make a copy if you need persistent access to the bytes, otherwise they will
+ *       // be rewritten across calls to incrementToken()
+ *
+ *       doSomethingWith(new BytesRef(bytes));
+ *     }
+ *   }
+ *   ...
+ * </pre>
  * @lucene.experimental This is a very expert API, please use
  * {@link CharTermAttributeImpl} and its implementation of this method
  * for UTF-8 terms.
  */
 public interface TermToBytesRefAttribute extends Attribute {
-  /** Copies the token's term text into the given {@link BytesRef}.
-   * @param termBytes destination to write the bytes to (UTF-8 for text terms).
-   * The length of the BytesRef's buffer may be not large enough, so you need to grow.
-   * The parameters' {@code bytes} is guaranteed to be not {@code null}.
+  /** 
+   * Updates the bytes {@link #getBytesRef()} to contain this term's
+   * final encoding, and returns its hashcode.
    * @return the hashcode as defined by {@link BytesRef#hashCode}:
    * <pre>
    *  int hash = 0;
@@ -45,5 +66,12 @@ public interface TermToBytesRefAttribute
    * the hash on-the-fly. If this is not the case, just return
    * {@code termBytes.hashCode()}.
    */
-  public int toBytesRef(BytesRef termBytes);
+  public int fillBytesRef();
+  
+  /**
+   * Retrieve this attribute's BytesRef. The bytes are updated 
+   * from the current term when the consumer calls {@link #fillBytesRef()}.
+   * @return this Attribute's internal BytesRef.
+   */
+  public BytesRef getBytesRef();
 }
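
For implementers, the new contract is: the attribute owns a single BytesRef, hands it out from getBytesRef(), re-encodes the current term into it on every fillBytesRef() call, and returns the hash of the filled bytes as defined by BytesRef#hashCode. A bare-bones sketch of such an implementation (hypothetical class, not part of this commit; it mirrors MyTermAttributeImpl in Test2BTerms below):

    import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
    import org.apache.lucene.util.AttributeImpl;
    import org.apache.lucene.util.BytesRef;

    // hypothetical implementation sketch, not part of this commit
    public final class FixedTermAttributeImpl extends AttributeImpl implements TermToBytesRefAttribute {
      private final BytesRef bytes = new BytesRef(10); // the attribute owns and reuses these bytes

      public BytesRef getBytesRef() {
        return bytes;                  // handed out once; consumers keep this reference across tokens
      }

      public int fillBytesRef() {
        // a real implementation would re-encode the current term into "bytes" here
        return bytes.hashCode();       // contract: hash of the freshly filled bytes
      }

      @Override
      public void clear() {}

      @Override
      public void copyTo(AttributeImpl target) {}

      @Override
      public boolean equals(Object other) {
        return other == this;
      }

      @Override
      public int hashCode() {
        return System.identityHashCode(this);
      }
    }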

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermsHashPerField.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermsHashPerField.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermsHashPerField.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermsHashPerField.java Mon Mar 21 13:52:15 2011
@@ -39,6 +39,7 @@ final class TermsHashPerField extends In
   final DocumentsWriter.DocState docState;
   final FieldInvertState fieldState;
   TermToBytesRefAttribute termAtt;
+  BytesRef termBytesRef;
 
   // Copied from our perThread
   final IntBlockPool intPool;
@@ -53,7 +54,6 @@ final class TermsHashPerField extends In
   final BytesRefHash bytesHash;
  
   ParallelPostingsArray postingsArray;
-  private final BytesRef termBytesRef;
   private final AtomicLong bytesUsed;
 
   public TermsHashPerField(DocInverterPerField docInverterPerField, final TermsHashPerThread perThread, final TermsHashPerThread nextPerThread, final FieldInfo fieldInfo) {
@@ -70,7 +70,6 @@ final class TermsHashPerField extends In
     bytesHash = new BytesRefHash(termBytePool, HASH_INIT_SIZE, byteStarts); 
     streamCount = consumer.getStreamCount();
     numPostingInt = 2*streamCount;
-    termBytesRef = perThread.termBytesRef;
     this.fieldInfo = fieldInfo;
     if (nextPerThread != null)
       nextPerField = (TermsHashPerField) nextPerThread.addField(docInverterPerField, fieldInfo);
@@ -119,6 +118,7 @@ final class TermsHashPerField extends In
   @Override
   void start(Fieldable f) {
     termAtt = fieldState.attributeSource.getAttribute(TermToBytesRefAttribute.class);
+    termBytesRef = termAtt.getBytesRef();
     consumer.start(f);
     if (nextPerField != null) {
       nextPerField.start(f);
@@ -181,7 +181,7 @@ final class TermsHashPerField extends In
     // Get the text & hash of this term.
     int termID;
     try{
-       termID = bytesHash.add(termBytesRef, termAtt.toBytesRef(termBytesRef));
+       termID = bytesHash.add(termBytesRef, termAtt.fillBytesRef());
     }catch (MaxBytesLengthExceededException e) {
       // Not enough room in current block
       // Just skip this term, to remain as robust as

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermsHashPerThread.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermsHashPerThread.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermsHashPerThread.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermsHashPerThread.java Mon Mar 21 13:52:15 2011
@@ -18,7 +18,6 @@ package org.apache.lucene.index;
  */
 
 import org.apache.lucene.util.ByteBlockPool;
-import org.apache.lucene.util.BytesRef;
 
 import java.io.IOException;
 
@@ -35,8 +34,6 @@ final class TermsHashPerThread extends I
   
   final boolean primary;
   final DocumentsWriter.DocState docState;
-  // Used by perField to obtain terms from the analysis chain
-  final BytesRef termBytesRef = new BytesRef(10);
 
   public TermsHashPerThread(DocInverterPerThread docInverterPerThread, final TermsHash termsHash, final TermsHash nextTermsHash, final TermsHashPerThread primaryPerThread) {
     docState = docInverterPerThread.docState;

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/queryParser/QueryParserBase.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/queryParser/QueryParserBase.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/queryParser/QueryParserBase.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/queryParser/QueryParserBase.java Mon Mar 21 13:52:15 2011
@@ -532,18 +532,19 @@ public abstract class QueryParserBase {
       // ignore
     }
 
+    BytesRef bytes = termAtt == null ? null : termAtt.getBytesRef();
+
     if (numTokens == 0)
       return null;
     else if (numTokens == 1) {
-      BytesRef term = new BytesRef();
       try {
         boolean hasNext = buffer.incrementToken();
         assert hasNext == true;
-        termAtt.toBytesRef(term);
+        termAtt.fillBytesRef();
       } catch (IOException e) {
         // safe to ignore, because we know the number of tokens
       }
-      return newTermQuery(new Term(field, term));
+      return newTermQuery(new Term(field, new BytesRef(bytes)));
     } else {
       if (severalTokensAtSamePosition || (!quoted && !autoGeneratePhraseQueries)) {
         if (positionCount == 1 || (!quoted && !autoGeneratePhraseQueries)) {
@@ -554,17 +555,15 @@ public abstract class QueryParserBase {
             BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD;
 
           for (int i = 0; i < numTokens; i++) {
-            BytesRef term = new BytesRef();
             try {
               boolean hasNext = buffer.incrementToken();
               assert hasNext == true;
-              termAtt.toBytesRef(term);
+              termAtt.fillBytesRef();
             } catch (IOException e) {
               // safe to ignore, because we know the number of tokens
             }
-
             Query currentQuery = newTermQuery(
-                new Term(field, term));
+                new Term(field, new BytesRef(bytes)));
             q.add(currentQuery, occur);
           }
           return q;
@@ -576,12 +575,11 @@ public abstract class QueryParserBase {
           List<Term> multiTerms = new ArrayList<Term>();
           int position = -1;
           for (int i = 0; i < numTokens; i++) {
-            BytesRef term = new BytesRef();
             int positionIncrement = 1;
             try {
               boolean hasNext = buffer.incrementToken();
               assert hasNext == true;
-              termAtt.toBytesRef(term);
+              termAtt.fillBytesRef();
               if (posIncrAtt != null) {
                 positionIncrement = posIncrAtt.getPositionIncrement();
               }
@@ -598,7 +596,7 @@ public abstract class QueryParserBase {
               multiTerms.clear();
             }
             position += positionIncrement;
-            multiTerms.add(new Term(field, term));
+            multiTerms.add(new Term(field, new BytesRef(bytes)));
           }
           if (enablePositionIncrements) {
             mpq.add(multiTerms.toArray(new Term[0]),position);
@@ -613,15 +611,13 @@ public abstract class QueryParserBase {
         pq.setSlop(phraseSlop);
         int position = -1;
 
-
         for (int i = 0; i < numTokens; i++) {
-          BytesRef term = new BytesRef();
           int positionIncrement = 1;
 
           try {
             boolean hasNext = buffer.incrementToken();
             assert hasNext == true;
-            termAtt.toBytesRef(term);
+            termAtt.fillBytesRef();
             if (posIncrAtt != null) {
               positionIncrement = posIncrAtt.getPositionIncrement();
             }
@@ -631,9 +627,9 @@ public abstract class QueryParserBase {
 
           if (enablePositionIncrements) {
             position += positionIncrement;
-            pq.add(new Term(field, term),position);
+            pq.add(new Term(field, new BytesRef(bytes)),position);
           } else {
-            pq.add(new Term(field, term));
+            pq.add(new Term(field, new BytesRef(bytes)));
           }
         }
         return pq;
@@ -796,13 +792,13 @@ public abstract class QueryParserBase {
       source = analyzer.tokenStream(field, new StringReader(part));
     }
       
-    BytesRef result = new BytesRef();
     TermToBytesRefAttribute termAtt = source.getAttribute(TermToBytesRefAttribute.class);
-      
+    BytesRef bytes = termAtt.getBytesRef();
+
     try {
       if (!source.incrementToken())
         throw new IllegalArgumentException("analyzer returned no terms for range part: " + part);
-      termAtt.toBytesRef(result);
+      termAtt.fillBytesRef();
       if (source.incrementToken())
         throw new IllegalArgumentException("analyzer returned too many terms for range part: " + part);
     } catch (IOException e) {
@@ -812,8 +808,8 @@ public abstract class QueryParserBase {
     try {
       source.close();
     } catch (IOException ignored) {}
-      
-    return result;
+    
+    return new BytesRef(bytes);
   }
 
   /**

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/search/QueryTermVector.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/search/QueryTermVector.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/search/QueryTermVector.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/search/QueryTermVector.java Mon Mar 21 13:52:15 2011
@@ -66,10 +66,10 @@ public class QueryTermVector implements 
           final TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
 
           hasMoreTokens = stream.incrementToken();
+          BytesRef bytes = termAtt.getBytesRef();
           while (hasMoreTokens) {
-            BytesRef bytes = new BytesRef();
-            termAtt.toBytesRef(bytes);
-            terms.add(bytes);
+            termAtt.fillBytesRef();
+            terms.add(new BytesRef(bytes));
             hasMoreTokens = stream.incrementToken();
           }
           processTerms(terms.toArray(new BytesRef[terms.size()]));

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/analysis/TestNumericTokenStream.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/analysis/TestNumericTokenStream.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/analysis/TestNumericTokenStream.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/analysis/TestNumericTokenStream.java Mon Mar 21 13:52:15 2011
@@ -35,13 +35,13 @@ public class TestNumericTokenStream exte
     final TermToBytesRefAttribute bytesAtt = stream.getAttribute(TermToBytesRefAttribute.class);
     final TypeAttribute typeAtt = stream.getAttribute(TypeAttribute.class);
     final NumericTokenStream.NumericTermAttribute numericAtt = stream.getAttribute(NumericTokenStream.NumericTermAttribute.class);
-    final BytesRef bytes = new BytesRef();
+    final BytesRef bytes = bytesAtt.getBytesRef();
     stream.reset();
     assertEquals(64, numericAtt.getValueSize());
     for (int shift=0; shift<64; shift+=NumericUtils.PRECISION_STEP_DEFAULT) {
       assertTrue("New token is available", stream.incrementToken());
       assertEquals("Shift value wrong", shift, numericAtt.getShift());
-      final int hash = bytesAtt.toBytesRef(bytes);
+      final int hash = bytesAtt.fillBytesRef();
       assertEquals("Hash incorrect", bytes.hashCode(), hash);
       assertEquals("Term is incorrectly encoded", lvalue & ~((1L << shift) - 1L), NumericUtils.prefixCodedToLong(bytes));
       assertEquals("Term raw value is incorrectly encoded", lvalue & ~((1L << shift) - 1L), numericAtt.getRawValue());
@@ -58,13 +58,13 @@ public class TestNumericTokenStream exte
     final TermToBytesRefAttribute bytesAtt = stream.getAttribute(TermToBytesRefAttribute.class);
     final TypeAttribute typeAtt = stream.getAttribute(TypeAttribute.class);
     final NumericTokenStream.NumericTermAttribute numericAtt = stream.getAttribute(NumericTokenStream.NumericTermAttribute.class);
-    final BytesRef bytes = new BytesRef();
+    final BytesRef bytes = bytesAtt.getBytesRef();
     stream.reset();
     assertEquals(32, numericAtt.getValueSize());
     for (int shift=0; shift<32; shift+=NumericUtils.PRECISION_STEP_DEFAULT) {
       assertTrue("New token is available", stream.incrementToken());
       assertEquals("Shift value wrong", shift, numericAtt.getShift());
-      final int hash = bytesAtt.toBytesRef(bytes);
+      final int hash = bytesAtt.fillBytesRef();
       assertEquals("Hash incorrect", bytes.hashCode(), hash);
       assertEquals("Term is incorrectly encoded", ivalue & ~((1 << shift) - 1), NumericUtils.prefixCodedToInt(bytes));
       assertEquals("Term raw value is incorrectly encoded", ((long) ivalue) & ~((1L << shift) - 1L), numericAtt.getRawValue());

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/Test2BTerms.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/Test2BTerms.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/Test2BTerms.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/Test2BTerms.java Mon Mar 21 13:52:15 2011
@@ -74,12 +74,14 @@ public class Test2BTerms extends LuceneT
     }
 
     private final static class MyTermAttributeImpl extends AttributeImpl implements TermToBytesRefAttribute {
-      public int toBytesRef(BytesRef bs) {
-        bs.bytes = bytes.bytes;
-        bs.offset = bytes.offset;
-        bs.length = bytes.length;
+      public int fillBytesRef() {
         return bytes.hashCode();
       }
+      
+      public BytesRef getBytesRef() {
+        return bytes;
+      }
+
       @Override
       public void clear() {
       }

Modified: lucene/dev/trunk/modules/analysis/common/src/java/org/apache/lucene/collation/tokenattributes/CollatedTermAttributeImpl.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/modules/analysis/common/src/java/org/apache/lucene/collation/tokenattributes/CollatedTermAttributeImpl.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/modules/analysis/common/src/java/org/apache/lucene/collation/tokenattributes/CollatedTermAttributeImpl.java (original)
+++ lucene/dev/trunk/modules/analysis/common/src/java/org/apache/lucene/collation/tokenattributes/CollatedTermAttributeImpl.java Mon Mar 21 13:52:15 2011
@@ -40,11 +40,12 @@ public class CollatedTermAttributeImpl e
   }
   
   @Override
-  public int toBytesRef(BytesRef target) {
-    target.bytes = collator.getCollationKey(toString()).toByteArray();
-    target.offset = 0;
-    target.length = target.bytes.length;
-    return target.hashCode();
+  public int fillBytesRef() {
+    BytesRef bytes = getBytesRef();
+    bytes.bytes = collator.getCollationKey(toString()).toByteArray();
+    bytes.offset = 0;
+    bytes.length = bytes.bytes.length;
+    return bytes.hashCode();
   }
 
 }

Modified: lucene/dev/trunk/modules/analysis/common/src/test/org/apache/lucene/collation/CollationTestBase.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/modules/analysis/common/src/test/org/apache/lucene/collation/CollationTestBase.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/modules/analysis/common/src/test/org/apache/lucene/collation/CollationTestBase.java (original)
+++ lucene/dev/trunk/modules/analysis/common/src/test/org/apache/lucene/collation/CollationTestBase.java Mon Mar 21 13:52:15 2011
@@ -283,7 +283,6 @@ public abstract class CollationTestBase 
     int numTestPoints = 100;
     int numThreads = _TestUtil.nextInt(random, 3, 5);
     final HashMap<String,BytesRef> map = new HashMap<String,BytesRef>();
-    BytesRef spare = new BytesRef();
     
     // create a map<String,SortKey> up front.
     // then with multiple threads, generate sort keys for all the keys in the map
@@ -292,12 +291,13 @@ public abstract class CollationTestBase 
     for (int i = 0; i < numTestPoints; i++) {
       String term = randomString();
       TokenStream ts = analyzer.reusableTokenStream("fake", new StringReader(term));
-      TermToBytesRefAttribute bytes = ts.addAttribute(TermToBytesRefAttribute.class);
+      TermToBytesRefAttribute termAtt = ts.addAttribute(TermToBytesRefAttribute.class);
+      BytesRef bytes = termAtt.getBytesRef();
       ts.reset();
       assertTrue(ts.incrementToken());
-      bytes.toBytesRef(spare);
+      termAtt.fillBytesRef();
       // ensure we make a copy of the actual bytes too
-      map.put(term, new BytesRef(spare));
+      map.put(term, new BytesRef(bytes));
     }
     
     Thread threads[] = new Thread[numThreads];
@@ -306,16 +306,16 @@ public abstract class CollationTestBase 
         @Override
         public void run() {
           try {
-            BytesRef spare = new BytesRef();
             for (Map.Entry<String,BytesRef> mapping : map.entrySet()) {
               String term = mapping.getKey();
               BytesRef expected = mapping.getValue();
               TokenStream ts = analyzer.reusableTokenStream("fake", new StringReader(term));
-              TermToBytesRefAttribute bytes = ts.addAttribute(TermToBytesRefAttribute.class);
+              TermToBytesRefAttribute termAtt = ts.addAttribute(TermToBytesRefAttribute.class);
+              BytesRef bytes = termAtt.getBytesRef();
               ts.reset();
               assertTrue(ts.incrementToken());
-              bytes.toBytesRef(spare);
-              assertEquals(expected, spare);
+              termAtt.fillBytesRef();
+              assertEquals(expected, bytes);
             }
           } catch (IOException e) {
             throw new RuntimeException(e);

Modified: lucene/dev/trunk/modules/analysis/icu/src/java/org/apache/lucene/collation/tokenattributes/ICUCollatedTermAttributeImpl.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/modules/analysis/icu/src/java/org/apache/lucene/collation/tokenattributes/ICUCollatedTermAttributeImpl.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/modules/analysis/icu/src/java/org/apache/lucene/collation/tokenattributes/ICUCollatedTermAttributeImpl.java (original)
+++ lucene/dev/trunk/modules/analysis/icu/src/java/org/apache/lucene/collation/tokenattributes/ICUCollatedTermAttributeImpl.java Mon Mar 21 13:52:15 2011
@@ -30,7 +30,7 @@ import com.ibm.icu.text.RawCollationKey;
 public class ICUCollatedTermAttributeImpl extends CharTermAttributeImpl {
   private final Collator collator;
   private final RawCollationKey key = new RawCollationKey();
-  
+
   /**
    * Create a new ICUCollatedTermAttributeImpl
    * @param collator Collation key generator
@@ -43,13 +43,14 @@ public class ICUCollatedTermAttributeImp
       throw new RuntimeException(e);
     }
   }
-  
+
   @Override
-  public int toBytesRef(BytesRef target) {
+  public int fillBytesRef() {
+    BytesRef bytes = getBytesRef();
     collator.getRawCollationKey(toString(), key);
-    target.bytes = key.bytes;
-    target.offset = 0;
-    target.length = key.size;
-    return target.hashCode();
+    bytes.bytes = key.bytes;
+    bytes.offset = 0;
+    bytes.length = key.size;
+    return bytes.hashCode();
   }
 }

Modified: lucene/dev/trunk/solr/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/solr/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java (original)
+++ lucene/dev/trunk/solr/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java Mon Mar 21 13:52:15 2011
@@ -38,7 +38,6 @@ import org.apache.solr.common.SolrExcept
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.schema.FieldType;
-import org.apache.solr.util.ByteUtils;
 
 import org.apache.noggit.CharArr;
 
@@ -141,12 +140,12 @@ public abstract class AnalysisRequestHan
     final Set<BytesRef> tokens = new HashSet<BytesRef>();
     final TokenStream tokenStream = analyzer.tokenStream("", new StringReader(query));
     final TermToBytesRefAttribute bytesAtt = tokenStream.getAttribute(TermToBytesRefAttribute.class);
+    final BytesRef bytes = bytesAtt.getBytesRef();
     try {
       tokenStream.reset();
       while (tokenStream.incrementToken()) {
-        final BytesRef bytes = new BytesRef();
-        bytesAtt.toBytesRef(bytes);
-        tokens.add(bytes);
+        bytesAtt.fillBytesRef();
+        tokens.add(new BytesRef(bytes));
       }
     } catch (IOException ioe) {
       throw new RuntimeException("Error occured while iterating over tokenstream", ioe);
@@ -236,12 +235,13 @@ public abstract class AnalysisRequestHan
 
     FieldType fieldType = context.getFieldType();
 
-    final BytesRef rawBytes = new BytesRef();
     final CharArr textBuf = new CharArr();
     for (int i = 0, c = tokens.size(); i < c; i++) {
       AttributeSource token = tokens.get(i);
       final NamedList<Object> tokenNamedList = new SimpleOrderedMap<Object>();
-      token.getAttribute(TermToBytesRefAttribute.class).toBytesRef(rawBytes);
+      final TermToBytesRefAttribute termAtt = token.getAttribute(TermToBytesRefAttribute.class);
+      BytesRef rawBytes = termAtt.getBytesRef();
+      termAtt.fillBytesRef();
 
       textBuf.reset();
       fieldType.indexedToReadable(rawBytes, textBuf);

Modified: lucene/dev/trunk/solr/src/webapp/web/admin/analysis.jsp
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/src/webapp/web/admin/analysis.jsp?rev=1083784&r1=1083783&r2=1083784&view=diff
==============================================================================
--- lucene/dev/trunk/solr/src/webapp/web/admin/analysis.jsp (original)
+++ lucene/dev/trunk/solr/src/webapp/web/admin/analysis.jsp Mon Mar 21 13:52:15 2011
@@ -156,10 +156,10 @@
       TermToBytesRefAttribute bytesAtt = tstream.getAttribute(TermToBytesRefAttribute.class);
       tstream.reset();
       matches = new HashSet<BytesRef>();
+      final BytesRef bytes = bytesAtt.getBytesRef();
       while (tstream.incrementToken()) {
-        final BytesRef bytes = new BytesRef();
-        bytesAtt.toBytesRef(bytes);
-        matches.add(bytes);
+        bytesAtt.fillBytesRef();
+        matches.add(new BytesRef(bytes));
       }
     }
 
@@ -273,14 +273,17 @@
   }
   
   private static class Tok {
-    final BytesRef bytes = new BytesRef();
+    final BytesRef bytes;
     final String rawText, text;
     final int pos;
     final List<ReflectItem> reflected = new ArrayList<ReflectItem>();
     
     Tok(AttributeSource token, int pos, FieldType ft) {
       this.pos = pos;
-      token.getAttribute(TermToBytesRefAttribute.class).toBytesRef(bytes);
+      TermToBytesRefAttribute termAtt = token.getAttribute(TermToBytesRefAttribute.class);
+      BytesRef spare = termAtt.getBytesRef();
+	  termAtt.fillBytesRef();
+	  bytes = new BytesRef(spare);
       rawText = (token.hasAttribute(CharTermAttribute.class)) ?
         token.getAttribute(CharTermAttribute.class).toString() : null;
       final CharArr textBuf = new CharArr(bytes.length);