Posted to commits@lucene.apache.org by ma...@apache.org on 2011/11/21 00:45:35 UTC

svn commit: r1204297 [9/10] - in /lucene/dev/branches/solrcloud: ./ dev-tools/eclipse/ dev-tools/idea/lucene/contrib/ dev-tools/idea/lucene/contrib/instantiated/ dev-tools/maven/lucene/contrib/ dev-tools/maven/lucene/contrib/instantiated/ lucene/ lucen...

Modified: lucene/dev/branches/solrcloud/modules/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyWriter.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyWriter.java (original)
+++ lucene/dev/branches/solrcloud/modules/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyWriter.java Sun Nov 20 23:45:25 2011
@@ -9,6 +9,7 @@ import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.lucene.analysis.core.KeywordAnalyzer;
@@ -31,6 +32,7 @@ import org.apache.lucene.index.MultiFiel
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.LockObtainFailedException;
 import org.apache.lucene.store.NativeFSLockFactory;
@@ -82,6 +84,14 @@ import org.apache.lucene.facet.taxonomy.
  */
 public class DirectoryTaxonomyWriter implements TaxonomyWriter {
 
+  /**
+   * Property name of user commit data that contains the creation time of a taxonomy index.
+   * <p>
+   * Applications making use of {@link TaxonomyWriter#commit(Map)} should not use this
+   * particular property name. 
+   */
+  public static final String INDEX_CREATE_TIME = "index.create.time";
+  
   private IndexWriter indexWriter;
   private int nextID;
   private char delimiter = Consts.DEFAULT_DELIMITER;
@@ -105,6 +115,12 @@ public class DirectoryTaxonomyWriter imp
   private int cacheMisses;
 
   /**
+   * When a taxonomy is created, we mark that its create time should be committed in the 
+   * next commit.
+   */
+  private String taxoIndexCreateTime = null;
+  
+  /**
    * setDelimiter changes the character that the taxonomy uses in its internal
    * storage as a delimiter between category components. Do not use this
    * method unless you really know what you are doing. It has nothing to do
@@ -117,6 +133,7 @@ public class DirectoryTaxonomyWriter imp
    * objects you create for the same directory.
    */
   public void setDelimiter(char delimiter) {
+    ensureOpen();
     this.delimiter = delimiter;
   }
 
@@ -170,6 +187,10 @@ public class DirectoryTaxonomyWriter imp
   throws CorruptIndexException, LockObtainFailedException,
   IOException {
 
+    if (!IndexReader.indexExists(directory) || openMode==OpenMode.CREATE) {
+      taxoIndexCreateTime = Long.toString(System.nanoTime());
+    }
+    
     indexWriter = openIndexWriter(directory, openMode);
     reader = null;
 
@@ -278,10 +299,17 @@ public class DirectoryTaxonomyWriter imp
   @Override
   public synchronized void close() throws CorruptIndexException, IOException {
     if (indexWriter != null) {
-      indexWriter.close();
-      indexWriter = null;
+      if (taxoIndexCreateTime != null) {
+        indexWriter.commit(combinedCommitData(null));
+        taxoIndexCreateTime = null;
+      }
+      doClose();
     }
-
+  }
+  
+  private void doClose() throws CorruptIndexException, IOException {
+    indexWriter.close();
+    indexWriter = null;
     closeResources();
   }
 
@@ -290,6 +318,7 @@ public class DirectoryTaxonomyWriter imp
    * @return Number of cache bytes in memory, for CL2O only; zero otherwise.
    */
   public int getCacheMemoryUsage() {
+    ensureOpen();
     if (this.cache == null || !(this.cache instanceof Cl2oTaxonomyWriterCache)) {
       return 0;
     }
@@ -403,8 +432,8 @@ public class DirectoryTaxonomyWriter imp
   // calls - even those which could immediately return a cached value.
   // We definitely need to fix this situation!
   @Override
-  public synchronized int addCategory(CategoryPath categoryPath)
-  throws IOException {
+  public synchronized int addCategory(CategoryPath categoryPath) throws IOException {
+    ensureOpen();
     // If the category is already in the cache and/or the taxonomy, we
     // should return its existing ordinal:
     int res = findCategory(categoryPath);
@@ -453,6 +482,16 @@ public class DirectoryTaxonomyWriter imp
     return id;
   }
 
+  /**
+   * Verifies that this instance wasn't closed, and throws an
+   * {@link AlreadyClosedException} if it was.
+   */
+  protected final void ensureOpen() {
+    if (indexWriter == null) {
+      throw new AlreadyClosedException("The taxonomy writer has already been closed");
+    }
+  }
+  
   // Note that the methods calling addCategoryDocument() are synchornized,
   // so this method is effectively synchronized as well, but we'll add
   // synchronized to be on the safe side, and we can reuse class-local objects
@@ -570,18 +609,42 @@ public class DirectoryTaxonomyWriter imp
    */ 
   @Override
   public synchronized void commit() throws CorruptIndexException, IOException {
-    indexWriter.commit();
+    ensureOpen();
+    if (taxoIndexCreateTime != null) {
+      indexWriter.commit(combinedCommitData(null));
+      taxoIndexCreateTime = null;
+    } else {
+      indexWriter.commit();
+    }
     refreshReader();
   }
 
   /**
+   * Combine original user data with that of the taxonomy creation time
+   */
+  private Map<String,String> combinedCommitData(Map<String,String> userData) {
+    Map<String,String> m = new HashMap<String, String>();
+    if (userData != null) {
+      m.putAll(userData);
+    }
+    m.put(INDEX_CREATE_TIME, taxoIndexCreateTime);
+    return m;
+  }
+  
+  /**
    * Like commit(), but also store properties with the index. These properties
    * are retrievable by {@link DirectoryTaxonomyReader#getCommitUserData}.
    * See {@link TaxonomyWriter#commit(Map)}. 
    */
   @Override
   public synchronized void commit(Map<String,String> commitUserData) throws CorruptIndexException, IOException {
-    indexWriter.commit(commitUserData);
+    ensureOpen();
+    if (taxoIndexCreateTime != null) {
+      indexWriter.commit(combinedCommitData(commitUserData));
+      taxoIndexCreateTime = null;
+    } else {
+      indexWriter.commit(commitUserData);
+    }
     refreshReader();
   }
   
@@ -591,7 +654,13 @@ public class DirectoryTaxonomyWriter imp
    */
   @Override
   public synchronized void prepareCommit() throws CorruptIndexException, IOException {
-    indexWriter.prepareCommit();
+    ensureOpen();
+    if (taxoIndexCreateTime != null) {
+      indexWriter.prepareCommit(combinedCommitData(null));
+      taxoIndexCreateTime = null;
+    } else {
+      indexWriter.prepareCommit();
+    }
   }
 
   /**
@@ -600,7 +669,13 @@ public class DirectoryTaxonomyWriter imp
    */
   @Override
   public synchronized void prepareCommit(Map<String,String> commitUserData) throws CorruptIndexException, IOException {
-    indexWriter.prepareCommit(commitUserData);
+    ensureOpen();
+    if (taxoIndexCreateTime != null) {
+      indexWriter.prepareCommit(combinedCommitData(commitUserData));
+      taxoIndexCreateTime = null;
+    } else {
+      indexWriter.prepareCommit(commitUserData);
+    }
   }
   
   /**
@@ -616,6 +691,7 @@ public class DirectoryTaxonomyWriter imp
    */
   @Override
   synchronized public int getSize() {
+    ensureOpen();
     return indexWriter.maxDoc();
   }
 
@@ -643,8 +719,10 @@ public class DirectoryTaxonomyWriter imp
    * method. 
    */
   public void setCacheMissesUntilFill(int i) {
+    ensureOpen();
     cacheMissesUntilFill = i;
   }
+  
   private int cacheMissesUntilFill = 11;
 
   private boolean perhapsFillCache() throws IOException {
@@ -677,7 +755,7 @@ public class DirectoryTaxonomyWriter imp
     // executed we're safe, because we only iterate as long as there are next()
     // terms.
     if (terms != null) {
-      TermsEnum termsEnum = terms.iterator();
+      TermsEnum termsEnum = terms.iterator(null);
       Bits liveDocs = MultiFields.getLiveDocs(reader);
       DocsEnum docsEnum = null;
       while (termsEnum.next() != null) {
@@ -717,6 +795,7 @@ public class DirectoryTaxonomyWriter imp
   }
   @Override
   public int getParent(int ordinal) throws IOException {
+    ensureOpen();
     // Note: the following if() just enforces that a user can never ask
     // for the parent of a nonexistant category - even if the parent array
     // was allocated bigger than it really needs to be.
@@ -744,6 +823,7 @@ public class DirectoryTaxonomyWriter imp
    * and does not need to be commit()ed before this call. 
    */
   public void addTaxonomies(Directory[] taxonomies, OrdinalMap[] ordinalMaps) throws IOException {
+    ensureOpen();
     // To prevent us stepping on the rest of this class's decisions on when
     // to open a reader, and when not, we'll be opening a new reader instead
     // of using the existing "reader" object:
@@ -751,7 +831,7 @@ public class DirectoryTaxonomyWriter imp
     // TODO (Facet): can this then go segment-by-segment and avoid MultiDocsEnum etc?
     Terms terms = MultiFields.getTerms(mainreader, Consts.FULL);
     assert terms != null; // TODO (Facet): explicit check / throw exception?
-    TermsEnum mainte = terms.iterator();
+    TermsEnum mainte = terms.iterator(null);
     DocsEnum mainde = null;
 
     IndexReader[] otherreaders = new IndexReader[taxonomies.length];
@@ -761,7 +841,7 @@ public class DirectoryTaxonomyWriter imp
       otherreaders[i] = IndexReader.open(taxonomies[i]);
       terms = MultiFields.getTerms(otherreaders[i], Consts.FULL);
       assert terms != null; // TODO (Facet): explicit check / throw exception?
-      othertes[i] = terms.iterator();
+      othertes[i] = terms.iterator(null);
       // Also tell the ordinal maps their expected sizes:
       ordinalMaps[i].setSize(otherreaders[i].numDocs());
     }
@@ -1009,10 +1089,16 @@ public class DirectoryTaxonomyWriter imp
     return null;
   }
 
+  /**
+   * Rolls back changes to the taxonomy writer and closes the instance. Following
+   * this method the instance becomes unusable (calling any of its API methods
+   * will yield an {@link AlreadyClosedException}).
+   */
   @Override
-  public void rollback() throws IOException {
+  public synchronized void rollback() throws IOException {
+    ensureOpen();
     indexWriter.rollback();
-    refreshReader();
+    doClose();
   }
   
 }

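The DirectoryTaxonomyWriter changes above reserve the INDEX_CREATE_TIME commit-data property and add an ensureOpen() guard, so any call after close() or rollback() fails with AlreadyClosedException. Below is a minimal sketch of that contract from the application side; the RAMDirectory, category names, and user-data key are placeholders, everything else uses the API exactly as it appears in this patch.

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.lucene.facet.taxonomy.CategoryPath;
    import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
    import org.apache.lucene.store.AlreadyClosedException;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;

    public class TaxoCommitDataExample {
      public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();
        DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(dir);
        taxoWriter.addCategory(new CategoryPath("authors", "mark"));

        // Application commit data is merged with the writer's own creation-time
        // property on the first commit, so keys must not collide with
        // DirectoryTaxonomyWriter.INDEX_CREATE_TIME ("index.create.time").
        Map<String,String> userData = new HashMap<String,String>();
        userData.put("my.app.version", "42");
        taxoWriter.commit(userData);

        taxoWriter.close();
        try {
          // Any call after close() (or rollback()) now trips ensureOpen().
          taxoWriter.addCategory(new CategoryPath("authors", "lisa"));
        } catch (AlreadyClosedException expected) {
          // expected: the writer is unusable once closed
        }
        dir.close();
      }
    }
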
Modified: lucene/dev/branches/solrcloud/modules/facet/src/test/org/apache/lucene/facet/FacetTestBase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/facet/src/test/org/apache/lucene/facet/FacetTestBase.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/facet/src/test/org/apache/lucene/facet/FacetTestBase.java (original)
+++ lucene/dev/branches/solrcloud/modules/facet/src/test/org/apache/lucene/facet/FacetTestBase.java Sun Nov 20 23:45:25 2011
@@ -282,7 +282,7 @@ public abstract class FacetTestBase exte
         continue;
       }
       Bits liveDocs = MultiFields.getLiveDocs(indexReader);
-      TermsEnum te = terms.iterator();
+      TermsEnum te = terms.iterator(null);
       DocsEnum de = null;
       while (te.next() != null) {
         de = te.docs(liveDocs, de);

Modified: lucene/dev/branches/solrcloud/modules/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java (original)
+++ lucene/dev/branches/solrcloud/modules/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java Sun Nov 20 23:45:25 2011
@@ -1,10 +1,17 @@
 package org.apache.lucene.facet.taxonomy.directory;
 
+import java.util.Random;
+
 import org.apache.lucene.facet.taxonomy.CategoryPath;
+import org.apache.lucene.facet.taxonomy.InconsistentTaxonomyException;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
@@ -59,6 +66,36 @@ public class TestDirectoryTaxonomyReader
     dir.close();
   }
   
+  /**
+   * Test the boolean returned by TaxonomyReader.refresh()
+   * @throws Exception
+   */
+  @Test
+  public void testReaderRefreshResult() throws Exception {
+    Directory dir = null;
+    DirectoryTaxonomyWriter ltw = null;
+    DirectoryTaxonomyReader ltr = null;
+    
+    try {
+      dir = newDirectory();
+      ltw = new DirectoryTaxonomyWriter(dir);
+      
+      ltw.addCategory(new CategoryPath("a"));
+      ltw.commit();
+      
+      ltr = new DirectoryTaxonomyReader(dir);
+      assertFalse("Nothing has changed",ltr.refresh());
+      
+      ltw.addCategory(new CategoryPath("b"));
+      ltw.commit();
+      
+      assertTrue("changes were committed",ltr.refresh());
+      assertFalse("Nothing has changed",ltr.refresh());
+    } finally {
+      IOUtils.close(ltw, ltr, dir);
+    }
+  }
+  
   @Test
   public void testAlreadyClosed() throws Exception {
     Directory dir = newDirectory();
@@ -77,4 +114,68 @@ public class TestDirectoryTaxonomyReader
     dir.close();
   }
   
+  /**
+   * recreating a taxonomy should work well with a freshly opened taxonomy reader 
+   */
+  @Test
+  public void testFreshReadRecreatedTaxonomy() throws Exception {
+    doTestReadRecreatedTaxono(random, true);
+  }
+  
+  /**
+   * recreating a taxonomy should work well with a refreshed taxonomy reader 
+   */
+  @Test
+  public void testRefreshReadRecreatedTaxonomy() throws Exception {
+    doTestReadRecreatedTaxono(random, false);
+  }
+  
+  private void doTestReadRecreatedTaxono(Random random, boolean closeReader) throws Exception {
+    Directory dir = null;
+    TaxonomyWriter tw = null;
+    TaxonomyReader tr = null;
+    
+    // prepare a few categories
+    int  n = 10;
+    CategoryPath[] cp = new CategoryPath[n];
+    for (int i=0; i<n; i++) {
+      cp[i] = new CategoryPath("a", Integer.toString(i));
+    }
+    
+    try {
+      dir = newDirectory();
+      
+      tw = new DirectoryTaxonomyWriter(dir);
+      tw.addCategory(new CategoryPath("a"));
+      tw.close();
+      
+      tr = new DirectoryTaxonomyReader(dir);
+      int baseNumcategories = tr.getSize();
+      
+      for (int i=0; i<n; i++) {
+        int k = random.nextInt(n);
+        tw = new DirectoryTaxonomyWriter(dir, OpenMode.CREATE);
+        for (int j=0; j<=k; j++) {
+          tw.addCategory(new CategoryPath(cp[j]));
+        }
+        tw.close();
+        if (closeReader) {
+          tr.close();
+          tr = new DirectoryTaxonomyReader(dir);
+        } else {
+          try {
+            tr.refresh();
+            fail("Expected InconsistentTaxonomyException");
+          } catch (InconsistentTaxonomyException e) {
+            tr.close();
+            tr = new DirectoryTaxonomyReader(dir);
+          }
+        }
+        assertEquals("Wrong #categories in taxonomy (i="+i+", k="+k+")", baseNumcategories + 1 + k, tr.getSize());
+      }
+    } finally {
+      IOUtils.close(tr, tw, dir);
+    }
+  }
+  
 }

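The reader tests above exercise two behaviors: refresh() now reports through its boolean return value whether anything changed, and a taxonomy recreated with OpenMode.CREATE makes refresh() throw InconsistentTaxonomyException. A sketch of the resulting refresh-or-reopen pattern, using only the types that appear in these tests (the helper class and method names are made up):

    import java.io.IOException;

    import org.apache.lucene.facet.taxonomy.InconsistentTaxonomyException;
    import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
    import org.apache.lucene.store.Directory;

    final class TaxonomyReaderRefresher {
      /** Refreshes the reader in place, or reopens it if the taxonomy was recreated. */
      static DirectoryTaxonomyReader refreshOrReopen(DirectoryTaxonomyReader reader, Directory dir)
          throws IOException {
        try {
          reader.refresh();  // returns true only if new categories were committed
          return reader;
        } catch (InconsistentTaxonomyException e) {
          // The taxonomy was recreated (OpenMode.CREATE), so the old reader cannot
          // be refreshed in place; close it and open a fresh one.
          reader.close();
          return new DirectoryTaxonomyReader(dir);
        }
      }
    }
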
Modified: lucene/dev/branches/solrcloud/modules/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java (original)
+++ lucene/dev/branches/solrcloud/modules/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java Sun Nov 20 23:45:25 2011
@@ -5,6 +5,7 @@ import java.util.Map;
 
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
 import org.junit.Test;
 
@@ -86,4 +87,35 @@ public class TestDirectoryTaxonomyWriter
     dir.close();
   }
   
+  @Test
+  public void testRollback() throws Exception {
+    // Verifies that if rollback is called, DTW is closed.
+    Directory dir = newDirectory();
+    DirectoryTaxonomyWriter dtw = new DirectoryTaxonomyWriter(dir);
+    dtw.addCategory(new CategoryPath("a"));
+    dtw.rollback();
+    try {
+      dtw.addCategory(new CategoryPath("a"));
+      fail("should not have succeeded to add a category following rollback.");
+    } catch (AlreadyClosedException e) {
+      // expected
+    }
+    dir.close();
+  }
+  
+  @Test
+  public void testEnsureOpen() throws Exception {
+    // verifies that an exception is thrown if DTW was closed
+    Directory dir = newDirectory();
+    DirectoryTaxonomyWriter dtw = new DirectoryTaxonomyWriter(dir);
+    dtw.close();
+    try {
+      dtw.addCategory(new CategoryPath("a"));
+      fail("should not have succeeded to add a category following close.");
+    } catch (AlreadyClosedException e) {
+      // expected
+    }
+    dir.close();
+  }
+  
 }

Modified: lucene/dev/branches/solrcloud/modules/queries/src/java/org/apache/lucene/queries/TermsFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/queries/src/java/org/apache/lucene/queries/TermsFilter.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/queries/src/java/org/apache/lucene/queries/TermsFilter.java (original)
+++ lucene/dev/branches/solrcloud/modules/queries/src/java/org/apache/lucene/queries/TermsFilter.java Sun Nov 20 23:45:25 2011
@@ -74,7 +74,7 @@ public class TermsFilter extends Filter 
         if (termsC == null) {
           return result;
         }
-        termsEnum = termsC.iterator();
+        termsEnum = termsC.iterator(null);
         lastField = term.field();
       }
 

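Several files in this commit replace Terms.iterator() with Terms.iterator(null): the new argument is a TermsEnum that may be reused, and null simply requests a fresh one. A small sketch of the full-iteration form of that call, with a placeholder field name; the individual calls are the same ones used throughout this patch.

    import java.io.IOException;

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.MultiFields;
    import org.apache.lucene.index.Terms;
    import org.apache.lucene.index.TermsEnum;

    final class TermsIterationExample {
      /** Counts the distinct terms of a field; returns 0 if the field has none. */
      static long countTerms(IndexReader reader, String field) throws IOException {
        Terms terms = MultiFields.getTerms(reader, field);
        if (terms == null) {
          return 0;
        }
        // iterator(null) asks for a fresh TermsEnum; passing a previously obtained
        // enum instead allows the codec to reuse it.
        TermsEnum termsEnum = terms.iterator(null);
        long count = 0;
        while (termsEnum.next() != null) {
          count++;
        }
        return count;
      }
    }
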
Modified: lucene/dev/branches/solrcloud/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/TFValueSource.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/TFValueSource.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/TFValueSource.java (original)
+++ lucene/dev/branches/solrcloud/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/TFValueSource.java Sun Nov 20 23:45:25 2011
@@ -59,7 +59,17 @@ public class TFValueSource extends TermF
 
       public void reset() throws IOException {
         // no one should call us for deleted docs?
-        docs = terms==null ? null : terms.docs(null, indexedBytes, null);
+        if (terms != null) {
+          final TermsEnum termsEnum = terms.iterator(null);
+          if (termsEnum.seekExact(indexedBytes, false)) {
+            docs = termsEnum.docs(null, null);
+          } else {
+            docs = null;
+          }
+        } else {
+          docs = null;
+        }
+
         if (docs == null) {
           docs = new DocsEnum() {
             @Override

Modified: lucene/dev/branches/solrcloud/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/TermFreqValueSource.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/TermFreqValueSource.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/TermFreqValueSource.java (original)
+++ lucene/dev/branches/solrcloud/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/TermFreqValueSource.java Sun Nov 20 23:45:25 2011
@@ -51,7 +51,18 @@ public class TermFreqValueSource extends
 
       public void reset() throws IOException {
         // no one should call us for deleted docs?
-        docs = terms == null ? null : terms.docs(null, indexedBytes, null);
+        
+        if (terms != null) {
+          final TermsEnum termsEnum = terms.iterator(null);
+          if (termsEnum.seekExact(indexedBytes, false)) {
+            docs = termsEnum.docs(null, null);
+          } else {
+            docs = null;
+          }
+        } else {
+          docs = null;
+        }
+
         if (docs == null) {
           docs = new DocsEnum() {
             @Override

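TFValueSource and TermFreqValueSource above replace the Terms.docs(...) shortcut with an explicit seekExact on a TermsEnum. A condensed sketch of that lookup, where terms and indexedBytes stand for the fields already present in those classes:

    import java.io.IOException;

    import org.apache.lucene.index.DocsEnum;
    import org.apache.lucene.index.Terms;
    import org.apache.lucene.index.TermsEnum;
    import org.apache.lucene.util.BytesRef;

    final class TermDocsLookup {
      /** Returns a DocsEnum for the term, or null if the field or term is absent. */
      static DocsEnum docsFor(Terms terms, BytesRef indexedBytes) throws IOException {
        if (terms == null) {
          return null;                 // field has no terms
        }
        TermsEnum termsEnum = terms.iterator(null);
        if (!termsEnum.seekExact(indexedBytes, false)) {
          return null;                 // term does not exist in this field
        }
        // No live-docs filter and no DocsEnum reuse, matching the patched code.
        return termsEnum.docs(null, null);
      }
    }
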
Modified: lucene/dev/branches/solrcloud/modules/queries/src/java/org/apache/lucene/queries/mlt/MoreLikeThis.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/queries/src/java/org/apache/lucene/queries/mlt/MoreLikeThis.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/queries/src/java/org/apache/lucene/queries/mlt/MoreLikeThis.java (original)
+++ lucene/dev/branches/solrcloud/modules/queries/src/java/org/apache/lucene/queries/mlt/MoreLikeThis.java Sun Nov 20 23:45:25 2011
@@ -15,14 +15,19 @@
  */
 package org.apache.lucene.queries.mlt;
 
+import java.io.*;
+import java.util.*;
+
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.index.Fields;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.index.TermFreqVector;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.*;
 import org.apache.lucene.search.similarities.DefaultSimilarity;
 import org.apache.lucene.search.similarities.TFIDFSimilarity;
@@ -30,9 +35,6 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CharsRef;
 import org.apache.lucene.util.PriorityQueue;
 
-import java.io.*;
-import java.util.*;
-
 
 /**
  * Generate "more like this" similarity queries.
@@ -701,7 +703,13 @@ public final class MoreLikeThis {
   public PriorityQueue<Object[]> retrieveTerms(int docNum) throws IOException {
     Map<String, Int> termFreqMap = new HashMap<String, Int>();
     for (String fieldName : fieldNames) {
-      TermFreqVector vector = ir.getTermFreqVector(docNum, fieldName);
+      final Fields vectors = ir.getTermVectors(docNum);
+      final Terms vector;
+      if (vectors != null) {
+        vector = vectors.terms(fieldName);
+      } else {
+        vector = null;
+      }
 
       // field does not store term vector info
       if (vector == null) {
@@ -716,7 +724,6 @@ public final class MoreLikeThis {
       } else {
         addTermFrequencies(termFreqMap, vector);
       }
-
     }
 
     return createQueue(termFreqMap);
@@ -728,24 +735,25 @@ public final class MoreLikeThis {
    * @param termFreqMap a Map of terms and their frequencies
    * @param vector List of terms and their frequencies for a doc/field
    */
-  private void addTermFrequencies(Map<String, Int> termFreqMap, TermFreqVector vector) {
-    BytesRef[] terms = vector.getTerms();
-    int freqs[] = vector.getTermFrequencies();
+  private void addTermFrequencies(Map<String, Int> termFreqMap, Terms vector) throws IOException {
+    final TermsEnum termsEnum = vector.iterator(null);
     final CharsRef spare = new CharsRef();
-    for (int j = 0; j < terms.length; j++) {
-      final String term = terms[j].utf8ToChars(spare).toString();
-
+    BytesRef text;
+    while((text = termsEnum.next()) != null) {
+      final String term = text.utf8ToChars(spare).toString();
       if (isNoiseWord(term)) {
         continue;
       }
+      final int freq = (int) termsEnum.totalTermFreq();
+
       // increment frequency
       Int cnt = termFreqMap.get(term);
       if (cnt == null) {
         cnt = new Int();
         termFreqMap.put(term, cnt);
-        cnt.x = freqs[j];
+        cnt.x = freq;
       } else {
-        cnt.x += freqs[j];
+        cnt.x += freq;
       }
     }
   }

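MoreLikeThis now fetches term vectors through IndexReader.getTermVectors(int) and walks them with a TermsEnum instead of the old TermFreqVector arrays. A sketch of that walk in isolation, with a placeholder field name; for a single-document term vector, totalTermFreq() is the within-document frequency, which is what addTermFrequencies relies on.

    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.lucene.index.Fields;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.Terms;
    import org.apache.lucene.index.TermsEnum;
    import org.apache.lucene.util.BytesRef;
    import org.apache.lucene.util.CharsRef;

    final class TermVectorFrequencies {
      /** Collects per-term frequencies from the term vector of one document's field. */
      static Map<String,Integer> frequencies(IndexReader reader, int docID, String field)
          throws IOException {
        Map<String,Integer> freqs = new HashMap<String,Integer>();
        Fields vectors = reader.getTermVectors(docID);
        Terms vector = vectors == null ? null : vectors.terms(field);
        if (vector == null) {
          return freqs;  // no term vector stored for this field
        }
        TermsEnum termsEnum = vector.iterator(null);
        CharsRef spare = new CharsRef();
        BytesRef text;
        while ((text = termsEnum.next()) != null) {
          String term = text.utf8ToChars(spare).toString();
          // Single-doc vector: totalTermFreq() is the in-document frequency.
          freqs.put(term, (int) termsEnum.totalTermFreq());
        }
        return freqs;
      }
    }
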
Modified: lucene/dev/branches/solrcloud/modules/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java (original)
+++ lucene/dev/branches/solrcloud/modules/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java Sun Nov 20 23:45:25 2011
@@ -29,6 +29,7 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 
 /**
@@ -198,7 +199,8 @@ public class TestCustomScoreQuery extend
     final Query q = new CustomExternalQuery(q1);
     log(q);
 
-    IndexSearcher s = new IndexSearcher(dir, true);
+    IndexReader r = IndexReader.open(dir);
+    IndexSearcher s = new IndexSearcher(r);
     TopDocs hits = s.search(q, 1000);
     assertEquals(N_DOCS, hits.totalHits);
     for(int i=0;i<N_DOCS;i++) {
@@ -207,11 +209,13 @@ public class TestCustomScoreQuery extend
       assertEquals("doc=" + doc, (float) 1+(4*doc) % N_DOCS, score, 0.0001);
     }
     s.close();
+    r.close();
   }
   
   @Test
   public void testRewrite() throws Exception {
-    final IndexSearcher s = new IndexSearcher(dir, true);
+    IndexReader r = IndexReader.open(dir);
+    final IndexSearcher s = new IndexSearcher(r);
 
     Query q = new TermQuery(new Term(TEXT_FIELD, "first"));
     CustomScoreQuery original = new CustomScoreQuery(q);
@@ -229,13 +233,15 @@ public class TestCustomScoreQuery extend
     assertEquals(s.search(q,1).totalHits, s.search(rewritten,1).totalHits);
     
     s.close();
+    r.close();
   }
   
   // Test that FieldScoreQuery returns docs with expected score.
   private void doTestCustomScore(ValueSource valueSource, double dboost) throws Exception {
     float boost = (float) dboost;
     FunctionQuery functionQuery = new FunctionQuery(valueSource);
-    IndexSearcher s = new IndexSearcher(dir, true);
+    IndexReader r = IndexReader.open(dir);
+    IndexSearcher s = new IndexSearcher(r);
 
     // regular (boolean) query.
     BooleanQuery q1 = new BooleanQuery();
@@ -285,6 +291,7 @@ public class TestCustomScoreQuery extend
         h1, h2CustomNeutral, h3CustomMul, h4CustomAdd, h5CustomMulAdd,
         q1, q2CustomNeutral, q3CustomMul, q4CustomAdd, q5CustomMulAdd);
     s.close();
+    r.close();
   }
 
   // verify results are as expected.

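The test changes in this and the following files all follow one pattern: the IndexSearcher(Directory, boolean) constructor is no longer used, so each test opens an IndexReader itself, wraps it in an IndexSearcher, and closes both. A compact sketch of that pattern; MatchAllDocsQuery is just a stand-in query, the open/close calls are the ones used in these tests.

    import java.io.IOException;

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.store.Directory;

    final class SearcherFromDirectory {
      /** Opens a reader-backed searcher, runs one query, and closes everything. */
      static int countAllDocs(Directory dir) throws IOException {
        IndexReader reader = IndexReader.open(dir);
        IndexSearcher searcher = new IndexSearcher(reader);
        try {
          TopDocs hits = searcher.search(new MatchAllDocsQuery(), 10);
          return hits.totalHits;
        } finally {
          // The searcher no longer owns the reader, so both must be closed.
          searcher.close();
          reader.close();
        }
      }
    }
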
Modified: lucene/dev/branches/solrcloud/modules/queries/src/test/org/apache/lucene/queries/function/TestFieldScoreQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/queries/src/test/org/apache/lucene/queries/function/TestFieldScoreQuery.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/queries/src/test/org/apache/lucene/queries/function/TestFieldScoreQuery.java (original)
+++ lucene/dev/branches/solrcloud/modules/queries/src/test/org/apache/lucene/queries/function/TestFieldScoreQuery.java Sun Nov 20 23:45:25 2011
@@ -17,6 +17,7 @@ package org.apache.lucene.queries.functi
  * limitations under the License.
  */
 
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.queries.function.FunctionQuery;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.queries.function.valuesource.ByteFieldSource;
@@ -80,7 +81,8 @@ public class TestFieldScoreQuery extends
   // Test that FieldScoreQuery returns docs in expected order.
   private void doTestRank (ValueSource valueSource) throws Exception {
     FunctionQuery functionQuery = new FunctionQuery(valueSource);
-    IndexSearcher s = new IndexSearcher(dir, true);
+    IndexReader r = IndexReader.open(dir);
+    IndexSearcher s = new IndexSearcher(r);
     log("test: "+ functionQuery);
     QueryUtils.check(random, functionQuery,s);
     ScoreDoc[] h = s.search(functionQuery, null, 1000).scoreDocs;
@@ -94,6 +96,7 @@ public class TestFieldScoreQuery extends
       prevID = resID;
     }
     s.close();
+    r.close();
   }
 
   /** Test that FieldScoreQuery of Type.BYTE returns the expected scores. */
@@ -128,7 +131,8 @@ public class TestFieldScoreQuery extends
   // Test that FieldScoreQuery returns docs with expected score.
   private void doTestExactScore (ValueSource valueSource) throws Exception {
     FunctionQuery functionQuery = new FunctionQuery(valueSource);
-    IndexSearcher s = new IndexSearcher(dir, true);
+    IndexReader r = IndexReader.open(dir);
+    IndexSearcher s = new IndexSearcher(r);
     TopDocs td = s.search(functionQuery,null,1000);
     assertEquals("All docs should be matched!",N_DOCS,td.totalHits);
     ScoreDoc sd[] = td.scoreDocs;
@@ -140,6 +144,7 @@ public class TestFieldScoreQuery extends
       assertEquals("score of " + id + " shuould be " + expectedScore + " != " + score, expectedScore, score, TEST_SCORE_TOLERANCE_DELTA);
     }
     s.close();
+    r.close();
   }
 
 }

Modified: lucene/dev/branches/solrcloud/modules/queries/src/test/org/apache/lucene/queries/function/TestOrdValues.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/queries/src/test/org/apache/lucene/queries/function/TestOrdValues.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/queries/src/test/org/apache/lucene/queries/function/TestOrdValues.java (original)
+++ lucene/dev/branches/solrcloud/modules/queries/src/test/org/apache/lucene/queries/function/TestOrdValues.java Sun Nov 20 23:45:25 2011
@@ -18,6 +18,7 @@ package org.apache.lucene.queries.functi
  */
 
 import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
 import org.apache.lucene.queries.function.valuesource.OrdFieldSource;
 import org.apache.lucene.queries.function.valuesource.ReverseOrdFieldSource;
@@ -62,7 +63,8 @@ public class TestOrdValues extends Funct
 
   // Test that queries based on reverse/ordFieldScore scores correctly
   private void doTestRank(String field, boolean inOrder) throws CorruptIndexException, Exception {
-    IndexSearcher s = new IndexSearcher(dir, true);
+    IndexReader r = IndexReader.open(dir);
+    IndexSearcher s = new IndexSearcher(r);
     ValueSource vs;
     if (inOrder) {
       vs = new OrdFieldSource(field);
@@ -91,6 +93,7 @@ public class TestOrdValues extends Funct
       prevID = resID;
     }
     s.close();
+    r.close();
   }
 
   /**
@@ -112,7 +115,8 @@ public class TestOrdValues extends Funct
 
   // Test that queries based on reverse/ordFieldScore returns docs with expected score.
   private void doTestExactScore(String field, boolean inOrder) throws CorruptIndexException, Exception {
-    IndexSearcher s = new IndexSearcher(dir, true);
+    IndexReader r = IndexReader.open(dir);
+    IndexSearcher s = new IndexSearcher(r);
     ValueSource vs;
     if (inOrder) {
       vs = new OrdFieldSource(field);
@@ -136,6 +140,7 @@ public class TestOrdValues extends Funct
       assertTrue("id of result " + i + " shuould be " + expectedId + " != " + score, expectedId.equals(id));
     }
     s.close();
+    r.close();
   }
   
   // LUCENE-1250

Modified: lucene/dev/branches/solrcloud/modules/queryparser/src/java/org/apache/lucene/queryparser/surround/query/SrndPrefixQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/queryparser/src/java/org/apache/lucene/queryparser/surround/query/SrndPrefixQuery.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/queryparser/src/java/org/apache/lucene/queryparser/surround/query/SrndPrefixQuery.java (original)
+++ lucene/dev/branches/solrcloud/modules/queryparser/src/java/org/apache/lucene/queryparser/surround/query/SrndPrefixQuery.java Sun Nov 20 23:45:25 2011
@@ -60,7 +60,7 @@ public class SrndPrefixQuery extends Sim
     /* inspired by PrefixQuery.rewrite(): */
     Terms terms = MultiFields.getTerms(reader, fieldName);
     if (terms != null) {
-      TermsEnum termsEnum = terms.iterator();
+      TermsEnum termsEnum = terms.iterator(null);
 
       boolean skip = false;
       TermsEnum.SeekStatus status = termsEnum.seekCeil(new BytesRef(getPrefix()));

Modified: lucene/dev/branches/solrcloud/modules/queryparser/src/java/org/apache/lucene/queryparser/surround/query/SrndTermQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/queryparser/src/java/org/apache/lucene/queryparser/surround/query/SrndTermQuery.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/queryparser/src/java/org/apache/lucene/queryparser/surround/query/SrndTermQuery.java (original)
+++ lucene/dev/branches/solrcloud/modules/queryparser/src/java/org/apache/lucene/queryparser/surround/query/SrndTermQuery.java Sun Nov 20 23:45:25 2011
@@ -51,7 +51,7 @@ public class SrndTermQuery extends Simpl
     /* check term presence in index here for symmetry with other SimpleTerm's */
     Terms terms = MultiFields.getTerms(reader, fieldName);
     if (terms != null) {
-      TermsEnum termsEnum = terms.iterator();
+      TermsEnum termsEnum = terms.iterator(null);
 
       TermsEnum.SeekStatus status = termsEnum.seekCeil(new BytesRef(getTermText()));
       if (status == TermsEnum.SeekStatus.FOUND) {

Modified: lucene/dev/branches/solrcloud/modules/queryparser/src/java/org/apache/lucene/queryparser/surround/query/SrndTruncQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/queryparser/src/java/org/apache/lucene/queryparser/surround/query/SrndTruncQuery.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/queryparser/src/java/org/apache/lucene/queryparser/surround/query/SrndTruncQuery.java (original)
+++ lucene/dev/branches/solrcloud/modules/queryparser/src/java/org/apache/lucene/queryparser/surround/query/SrndTruncQuery.java Sun Nov 20 23:45:25 2011
@@ -93,7 +93,7 @@ public class SrndTruncQuery extends Simp
     if (terms != null) {
       Matcher matcher = pattern.matcher("");
       try {
-        TermsEnum termsEnum = terms.iterator();
+        TermsEnum termsEnum = terms.iterator(null);
 
         TermsEnum.SeekStatus status = termsEnum.seekCeil(prefixRef);
         BytesRef text;

Modified: lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiFieldQueryParser.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiFieldQueryParser.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiFieldQueryParser.java (original)
+++ lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiFieldQueryParser.java Sun Nov 20 23:45:25 2011
@@ -25,6 +25,7 @@ import java.util.Map;
 import org.apache.lucene.analysis.*;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.IndexSearcher;
@@ -290,10 +291,12 @@ public class TestMultiFieldQueryParser e
       new MultiFieldQueryParser(TEST_VERSION_CURRENT, new String[] {"body"}, analyzer);
     mfqp.setDefaultOperator(QueryParser.Operator.AND);
     Query q = mfqp.parse("the footest");
-    IndexSearcher is = new IndexSearcher(ramDir, true);
+    IndexReader ir = IndexReader.open(ramDir);
+    IndexSearcher is = new IndexSearcher(ir);
     ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs;
     assertEquals(1, hits.length);
     is.close();
+    ir.close();
     ramDir.close();
   }
   

Modified: lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java (original)
+++ lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java Sun Nov 20 23:45:25 2011
@@ -23,6 +23,7 @@ import org.apache.lucene.analysis.Analyz
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.queryparser.classic.QueryParser;
 import org.apache.lucene.search.IndexSearcher;
@@ -42,6 +43,7 @@ public class TestComplexPhraseQuery exte
       new DocData("jackson waits tom", "4") };
 
   private IndexSearcher searcher;
+  private IndexReader reader;
 
   String defaultFieldName = "name";
 
@@ -120,12 +122,14 @@ public class TestComplexPhraseQuery exte
       w.addDocument(doc);
     }
     w.close();
-    searcher = new IndexSearcher(rd, true);
+    reader = IndexReader.open(rd);
+    searcher = new IndexSearcher(reader);
   }
 
   @Override
   public void tearDown() throws Exception {
     searcher.close();
+    reader.close();
     rd.close();
     super.tearDown();
   }

Modified: lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiFieldQPHelper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiFieldQPHelper.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiFieldQPHelper.java (original)
+++ lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiFieldQPHelper.java Sun Nov 20 23:45:25 2011
@@ -24,6 +24,7 @@ import java.util.Map;
 import org.apache.lucene.analysis.*;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
 import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler;
@@ -328,10 +329,12 @@ public class TestMultiFieldQPHelper exte
     mfqp.setAnalyzer(analyzer);
     mfqp.setDefaultOperator(StandardQueryConfigHandler.Operator.AND);
     Query q = mfqp.parse("the footest", null);
-    IndexSearcher is = new IndexSearcher(ramDir, true);
+    IndexReader ir = IndexReader.open(ramDir);
+    IndexSearcher is = new IndexSearcher(ir);
     ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs;
     assertEquals(1, hits.length);
     is.close();
+    ir.close();
     ramDir.close();
   }
 

Modified: lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/surround/query/BooleanQueryTst.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/surround/query/BooleanQueryTst.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/surround/query/BooleanQueryTst.java (original)
+++ lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/surround/query/BooleanQueryTst.java Sun Nov 20 23:45:25 2011
@@ -19,6 +19,7 @@ package org.apache.lucene.queryparser.su
 
 import java.io.IOException;
 
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Collector;
@@ -121,11 +122,13 @@ public class BooleanQueryTst {
     /* if (verbose) System.out.println("Lucene: " + query.toString()); */
 
     TestCollector tc = new TestCollector();
-    IndexSearcher searcher = new IndexSearcher(dBase.getDb(), true);
+    IndexReader reader = IndexReader.open(dBase.getDb());
+    IndexSearcher searcher = new IndexSearcher(reader);
     try {
       searcher.search(query, tc);
     } finally {
       searcher.close();
+      reader.close();
     }
     tc.checkNrHits();
   }

Modified: lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/xml/TestQueryTemplateManager.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/xml/TestQueryTemplateManager.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/xml/TestQueryTemplateManager.java (original)
+++ lucene/dev/branches/solrcloud/modules/queryparser/src/test/org/apache/lucene/queryparser/xml/TestQueryTemplateManager.java Sun Nov 20 23:45:25 2011
@@ -20,6 +20,7 @@ package org.apache.lucene.queryparser.xm
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
@@ -46,6 +47,7 @@ public class TestQueryTemplateManager ex
   private CoreParser builder;
   private final Analyzer analyzer = new MockAnalyzer(random);
   private IndexSearcher searcher;
+  private IndexReader reader;
   private Directory dir;
 
   //A collection of documents' field values for use in our tests
@@ -147,7 +149,8 @@ public class TestQueryTemplateManager ex
     }
     w.forceMerge(1);
     w.close();
-    searcher = new IndexSearcher(dir, true);
+    reader = IndexReader.open(dir);
+    searcher = new IndexSearcher(reader);
 
     //initialize the parser
     builder = new CorePlusExtensionsParser("artist", analyzer);
@@ -157,6 +160,7 @@ public class TestQueryTemplateManager ex
   @Override
   public void tearDown() throws Exception {
     searcher.close();
+    reader.close();
     dir.close();
     super.tearDown();
   }

Modified: lucene/dev/branches/solrcloud/modules/suggest/src/java/org/apache/lucene/search/spell/HighFrequencyDictionary.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/suggest/src/java/org/apache/lucene/search/spell/HighFrequencyDictionary.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/suggest/src/java/org/apache/lucene/search/spell/HighFrequencyDictionary.java (original)
+++ lucene/dev/branches/solrcloud/modules/suggest/src/java/org/apache/lucene/search/spell/HighFrequencyDictionary.java Sun Nov 20 23:45:25 2011
@@ -24,7 +24,6 @@ import org.apache.lucene.index.IndexRead
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.MultiFields;
-import org.apache.lucene.search.spell.Dictionary;
 import org.apache.lucene.util.CharsRef;
 import org.apache.lucene.util.BytesRef;
 
@@ -64,7 +63,7 @@ public class HighFrequencyDictionary imp
       try {
         Terms terms = MultiFields.getTerms(reader, field);
         if (terms != null) {
-          termsEnum = terms.iterator();
+          termsEnum = terms.iterator(null);
         }
         minNumDocs = (int)(thresh * (float)reader.numDocs());
       } catch (IOException e) {

Modified: lucene/dev/branches/solrcloud/modules/suggest/src/java/org/apache/lucene/search/spell/LuceneDictionary.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/suggest/src/java/org/apache/lucene/search/spell/LuceneDictionary.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/suggest/src/java/org/apache/lucene/search/spell/LuceneDictionary.java (original)
+++ lucene/dev/branches/solrcloud/modules/suggest/src/java/org/apache/lucene/search/spell/LuceneDictionary.java Sun Nov 20 23:45:25 2011
@@ -62,7 +62,7 @@ public class LuceneDictionary implements
       try {
         final Terms terms = MultiFields.getTerms(reader, field);
         if (terms != null) {
-          termsEnum = terms.iterator();
+          termsEnum = terms.iterator(null);
           pendingTerm = termsEnum.next();
         }
       } catch (IOException e) {

Modified: lucene/dev/branches/solrcloud/modules/suggest/src/java/org/apache/lucene/search/spell/SpellChecker.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/modules/suggest/src/java/org/apache/lucene/search/spell/SpellChecker.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/modules/suggest/src/java/org/apache/lucene/search/spell/SpellChecker.java (original)
+++ lucene/dev/branches/solrcloud/modules/suggest/src/java/org/apache/lucene/search/spell/SpellChecker.java Sun Nov 20 23:45:25 2011
@@ -500,7 +500,7 @@ public class SpellChecker implements jav
           protected void add(int base, IndexReader r) throws IOException {
             Terms terms = r.terms(F_WORD);
             if (terms != null)
-              termsEnums.add(terms.iterator());
+              termsEnums.add(terms.iterator(null));
           }
         }.run();
       }
@@ -638,6 +638,7 @@ public class SpellChecker implements jav
       ensureOpen();
       closed = true;
       if (searcher != null) {
+        searcher.getIndexReader().close();
         searcher.close();
       }
       searcher = null;
@@ -653,10 +654,12 @@ public class SpellChecker implements jav
     final IndexSearcher indexSearcher = createSearcher(dir);
     synchronized (searcherLock) {
       if(closed){
+        indexSearcher.getIndexReader().close();
         indexSearcher.close();
         throw new AlreadyClosedException("Spellchecker has been closed");
       }
       if (searcher != null) {
+        searcher.getIndexReader().close();
         searcher.close();
       }
       // set the spellindex in the sync block - ensure consistency.
@@ -673,7 +676,7 @@ public class SpellChecker implements jav
    */
   // for testing purposes
   IndexSearcher createSearcher(final Directory dir) throws IOException{
-    return new IndexSearcher(dir, true);
+    return new IndexSearcher(IndexReader.open(dir));
   }
   
   /**

Modified: lucene/dev/branches/solrcloud/solr/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/solr/CHANGES.txt?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/solr/CHANGES.txt (original)
+++ lucene/dev/branches/solrcloud/solr/CHANGES.txt Sun Nov 20 23:45:25 2011
@@ -109,6 +109,8 @@ New Features
   as results. 
   (ryan with patches from grant, noble, cmale, yonik, Jan Høydahl, 
   Arul Kalaipandian, hossman)
+  SOLR-2037: Thanks to SOLR-1566, documents boosted by the QueryElevationComponent
+  can be marked as boosted.  (gsingers, ryan, yonik)
 
 * SOLR-2396: Add CollationField, which is much more efficient than 
   the Solr 3.x CollationKeyFilterFactory, and also supports 
@@ -190,9 +192,6 @@ New Features
 Optimizations
 ----------------------
 
-* SOLR-2742: SolrJ: Provide commitWithinMs as optional parameter for all add() methods,
-  making the feature more conveniently accessible for developers (janhoy)
-
 * SOLR-1875: Per-segment field faceting for single valued string fields.
   Enable with facet.method=fcs, control the number of threads used with
   the "threads" local param on the facet.field param.  This algorithm will
@@ -393,6 +392,22 @@ New Features
 
 * SOLR-1926: Add hl.q parameter. (koji)
 
+* SOLR-2881: Numeric types now support sortMissingFirst/Last. This includes Trie and date types
+  (Ryan McKinley, Mike McCandless, Uwe Schindler, Erick Erickson)
+
+* SOLR-1023: StatsComponent now supports date fields and string fields.
+  (Chris Male, Mark Holland, Gunnlaugur Thor Briem, Ryan McKinley)
+
+* SOLR-2578: ReplicationHandler's backup command now supports a 'numberToKeep' 
+  param that can be used to delete all but the most recent N backups.
+  (James Dyer via hossman)
+
+Optimizations
+----------------------
+
+* SOLR-2742: SolrJ: Provide commitWithinMs as optional parameter for all add() methods,
+  making the feature more conveniently accessible for developers (janhoy)
+
 Bug Fixes
 ----------------------
 * SOLR-2748: The CommitTracker used for commitWith or autoCommit by maxTime
@@ -419,6 +434,12 @@ Bug Fixes
 * SOLR-2861: Fix extremely rare race condition on commit that can result
   in a NPE (yonik)
 
+* SOLR-2813: Fix HTTP error codes returned when requests contain strings that
+  cannot be parsed as numbers for Trie fields. (Jeff Crump and hossman)
+  
+* SOLR-2902: The list of collations was parsed incorrectly in SpellCheckResponse,
+  causing a wrong number of collation results in the response.
+  (Bastiaan Verhoef, James Dyer via Simon Willnauer)
 
  Other Changes
 ----------------------

Modified: lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/analysis/TrieTokenizerFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/analysis/TrieTokenizerFactory.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/analysis/TrieTokenizerFactory.java (original)
+++ lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/analysis/TrieTokenizerFactory.java Sun Nov 20 23:45:25 2011
@@ -89,24 +89,29 @@ final class TrieTokenizer extends Tokeni
       this.startOfs = correctOffset(0);
       this.endOfs = correctOffset(len);
       String v = new String(buf, 0, len);
-      switch (type) {
-        case INTEGER:
-          ts.setIntValue(Integer.parseInt(v));
-          break;
-        case FLOAT:
-          ts.setFloatValue(Float.parseFloat(v));
-          break;
-        case LONG:
-          ts.setLongValue(Long.parseLong(v));
-          break;
-        case DOUBLE:
-          ts.setDoubleValue(Double.parseDouble(v));
-          break;
-        case DATE:
-          ts.setLongValue(dateField.parseMath(null, v).getTime());
-          break;
-        default:
-          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field");
+      try {
+        switch (type) {
+          case INTEGER:
+            ts.setIntValue(Integer.parseInt(v));
+            break;
+          case FLOAT:
+            ts.setFloatValue(Float.parseFloat(v));
+            break;
+          case LONG:
+            ts.setLongValue(Long.parseLong(v));
+            break;
+          case DOUBLE:
+            ts.setDoubleValue(Double.parseDouble(v));
+            break;
+          case DATE:
+            ts.setLongValue(dateField.parseMath(null, v).getTime());
+            break;
+          default:
+            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field");
+        }
+      } catch (NumberFormatException nfe) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, 
+                                "Invalid Number: " + v);
       }
     } catch (IOException e) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unable to create TrieIndexTokenizer", e);

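The TrieTokenizer change above maps NumberFormatException to a 400 (BAD_REQUEST) SolrException instead of letting it escape as a server error. A stripped-down sketch of that mapping for a single long value; the helper class and method are hypothetical, only the exception construction comes from this patch.

    import org.apache.solr.common.SolrException;

    final class TrieValueParser {
      /** Parses a long, turning malformed input into an HTTP 400 rather than a 500. */
      static long parseLongOr400(String v) {
        try {
          return Long.parseLong(v);
        } catch (NumberFormatException nfe) {
          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                                  "Invalid Number: " + v);
        }
      }
    }
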
Modified: lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/core/SolrCore.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/core/SolrCore.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/core/SolrCore.java (original)
+++ lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/core/SolrCore.java Sun Nov 20 23:45:25 2011
@@ -1703,6 +1703,10 @@ public final class SolrCore implements S
   public TransformerFactory getTransformerFactory(String name) {
     return transformerFactories.get(name);
   }
+
+  public void addTransformerFactory(String name, TransformerFactory factory){
+    transformerFactories.put(name, factory);
+  }
   
 
   /**

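SolrCore gains addTransformerFactory as the write-side counterpart of the existing getTransformerFactory. A tiny sketch of registering and looking up a factory; the org.apache.solr.response.transform package and the name "mytrans" are assumptions here, not shown in the diff.

    import org.apache.solr.core.SolrCore;
    import org.apache.solr.response.transform.TransformerFactory;

    final class TransformerRegistration {
      /** Registers a custom transformer factory under a name of our choosing. */
      static void register(SolrCore core, TransformerFactory factory) {
        core.addTransformerFactory("mytrans", factory);  // "mytrans" is a placeholder
        assert core.getTransformerFactory("mytrans") == factory;
      }
    }
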
Modified: lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java (original)
+++ lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java Sun Nov 20 23:45:25 2011
@@ -129,7 +129,7 @@ public class ReplicationHandler extends 
     }
     // This command does not give the current index version of the master
     // It gives the current 'replicateable' index version
-   if (command.equals(CMD_INDEX_VERSION)) {
+    if (command.equals(CMD_INDEX_VERSION)) {
       IndexCommit commitPoint = indexCommitPoint;  // make a copy so it won't change
       
       // this is only set after commit or optimize or something - if it's not set,
@@ -214,10 +214,10 @@ public class ReplicationHandler extends 
     } else if (CMD_ENABLE_REPL.equalsIgnoreCase(command)) {
       replicationEnabled.set(true);
       rsp.add(STATUS, OK_STATUS);
-   } else if (CMD_DISABLE_REPL.equalsIgnoreCase(command)) {
-     replicationEnabled.set(false);
-     rsp.add(STATUS, OK_STATUS);
-   }
+    } else if (CMD_DISABLE_REPL.equalsIgnoreCase(command)) {
+      replicationEnabled.set(false);
+      rsp.add(STATUS, OK_STATUS);
+    }
   }
 
   private List<NamedList<Object>> getCommits() {
@@ -308,16 +308,17 @@ public class ReplicationHandler extends 
 
   private void doSnapShoot(SolrParams params, SolrQueryResponse rsp, SolrQueryRequest req) {
     try {
+      int numberToKeep = params.getInt(NUMBER_BACKUPS_TO_KEEP, Integer.MAX_VALUE);
       IndexDeletionPolicyWrapper delPolicy = core.getDeletionPolicy();
       IndexCommit indexCommit = delPolicy.getLatestCommit();
-
+      
       if(indexCommit == null) {
         indexCommit = req.getSearcher().getIndexReader().getIndexCommit();
       }
-
+      
       // small race here before the commit point is saved
-      new SnapShooter(core, params.get("location")).createSnapAsync(indexCommit, this);
-
+      new SnapShooter(core, params.get("location")).createSnapAsync(indexCommit, numberToKeep, this);
+      
     } catch (Exception e) {
       LOG.warn("Exception during creating a snapshot", e);
       rsp.add("exception", e);
@@ -368,7 +369,7 @@ public class ReplicationHandler extends 
       rsp.add("status", "unable to get file names for given indexversion");
       rsp.add("exception", e);
       LOG.warn("Unable to get file names for indexCommit version: "
-              + version, e);
+               + version, e);
     }
     rsp.add(CMD_GET_FILE_LIST, result);
     if (confFileNameAlias.size() < 1)
@@ -1163,4 +1164,6 @@ public class ReplicationHandler extends 
   public static final String OK_STATUS = "OK";
 
   public static final String NEXT_EXECUTION_AT = "nextExecutionAt";
+  
+  public static final String NUMBER_BACKUPS_TO_KEEP = "numberToKeep";
 }

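For illustration (not part of the patch): the new numberToKeep request parameter defaults to Integer.MAX_VALUE, so existing backup requests keep every snapshot; a request such as .../replication?command=backup&numberToKeep=2 (assuming the existing backup command) limits retention. A small sketch of the parameter handling with hypothetical values:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.solr.common.params.MapSolrParams;
    import org.apache.solr.common.params.SolrParams;

    public class NumberToKeepDemo {
      public static void main(String[] args) {
        Map<String, String> m = new HashMap<String, String>();
        m.put("numberToKeep", "2");            // as sent on a backup request
        SolrParams params = new MapSolrParams(m);
        // Same default the handler uses above: keep everything unless told otherwise.
        int numberToKeep = params.getInt("numberToKeep", Integer.MAX_VALUE);
        System.out.println("keeping at most " + numberToKeep + " backups");
      }
    }
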
Modified: lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/SnapShooter.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/SnapShooter.java (original)
+++ lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/SnapShooter.java Sun Nov 20 23:45:25 2011
@@ -22,9 +22,14 @@ import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.text.SimpleDateFormat;
+import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.Date;
+import java.util.List;
 import java.util.Locale;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.lucene.index.IndexCommit;
@@ -59,19 +64,23 @@ public class SnapShooter {
     }
     lockFactory = new SimpleFSLockFactory(snapDir);
   }
-
+  
   void createSnapAsync(final IndexCommit indexCommit, final ReplicationHandler replicationHandler) {
+    createSnapAsync(indexCommit, Integer.MAX_VALUE, replicationHandler);
+  }
+
+  void createSnapAsync(final IndexCommit indexCommit, final int numberToKeep, final ReplicationHandler replicationHandler) {
     replicationHandler.core.getDeletionPolicy().saveCommitPoint(indexCommit.getVersion());
 
     new Thread() {
       @Override
       public void run() {
-        createSnapshot(indexCommit, replicationHandler);
+        createSnapshot(indexCommit, numberToKeep, replicationHandler);
       }
     }.start();
   }
 
-  void createSnapshot(final IndexCommit indexCommit, ReplicationHandler replicationHandler) {
+  void createSnapshot(final IndexCommit indexCommit, int numberToKeep, ReplicationHandler replicationHandler) {
 
     NamedList<Object> details = new NamedList<Object>();
     details.add("startTime", new Date().toString());
@@ -79,6 +88,9 @@ public class SnapShooter {
     String directoryName = null;
     Lock lock = null;
     try {
+      if(numberToKeep<Integer.MAX_VALUE) {
+        deleteOldBackups(numberToKeep);
+      }
       SimpleDateFormat fmt = new SimpleDateFormat(DATE_FMT, Locale.US);
       directoryName = "snapshot." + fmt.format(new Date());
       lock = lockFactory.makeLock(directoryName + ".lock");
@@ -100,8 +112,8 @@ public class SnapShooter {
       LOG.error("Exception while creating snapshot", e);
       details.add("snapShootException", e.getMessage());
     } finally {
-        replicationHandler.core.getDeletionPolicy().releaseCommitPoint(indexCommit.getVersion());   
-        replicationHandler.snapShootDetails = details;
+      replicationHandler.core.getDeletionPolicy().releaseCommitPoint(indexCommit.getVersion());   
+      replicationHandler.snapShootDetails = details;
       if (lock != null) {
         try {
           lock.release();
@@ -111,6 +123,46 @@ public class SnapShooter {
       }
     }
   }
+  private void deleteOldBackups(int numberToKeep) {
+    File[] files = new File(snapDir).listFiles();
+    List<OldBackupDirectory> dirs = new ArrayList<OldBackupDirectory>();
+    for(File f : files) {
+      OldBackupDirectory obd = new OldBackupDirectory(f);
+      if(obd.dir != null) {
+        dirs.add(obd);
+      }
+    }
+    Collections.sort(dirs);
+    int i=1;
+    for(OldBackupDirectory dir : dirs) {
+      if( i++ > numberToKeep-1 ) {
+        SnapPuller.delTree(dir.dir);
+      }
+    }   
+  }
+  private class OldBackupDirectory implements Comparable<OldBackupDirectory>{
+    File dir;
+    Date timestamp;
+    final Pattern dirNamePattern = Pattern.compile("^snapshot[.](.*)$");
+    
+    OldBackupDirectory(File dir) {
+      if(dir.isDirectory()) {
+        Matcher m = dirNamePattern.matcher(dir.getName());
+        if(m.find()) {
+          try {
+            this.dir = dir;
+            this.timestamp = new SimpleDateFormat(DATE_FMT, Locale.US).parse(m.group(1));
+          } catch(Exception e) {
+            this.dir = null;
+            this.timestamp = null;
+          }
+        }
+      }
+    }
+    public int compareTo(OldBackupDirectory that) {
+      return that.timestamp.compareTo(this.timestamp);
+    }
+  }
 
   public static final String SNAP_DIR = "snapDir";
   public static final String DATE_FMT = "yyyyMMddHHmmss";
@@ -135,7 +187,7 @@ public class SnapShooter {
     }
     
     public void copyFile(File source, File destination, boolean preserveFileDate)
-        throws IOException {
+      throws IOException {
       // check source exists
       if (!source.exists()) {
         String message = "File " + source + " does not exist";
@@ -185,7 +237,7 @@ public class SnapShooter {
 
       if (source.length() != destination.length()) {
         String message = "Failed to copy full contents from " + source + " to "
-            + destination;
+          + destination;
         throw new IOException(message);
       }
 

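For illustration (standalone sketch with a hypothetical path, not Solr code): the retention rule above sorts the snapshot.* directories newest first (OldBackupDirectory.compareTo reverses the natural order) and removes everything past the first numberToKeep-1 entries, since the snapshot about to be written brings the total back up to numberToKeep:

    import java.io.File;
    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.Comparator;
    import java.util.List;

    public class PruneSketch {
      public static void main(String[] args) {
        int numberToKeep = 3;
        File snapDir = new File("/tmp/snapshots");     // illustrative location
        List<File> snapshots = new ArrayList<File>();
        File[] files = snapDir.listFiles();
        if (files != null) {
          for (File f : files) {
            if (f.isDirectory() && f.getName().startsWith("snapshot.")) {
              snapshots.add(f);
            }
          }
        }
        // Newest first: the yyyyMMddHHmmss suffix sorts lexicographically.
        Collections.sort(snapshots, new Comparator<File>() {
          public int compare(File a, File b) {
            return b.getName().compareTo(a.getName());
          }
        });
        for (int i = 0; i < snapshots.size(); i++) {
          if (i >= numberToKeep - 1) {
            System.out.println("would delete " + snapshots.get(i));
          }
        }
      }
    }
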
Modified: lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java (original)
+++ lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java Sun Nov 20 23:45:25 2011
@@ -29,27 +29,25 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.index.Fields;
+import org.apache.lucene.index.FieldsEnum;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.index.MultiFields;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.index.Fields;
-import org.apache.lucene.index.FieldsEnum;
+import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.index.MultiFields;
-import org.apache.lucene.index.TermFreqVector;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermRangeQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CharsRef;
 import org.apache.lucene.util.PriorityQueue;
-import org.apache.lucene.util.BytesRef;
 import org.apache.solr.analysis.CharFilterFactory;
 import org.apache.solr.analysis.TokenFilterFactory;
 import org.apache.solr.analysis.TokenizerChain;
@@ -58,9 +56,9 @@ import org.apache.solr.common.SolrExcept
 import org.apache.solr.common.luke.FieldFlag;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.Base64;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.common.util.Base64;
 import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
@@ -68,6 +66,8 @@ import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.search.SolrIndexSearcher;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This handler exposes the internal lucene index.  It is inspired by and 
@@ -266,11 +266,14 @@ public class LukeRequestHandler extends 
       // If we have a term vector, return that
       if( field.fieldType().storeTermVectors() ) {
         try {
-          TermFreqVector v = reader.getTermFreqVector( docId, field.name() );
+          Terms v = reader.getTermVector( docId, field.name() );
           if( v != null ) {
             SimpleOrderedMap<Integer> tfv = new SimpleOrderedMap<Integer>();
-            for( int i=0; i<v.size(); i++ ) {
-              tfv.add( v.getTerms()[i].utf8ToChars(spare).toString(), v.getTermFrequencies()[i] );
+            final TermsEnum termsEnum = v.iterator(null);
+            BytesRef text;
+            while((text = termsEnum.next()) != null) {
+              final int freq = (int) termsEnum.totalTermFreq();
+              tfv.add( text.utf8ToChars(spare).toString(), freq );
             }
             f.add( "termVector", tfv );
           }
@@ -482,9 +485,12 @@ public class LukeRequestHandler extends 
       if (fields != null) {
         FieldsEnum fieldsEnum = fields.iterator();
         while(fieldsEnum.next() != null) {
-          TermsEnum termsEnum = fieldsEnum.terms();
-          while(termsEnum.next() != null) {
-            numTerms++;
+          Terms terms = fieldsEnum.terms();
+          if (terms != null) {
+            TermsEnum termsEnum = terms.iterator(null);
+            while(termsEnum.next() != null) {
+              numTerms++;
+            }
           }
         }
       }
@@ -636,7 +642,11 @@ public class LukeRequestHandler extends 
       String field;
       while((field = fieldsEnum.next()) != null) {
 
-        TermsEnum termsEnum = fieldsEnum.terms();
+        Terms terms = fieldsEnum.terms();
+        if (terms == null) {
+          continue;
+        }
+        TermsEnum termsEnum = terms.iterator(null);
         BytesRef text;
         while((text = termsEnum.next()) != null) {
           String t = text.utf8ToChars(spare).toString();

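For illustration (not part of the patch): the LukeRequestHandler now uses the flex pattern applied throughout this commit in place of the removed TermFreqVector API: fetch Terms for (doc, field) via getTermVector, walk it with a TermsEnum, and read totalTermFreq() per term. A minimal sketch of that walk:

    import java.io.IOException;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.Terms;
    import org.apache.lucene.index.TermsEnum;
    import org.apache.lucene.util.BytesRef;

    public class TermVectorWalk {
      // Prints term -> frequency for one document's term vector, if it has one.
      static void dump(IndexReader reader, int docId, String field) throws IOException {
        Terms vector = reader.getTermVector(docId, field);
        if (vector == null) {
          return;                              // no term vector stored for this field
        }
        TermsEnum termsEnum = vector.iterator(null);
        BytesRef text;
        while ((text = termsEnum.next()) != null) {
          System.out.println(text.utf8ToString() + " -> " + termsEnum.totalTermFreq());
        }
      }
    }
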
Modified: lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java (original)
+++ lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java Sun Nov 20 23:45:25 2011
@@ -23,14 +23,10 @@ import java.io.InputStream;
 import java.io.StringReader;
 import java.net.MalformedURLException;
 import java.net.URL;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.WeakHashMap;
+import java.util.*;
 
 import org.apache.solr.common.params.QueryElevationParams;
+import org.apache.solr.response.transform.EditorialMarkerFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -103,6 +99,7 @@ public class QueryElevationComponent ext
     final BooleanClause[] exclude;
     final BooleanQuery include;
     final Map<BytesRef,Integer> priority;
+    final Set<String> ids;
     
     // use singletons so hashCode/equals on Sort will just work
     final FieldComparatorSource comparatorSource;
@@ -111,12 +108,14 @@ public class QueryElevationComponent ext
     {
       this.text = qstr;
       this.analyzed = getAnalyzedQuery( this.text );
+      this.ids = new HashSet<String>();
       
       this.include = new BooleanQuery();
       this.include.setBoost( 0 );
       this.priority = new HashMap<BytesRef, Integer>();
       int max = elevate.size()+5;
       for( String id : elevate ) {
+        ids.add(id);
         TermQuery tq = new TermQuery( new Term( idField, id ) );
         include.add( tq, BooleanClause.Occur.SHOULD );
         this.priority.put( new BytesRef(id), max-- );
@@ -161,7 +160,13 @@ public class QueryElevationComponent ext
           "QueryElevationComponent requires the schema to have a uniqueKeyField implemented using StrField" );
     }
     idField = sf.getName();
-    
+    //register the EditorialMarkerFactory
+    EditorialMarkerFactory factory = new EditorialMarkerFactory();
+    String markerName = initArgs.get(QueryElevationParams.EDITORIAL_MARKER_FIELD_NAME, "elevated");
+    if (markerName == null || markerName.equals("")){
+      markerName = "elevated";
+    }
+    core.addTransformerFactory(markerName, factory);
     forceElevation = initArgs.getBool( QueryElevationParams.FORCE_ELEVATION, forceElevation );
     try {
       synchronized( elevationCache ) {
@@ -357,6 +362,8 @@ public class QueryElevationComponent ext
     }
     
     if( booster != null ) {
+      rb.req.getContext().put("BOOSTED", booster.ids);
+      
       // Change the query to insert forced documents
       if (exclusive == true){
         //we only want these results

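For illustration (hypothetical helper, not in this commit): besides registering the EditorialMarkerFactory under the configured marker name (default "elevated"), the component now publishes the set of force-elevated ids in the request context under the key "BOOSTED", so downstream code can consult it:

    import java.util.Set;
    import org.apache.solr.handler.component.ResponseBuilder;

    public class BoostedIds {
      // Returns the ids the QueryElevationComponent elevated for this request,
      // or null if elevation did not apply.
      @SuppressWarnings("unchecked")
      static Set<String> get(ResponseBuilder rb) {
        return (Set<String>) rb.req.getContext().get("BOOSTED");
      }
    }

A request could then surface the marker through the registered transformer, e.g. fl=id,[elevated], assuming the transformer-in-fl syntax on this branch.
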
Modified: lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java (original)
+++ lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java Sun Nov 20 23:45:25 2011
@@ -31,6 +31,7 @@ import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.response.transform.DocTransformer;
+import org.apache.solr.response.transform.TransformContext;
 import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SchemaField;
@@ -40,6 +41,7 @@ import org.apache.solr.update.DocumentBu
 import org.apache.solr.update.UpdateLog;
 import org.apache.solr.util.RefCounted;
 
+import javax.xml.transform.Transformer;
 import java.io.IOException;
 import java.net.URL;
 import java.util.ArrayList;
@@ -103,6 +105,11 @@ public class RealTimeGetComponent extend
     RefCounted<SolrIndexSearcher> searcherHolder = null;
 
     DocTransformer transformer = rsp.getReturnFields().getTransformer();
+    if (transformer != null) {
+      TransformContext context = new TransformContext();
+      context.req = req;
+      transformer.setContext(context);
+    }
    try {
      SolrIndexSearcher searcher = null;
 

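For illustration (not part of the patch): the pattern this commit repeats for transformers used outside the normal doc-list path is to populate a TransformContext, now including the request, and hand it to the transformer before calling transform(). A minimal sketch:

    import java.io.IOException;
    import org.apache.solr.common.SolrDocument;
    import org.apache.solr.request.SolrQueryRequest;
    import org.apache.solr.response.transform.DocTransformer;
    import org.apache.solr.response.transform.TransformContext;

    public class TransformSketch {
      // docid -1 follows the usage above for documents not addressed by a searcher docid.
      static void apply(DocTransformer transformer, SolrQueryRequest req, SolrDocument doc)
          throws IOException {
        if (transformer == null) {
          return;
        }
        TransformContext context = new TransformContext();
        context.req = req;
        transformer.setContext(context);
        transformer.transform(doc, -1);
      }
    }
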
Modified: lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/component/TermVectorComponent.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/component/TermVectorComponent.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/component/TermVectorComponent.java (original)
+++ lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/component/TermVectorComponent.java Sun Nov 20 23:45:25 2011
@@ -8,16 +8,17 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
+import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.Fields;
+import org.apache.lucene.index.FieldsEnum;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.MultiFields;
-import org.apache.lucene.index.StoredFieldVisitor;
 import org.apache.lucene.index.StoredFieldVisitor.Status;
-import org.apache.lucene.index.TermVectorMapper;
-import org.apache.lucene.index.TermVectorOffsetInfo;
+import org.apache.lucene.index.StoredFieldVisitor;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.util.BytesRef;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.CommonParams;
@@ -225,12 +226,11 @@ public class TermVectorComponent extends
       }
     };
 
-    TVMapper mapper = new TVMapper(reader);
-    mapper.fieldOptions = allFields; //this will only stay set if fieldOptions.isEmpty() (in other words, only if the user didn't set any fields)
+    TermsEnum termsEnum = null;
+
     while (iter.hasNext()) {
       Integer docId = iter.next();
       NamedList<Object> docNL = new NamedList<Object>();
-      mapper.docNL = docNL;
       termVectors.add("doc-" + docId, docNL);
 
       if (keyField != null) {
@@ -245,12 +245,91 @@ public class TermVectorComponent extends
       }
       if (!fieldOptions.isEmpty()) {
         for (Map.Entry<String, FieldOptions> entry : fieldOptions.entrySet()) {
-          mapper.fieldOptions = entry.getValue();
-          reader.getTermFreqVector(docId, entry.getKey(), mapper);
+          final String field = entry.getKey();
+          final Terms vector = reader.getTermVector(docId, field);
+          if (vector != null) {
+            termsEnum = vector.iterator(termsEnum);
+            mapOneVector(docNL, entry.getValue(), reader, docId, termsEnum, field);
+          }
         }
       } else {
-        //deal with all fields by using the allFieldMapper
-        reader.getTermFreqVector(docId, mapper);
+        // extract all fields
+        final Fields vectors = reader.getTermVectors(docId);
+        final FieldsEnum fieldsEnum = vectors.iterator();
+        String field;
+        while((field = fieldsEnum.next()) != null) {
+          Terms terms = fieldsEnum.terms();
+          if (terms != null) {
+            termsEnum = terms.iterator(termsEnum);
+            mapOneVector(docNL, allFields, reader, docId, termsEnum, field);
+          }
+        }
+      }
+    }
+  }
+
+  private void mapOneVector(NamedList<Object> docNL, FieldOptions fieldOptions, IndexReader reader, int docID, TermsEnum termsEnum, String field) throws IOException {
+    NamedList<Object> fieldNL = new NamedList<Object>();
+    docNL.add(field, fieldNL);
+
+    BytesRef text;
+    DocsAndPositionsEnum dpEnum = null;
+    while((text = termsEnum.next()) != null) {
+      String term = text.utf8ToString();
+      NamedList<Object> termInfo = new NamedList<Object>();
+      fieldNL.add(term, termInfo);
+      final int freq = (int) termsEnum.totalTermFreq();
+      if (fieldOptions.termFreq == true) {
+        termInfo.add("tf", freq);
+      }
+
+      dpEnum = termsEnum.docsAndPositions(null, dpEnum);
+
+      boolean usePositions = false;
+      boolean useOffsets = false;
+      OffsetAttribute offsetAtt = null;
+      if (dpEnum != null) {
+        dpEnum.nextDoc();
+        usePositions = fieldOptions.positions;
+        if (fieldOptions.offsets && dpEnum.attributes().hasAttribute(OffsetAttribute.class)) {
+          useOffsets = true;
+          offsetAtt = dpEnum.attributes().getAttribute(OffsetAttribute.class);
+        }
+      }
+
+      NamedList<Number> theOffsets = null;
+      if (useOffsets) {
+        theOffsets = new NamedList<Number>();
+        termInfo.add("offsets", theOffsets);
+      }
+
+      NamedList<Integer> positionsNL = null;
+
+      if (usePositions || theOffsets != null) {
+        for (int i = 0; i < freq; i++) {
+          final int pos = dpEnum.nextPosition();
+          if (usePositions && pos >= 0) {
+            if (positionsNL == null) {
+              positionsNL = new NamedList<Integer>();
+              termInfo.add("positions", positionsNL);
+            }
+            positionsNL.add("position", pos);
+          }
+
+          if (theOffsets != null) {
+            theOffsets.add("start", offsetAtt.startOffset());
+            theOffsets.add("end", offsetAtt.endOffset());
+          }
+        }
+      }
+
+      if (fieldOptions.docFreq) {
+        termInfo.add("df", getDocFreq(reader, field, text));
+      }
+
+      if (fieldOptions.tfIdf) {
+        double tfIdfVal = ((double) freq) / getDocFreq(reader, field, text);
+        termInfo.add("tf-idf", tfIdfVal);
       }
     }
   }
@@ -310,90 +389,20 @@ public class TermVectorComponent extends
     return result;
   }
 
-  private static class TVMapper extends TermVectorMapper {
-    private IndexReader reader;
-    private NamedList<Object> docNL;
-
-    //needs to be set for each new field
-    FieldOptions fieldOptions;
-
-    //internal vars not passed in by construction
-    private boolean useOffsets, usePositions;
-    //private Map<String, Integer> idfCache;
-    private NamedList<Object> fieldNL;
-    private String field;
-
-
-    public TVMapper(IndexReader reader) {
-      this.reader = reader;
-    }
-
-    @Override
-    public void map(BytesRef term, int frequency, TermVectorOffsetInfo[] offsets, int[] positions) {
-      NamedList<Object> termInfo = new NamedList<Object>();
-      fieldNL.add(term.utf8ToString(), termInfo);
-      if (fieldOptions.termFreq == true) {
-        termInfo.add("tf", frequency);
-      }
-      if (useOffsets) {
-        NamedList<Number> theOffsets = new NamedList<Number>();
-        termInfo.add("offsets", theOffsets);
-        for (int i = 0; i < offsets.length; i++) {
-          TermVectorOffsetInfo offset = offsets[i];
-          theOffsets.add("start", offset.getStartOffset());
-          theOffsets.add("end", offset.getEndOffset());
-        }
-      }
-      if (usePositions) {
-        NamedList<Integer> positionsNL = new NamedList<Integer>();
-        for (int i = 0; i < positions.length; i++) {
-          positionsNL.add("position", positions[i]);
-        }
-        termInfo.add("positions", positionsNL);
-      }
-      if (fieldOptions.docFreq) {
-        termInfo.add("df", getDocFreq(term));
-      }
-      if (fieldOptions.tfIdf) {
-        double tfIdfVal = ((double) frequency) / getDocFreq(term);
-        termInfo.add("tf-idf", tfIdfVal);
-      }
-    }
-
-    private int getDocFreq(BytesRef term) {
-      int result = 1;
-      try {
-        Terms terms = MultiFields.getTerms(reader, field);
-        if (terms != null) {
-          TermsEnum termsEnum = terms.iterator();
-          if (termsEnum.seekExact(term, true)) {
-            result = termsEnum.docFreq();
-          }
+  private static int getDocFreq(IndexReader reader, String field, BytesRef term) {
+    int result = 1;
+    try {
+      Terms terms = MultiFields.getTerms(reader, field);
+      if (terms != null) {
+        TermsEnum termsEnum = terms.iterator(null);
+        if (termsEnum.seekExact(term, true)) {
+          result = termsEnum.docFreq();
         }
-      } catch (IOException e) {
-        throw new RuntimeException(e);
       }
-      return result;
-    }
-
-    @Override
-    public void setExpectations(String field, int numTerms, boolean storeOffsets, boolean storePositions) {
-      this.field = field;
-      useOffsets = storeOffsets && fieldOptions.offsets;
-      usePositions = storePositions && fieldOptions.positions;
-      fieldNL = new NamedList<Object>();
-      docNL.add(field, fieldNL);
-    }
-
-    @Override
-    public boolean isIgnoringPositions() {
-      return !fieldOptions.positions;  // if we are not interested in positions, then return true telling Lucene to skip loading them
-    }
-
-    @Override
-    public boolean isIgnoringOffsets() {
-      return !fieldOptions.offsets;  //  if we are not interested in offsets, then return true telling Lucene to skip loading them
+    } catch (IOException e) {
+      throw new RuntimeException(e);
     }
+    return result;
   }
 
   @Override

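For illustration (not part of the patch): the TermVectorComponent rewrite drops the TermVectorMapper callback and pulls everything straight from the enums: term frequency from totalTermFreq(), positions from DocsAndPositionsEnum.nextPosition(), and offsets, when the field stored them, from the OffsetAttribute exposed on the enum. A minimal sketch of that walk:

    import java.io.IOException;
    import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
    import org.apache.lucene.index.DocsAndPositionsEnum;
    import org.apache.lucene.index.TermsEnum;
    import org.apache.lucene.util.BytesRef;

    public class PositionsSketch {
      // Walks one term vector's TermsEnum, printing positions and, when stored, offsets.
      static void dump(TermsEnum termsEnum) throws IOException {
        BytesRef text;
        DocsAndPositionsEnum dpEnum = null;
        while ((text = termsEnum.next()) != null) {
          final int freq = (int) termsEnum.totalTermFreq();
          dpEnum = termsEnum.docsAndPositions(null, dpEnum);
          if (dpEnum == null) {
            continue;                          // positions were not indexed for this term
          }
          dpEnum.nextDoc();                    // a term vector holds a single pseudo-document
          OffsetAttribute offsets = dpEnum.attributes().hasAttribute(OffsetAttribute.class)
              ? dpEnum.attributes().getAttribute(OffsetAttribute.class) : null;
          for (int i = 0; i < freq; i++) {
            int pos = dpEnum.nextPosition();
            StringBuilder sb = new StringBuilder(text.utf8ToString()).append(" pos=").append(pos);
            if (offsets != null) {
              sb.append(" [").append(offsets.startOffset()).append(',').append(offsets.endOffset()).append(']');
            }
            System.out.println(sb);
          }
        }
      }
    }
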
Modified: lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java (original)
+++ lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java Sun Nov 20 23:45:25 2011
@@ -158,7 +158,7 @@ public class TermsComponent extends Sear
       }
 
 
-     TermsEnum termsEnum = terms.iterator();
+     TermsEnum termsEnum = terms.iterator(null);
      BytesRef term = null;
 
       if (lowerBytes != null) {

Modified: lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/request/SimpleFacets.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/request/SimpleFacets.java (original)
+++ lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/request/SimpleFacets.java Sun Nov 20 23:45:25 2011
@@ -633,7 +633,7 @@ public class SimpleFacets {
     SolrIndexSearcher.DocsEnumState deState = null;
     BytesRef term = null;
     if (terms != null) {
-      termsEnum = terms.iterator();
+      termsEnum = terms.iterator(null);
 
       // TODO: OPT: if seek(ord) is supported for this termsEnum, then we could use it for
       // facet.offset when sorting by index order.

Modified: lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java (original)
+++ lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java Sun Nov 20 23:45:25 2011
@@ -119,17 +119,17 @@ public class BinaryResponseWriter implem
     protected void writeResultsBody( ResultContext res, JavaBinCodec codec ) throws IOException 
     {
       DocList ids = res.docs;
-      TransformContext context = new TransformContext();
-      context.query = res.query;
-      context.wantsScores = returnFields.wantsScore() && ids.hasScores();
-      
       int sz = ids.size();
       codec.writeTag(JavaBinCodec.ARR, sz);
       if(searcher == null) searcher = solrQueryRequest.getSearcher();
-      if(schema == null) schema = solrQueryRequest.getSchema(); 
-      
-      context.searcher = searcher;
+      if(schema == null) schema = solrQueryRequest.getSchema();
+
       DocTransformer transformer = returnFields.getTransformer();
+      TransformContext context = new TransformContext();
+      context.query = res.query;
+      context.wantsScores = returnFields.wantsScore() && ids.hasScores();
+      context.req = solrQueryRequest;
+      context.searcher = searcher;
       if( transformer != null ) {
         transformer.setContext( context );
       }
@@ -141,7 +141,7 @@ public class BinaryResponseWriter implem
         Document doc = searcher.doc(id, fnames);
         SolrDocument sdoc = getDoc(doc);
         if( transformer != null ) {
-          transformer.transform(sdoc, id );
+          transformer.transform(sdoc, id);
         }
         codec.writeSolrDocument(sdoc);
       }

Modified: lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/response/TextResponseWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/response/TextResponseWriter.java?rev=1204297&r1=1204296&r2=1204297&view=diff
==============================================================================
--- lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/response/TextResponseWriter.java (original)
+++ lucene/dev/branches/solrcloud/solr/core/src/java/org/apache/solr/response/TextResponseWriter.java Sun Nov 20 23:45:25 2011
@@ -146,8 +146,12 @@ public abstract class TextResponseWriter
       writeDouble(name, ((Double)val).doubleValue());
     } else if (val instanceof Document) {
       SolrDocument doc = toSolrDocument( (Document)val );
-      if( returnFields.getTransformer() != null ) {
-        returnFields.getTransformer().transform( doc, -1 );
+      DocTransformer transformer = returnFields.getTransformer();
+      if( transformer != null ) {
+        TransformContext context = new TransformContext();
+        context.req = req;
+        transformer.setContext(context);
+        transformer.transform(doc, -1);
       }
       writeSolrDocument(name, doc, returnFields, 0 );
     } else if (val instanceof SolrDocument) {
@@ -232,6 +236,7 @@ public abstract class TextResponseWriter
     TransformContext context = new TransformContext();
     context.query = res.query;
     context.wantsScores = fields.wantsScore() && ids.hasScores();
+    context.req = req;
     writeStartDocumentList(name, ids.offset(), ids.size(), ids.matches(), 
         context.wantsScores ? new Float(ids.maxScore()) : null );
     
@@ -248,7 +253,7 @@ public abstract class TextResponseWriter
       Document doc = context.searcher.doc(id, fnames);
       SolrDocument sdoc = toSolrDocument( doc );
       if( transformer != null ) {
-        transformer.transform( sdoc, id );
+        transformer.transform( sdoc, id);
       }
       writeSolrDocument( null, sdoc, returnFields, i );
     }