Posted to commits@lucene.apache.org by rm...@apache.org on 2011/11/11 22:13:56 UTC

svn commit: r1201053 [2/4] - in /lucene/dev/branches/lucene2621: ./ dev-tools/idea/lucene/contrib/ lucene/ lucene/contrib/demo/src/java/org/apache/lucene/demo/ lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/ lucene/contrib/insta...

Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/MergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/MergePolicy.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/MergePolicy.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/MergePolicy.java Fri Nov 11 21:13:51 2011
@@ -30,8 +30,7 @@ import org.apache.lucene.util.SetOnce;
 
 /**
  * <p>Expert: a MergePolicy determines the sequence of
- * primitive merge operations to be used for overall merge
- * and optimize operations.</p>
+ * primitive merge operations.</p>
  * 
  * <p>Whenever the segments in an index have been altered by
  * {@link IndexWriter}, either the addition of a newly
@@ -42,8 +41,8 @@ import org.apache.lucene.util.SetOnce;
  * merges that are now required.  This method returns a
  * {@link MergeSpecification} instance describing the set of
  * merges that should be done, or null if no merges are
- * necessary.  When IndexWriter.optimize is called, it calls
- * {@link #findMergesForOptimize} and the MergePolicy should
+ * necessary.  When IndexWriter.forceMerge is called, it calls
+ * {@link #findForcedMerges(SegmentInfos,int,Map)} and the MergePolicy should
  * then return the necessary merges.</p>
  *
  * <p>Note that the policy can return more than one merge at
@@ -69,11 +68,10 @@ public abstract class MergePolicy implem
   public static class OneMerge {
 
     SegmentInfo info;               // used by IndexWriter
-    boolean optimize;               // used by IndexWriter
     boolean registerDone;           // used by IndexWriter
     long mergeGen;                  // used by IndexWriter
     boolean isExternal;             // used by IndexWriter
-    int maxNumSegmentsOptimize;     // used by IndexWriter
+    int maxNumSegments = -1;        // used by IndexWriter
     public long estimatedMergeBytes;       // used by IndexWriter
     List<SegmentReader> readers;        // used by IndexWriter
     List<BitVector> readerLiveDocs;   // used by IndexWriter
@@ -160,8 +158,8 @@ public abstract class MergePolicy implem
       }
       if (info != null)
         b.append(" into ").append(info.name);
-      if (optimize)
-        b.append(" [optimize]");
+      if (maxNumSegments != -1)
+        b.append(" [maxNumSegments=" + maxNumSegments + "]");
       if (aborted) {
         b.append(" [ABORTED]");
       }
@@ -193,7 +191,7 @@ public abstract class MergePolicy implem
     }
     
     public MergeInfo getMergeInfo() {
-      return new MergeInfo(totalDocCount, estimatedMergeBytes, isExternal, optimize);
+      return new MergeInfo(totalDocCount, estimatedMergeBytes, isExternal, maxNumSegments);
     }    
   }
 
@@ -290,9 +288,9 @@ public abstract class MergePolicy implem
       throws CorruptIndexException, IOException;
 
   /**
-   * Determine what set of merge operations is necessary in order to optimize
-   * the index. {@link IndexWriter} calls this when its
-   * {@link IndexWriter#optimize()} method is called. This call is always
+   * Determine what set of merge operations is necessary in
+   * order to merge to <= the specified segment count. {@link IndexWriter} calls this when its
+   * {@link IndexWriter#forceMerge} method is called. This call is always
    * synchronized on the {@link IndexWriter} instance so only one thread at a
    * time will call this method.
    * 
@@ -301,17 +299,17 @@ public abstract class MergePolicy implem
    * @param maxSegmentCount
    *          requested maximum number of segments in the index (currently this
    *          is always 1)
-   * @param segmentsToOptimize
+   * @param segmentsToMerge
    *          contains the specific SegmentInfo instances that must be merged
    *          away. This may be a subset of all
    *          SegmentInfos.  If the value is True for a
    *          given SegmentInfo, that means this segment was
    *          an original segment present in the
-   *          to-be-optimized index; else, it was a segment
+   *          to-be-merged index; else, it was a segment
    *          produced by a cascaded merge.
    */
-  public abstract MergeSpecification findMergesForOptimize(
-          SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentInfo,Boolean> segmentsToOptimize)
+  public abstract MergeSpecification findForcedMerges(
+          SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentInfo,Boolean> segmentsToMerge)
       throws CorruptIndexException, IOException;
 
   /**

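A minimal sketch of a policy written against the renamed hook (the subclass name and its opt-out behavior are illustrative only, mirroring NoMergePolicy further down; returning null tells IndexWriter.forceMerge that no merges are needed):

    import java.io.IOException;
    import java.util.Map;
    import org.apache.lucene.index.CorruptIndexException;
    import org.apache.lucene.index.LogByteSizeMergePolicy;
    import org.apache.lucene.index.SegmentInfo;
    import org.apache.lucene.index.SegmentInfos;

    // Hypothetical policy that declines all forced merges, like NoMergePolicy below.
    public class NoForcedMergesPolicy extends LogByteSizeMergePolicy {
      @Override
      public MergeSpecification findForcedMerges(SegmentInfos segmentInfos,
          int maxSegmentCount, Map<SegmentInfo,Boolean> segmentsToMerge)
          throws CorruptIndexException, IOException {
        return null; // null means "no merges necessary" when IndexWriter.forceMerge asks
      }
    }
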
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/MultiReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/MultiReader.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/MultiReader.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/MultiReader.java Fri Nov 11 21:13:51 2011
@@ -234,12 +234,6 @@ public class MultiReader extends IndexRe
   }
 
   @Override
-  public boolean isOptimized() {
-    ensureOpen();
-    return false;
-  }
-  
-  @Override
   public int numDocs() {
     // Don't call ensureOpen() here (it could affect performance)
     // NOTE: multiple threads may wind up init'ing

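With isOptimized() gone from readers, callers can look at the segment structure directly, as the updated TestDeletionPolicy below does (a sketch, assuming dir is an already-populated Directory):

    IndexReader r = IndexReader.open(dir, true);
    // one sub-reader per segment, so length == 1 replaces the old isOptimized() check
    boolean singleSegment = r.getSequentialSubReaders().length == 1;
    r.close();
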
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/NoMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/NoMergePolicy.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/NoMergePolicy.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/NoMergePolicy.java Fri Nov 11 21:13:51 2011
@@ -58,8 +58,8 @@ public final class NoMergePolicy extends
       throws CorruptIndexException, IOException { return null; }
 
   @Override
-  public MergeSpecification findMergesForOptimize(SegmentInfos segmentInfos,
-             int maxSegmentCount, Map<SegmentInfo,Boolean> segmentsToOptimize)
+  public MergeSpecification findForcedMerges(SegmentInfos segmentInfos,
+             int maxSegmentCount, Map<SegmentInfo,Boolean> segmentsToMerge)
       throws CorruptIndexException, IOException { return null; }
 
   @Override

Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/ParallelReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/ParallelReader.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/ParallelReader.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/ParallelReader.java Fri Nov 11 21:13:51 2011
@@ -477,23 +477,6 @@ public class ParallelReader extends Inde
     return true;
   }
 
-  /**
-   * Checks recursively if all subindexes are optimized 
-   */
-  @Override
-  public boolean isOptimized() {
-    ensureOpen();
-    for (final IndexReader reader : readers) {
-      if (!reader.isOptimized()) {
-        return false;
-      }
-    }
-    
-    // all subindexes are optimized
-    return true;
-  }
-
-  
   /** Not implemented.
    * @throws UnsupportedOperationException
    */

Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/SnapshotDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/SnapshotDeletionPolicy.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/SnapshotDeletionPolicy.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/SnapshotDeletionPolicy.java Fri Nov 11 21:13:51 2011
@@ -136,8 +136,8 @@ public class SnapshotDeletionPolicy impl
     }
 
     @Override
-    public boolean isOptimized() {
-      return cp.isOptimized();
+    public int getSegmentCount() {
+      return cp.getSegmentCount();
     }
   }
 
@@ -340,7 +340,7 @@ public class SnapshotDeletionPolicy impl
    * <b>NOTE:</b> while the snapshot is held, the files it references will not
    * be deleted, which will consume additional disk space in your index. If you
    * take a snapshot at a particularly bad time (say just before you call
-   * optimize()) then in the worst case this could consume an extra 1X of your
+   * forceMerge) then in the worst case this could consume an extra 1X of your
    * total index size, until you release the snapshot.
    * 
    * @param id

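The commit-level check moves from a boolean to a segment count; a sketch, assuming snapshots is the SnapshotDeletionPolicy installed on the writer and "backup" is an arbitrary snapshot id:

    IndexCommit commit = snapshots.snapshot("backup");
    try {
      boolean singleSegment = commit.getSegmentCount() == 1;  // replaces commit.isOptimized()
      // ... copy commit.getFileNames() to backup storage ...
    } finally {
      snapshots.release("backup");
    }
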
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/TieredMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/TieredMergePolicy.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/TieredMergePolicy.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/TieredMergePolicy.java Fri Nov 11 21:13:51 2011
@@ -62,7 +62,7 @@ import org.apache.lucene.util.InfoStream
  *  <p><b>NOTE</b>: This policy always merges by byte size
  *  of the segments, always pro-rates by percent deletes,
  *  and does not apply any maximum segment size during
- *  optimize (unlike {@link LogByteSizeMergePolicy}).
+ *  forceMerge (unlike {@link LogByteSizeMergePolicy}).
  *
  *  @lucene.experimental
  */
@@ -88,7 +88,7 @@ public class TieredMergePolicy extends M
 
   /** Maximum number of segments to be merged at a time
    *  during "normal" merging.  For explicit merging (eg,
-   *  optimize or expungeDeletes was called), see {@link
+   *  forceMerge or expungeDeletes was called), see {@link
    *  #setMaxMergeAtOnceExplicit}.  Default is 10. */
   public TieredMergePolicy setMaxMergeAtOnce(int v) {
     if (v < 2) {
@@ -107,7 +107,7 @@ public class TieredMergePolicy extends M
   // if user calls IW.maybeMerge "explicitly"
 
   /** Maximum number of segments to be merged at a time,
-   *  during optimize or expungeDeletes. Default is 30. */
+   *  during forceMerge or expungeDeletes. Default is 30. */
   public TieredMergePolicy setMaxMergeAtOnceExplicit(int v) {
     if (v < 2) {
       throw new IllegalArgumentException("maxMergeAtOnceExplicit must be > 1 (got " + v + ")");
@@ -478,23 +478,23 @@ public class TieredMergePolicy extends M
   }
 
   @Override
-  public MergeSpecification findMergesForOptimize(SegmentInfos infos, int maxSegmentCount, Map<SegmentInfo,Boolean> segmentsToOptimize) throws IOException {
+  public MergeSpecification findForcedMerges(SegmentInfos infos, int maxSegmentCount, Map<SegmentInfo,Boolean> segmentsToMerge) throws IOException {
     if (verbose()) {
-      message("findMergesForOptimize maxSegmentCount=" + maxSegmentCount + " infos=" + writer.get().segString(infos) + " segmentsToOptimize=" + segmentsToOptimize);
+      message("findForcedMerges maxSegmentCount=" + maxSegmentCount + " infos=" + writer.get().segString(infos) + " segmentsToMerge=" + segmentsToMerge);
     }
 
     List<SegmentInfo> eligible = new ArrayList<SegmentInfo>();
-    boolean optimizeMergeRunning = false;
+    boolean forceMergeRunning = false;
     final Collection<SegmentInfo> merging = writer.get().getMergingSegments();
     boolean segmentIsOriginal = false;
     for(SegmentInfo info : infos) {
-      final Boolean isOriginal = segmentsToOptimize.get(info);
+      final Boolean isOriginal = segmentsToMerge.get(info);
       if (isOriginal != null) {
         segmentIsOriginal = isOriginal;
         if (!merging.contains(info)) {
           eligible.add(info);
         } else {
-          optimizeMergeRunning = true;
+          forceMergeRunning = true;
         }
       }
     }
@@ -504,9 +504,9 @@ public class TieredMergePolicy extends M
     }
 
     if ((maxSegmentCount > 1 && eligible.size() <= maxSegmentCount) ||
-        (maxSegmentCount == 1 && eligible.size() == 1 && (!segmentIsOriginal || isOptimized(eligible.get(0))))) {
+        (maxSegmentCount == 1 && eligible.size() == 1 && (!segmentIsOriginal || isMerged(eligible.get(0))))) {
       if (verbose()) {
-        message("already optimized");
+        message("already merged");
       }
       return null;
     }
@@ -515,7 +515,7 @@ public class TieredMergePolicy extends M
 
     if (verbose()) {
       message("eligible=" + eligible);
-      message("optimizeMergeRunning=" + optimizeMergeRunning);
+      message("forceMergeRunning=" + forceMergeRunning);
     }
 
     int end = eligible.size();
@@ -535,7 +535,7 @@ public class TieredMergePolicy extends M
       end -= maxMergeAtOnceExplicit;
     }
 
-    if (spec == null && !optimizeMergeRunning) {
+    if (spec == null && !forceMergeRunning) {
       // Do final merge
       final int numToMerge = end - maxSegmentCount + 1;
       final OneMerge merge = new OneMerge(eligible.subList(end-numToMerge, end));
@@ -580,7 +580,7 @@ public class TieredMergePolicy extends M
     while(start < eligible.size()) {
       // Don't enforce max merged size here: app is explicitly
       // calling expungeDeletes, and knows this may take a
-      // long time / produce big segments (like optimize):
+      // long time / produce big segments (like forceMerge):
       final int end = Math.min(start + maxMergeAtOnceExplicit, eligible.size());
       if (spec == null) {
         spec = new MergeSpecification();
@@ -619,7 +619,7 @@ public class TieredMergePolicy extends M
   public void close() {
   }
 
-  private boolean isOptimized(SegmentInfo info)
+  private boolean isMerged(SegmentInfo info)
     throws IOException {
     IndexWriter w = writer.get();
     assert w != null;

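A configuration sketch tying the renamed setters together (following the placeholder style of the UpgradeIndexMergePolicy javadoc further down; 10 and 30 are just the defaults quoted above):

    TieredMergePolicy tmp = new TieredMergePolicy();
    tmp.setMaxMergeAtOnce(10);          // "normal" background merging
    tmp.setMaxMergeAtOnceExplicit(30);  // forceMerge / expungeDeletes
    IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_XX, new KeywordAnalyzer());
    iwc.setMergePolicy(tmp);
    IndexWriter w = new IndexWriter(dir, iwc);
    w.forceMerge(1);   // explicit merge path, capped by maxMergeAtOnceExplicit
    w.close();
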
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java Fri Nov 11 21:13:51 2011
@@ -27,24 +27,24 @@ import java.util.Map;
 import java.util.HashMap;
 
 /** This {@link MergePolicy} is used for upgrading all existing segments of
-  * an index when calling {@link IndexWriter#optimize()}.
+  * an index when calling {@link IndexWriter#forceMerge(int)}.
   * All other methods delegate to the base {@code MergePolicy} given to the constructor.
   * This allows for an as-cheap-as possible upgrade of an older index by only upgrading segments that
-  * are created by previous Lucene versions. Optimize does no longer really optimize
-  * it is just used to &quot;optimize&quot; older segment versions away.
+  * are created by previous Lucene versions. forceMerge no longer really merges;

+  * it is just used to &quot;forceMerge&quot; older segment versions away.
   * <p>In general one would use {@link IndexUpgrader}, but for a fully customizeable upgrade,
-  * you can use this like any other {@code MergePolicy} and call {@link IndexWriter#optimize()}:
+  * you can use this like any other {@code MergePolicy} and call {@link IndexWriter#forceMerge(int)}:
   * <pre class="prettyprint lang-java">
   *  IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_XX, new KeywordAnalyzer());
   *  iwc.setMergePolicy(new UpgradeIndexMergePolicy(iwc.getMergePolicy()));
   *  IndexWriter w = new IndexWriter(dir, iwc);
-  *  w.optimize();
+  *  w.forceMerge(1);
   *  w.close();
   * </pre>
   * <p><b>Warning:</b> This merge policy may reorder documents if the index was partially
-  * upgraded before calling optimize (e.g., documents were added). If your application relies
+  * upgraded before calling forceMerge (e.g., documents were added). If your application relies
   * on &quot;monotonicity&quot; of doc IDs (which means that the order in which the documents
-  * were added to the index is preserved), do a full optimize instead. Please note, the
+  * were added to the index is preserved), do a forceMerge(1) instead. Please note, the
   * delegate {@code MergePolicy} may also reorder documents.
   * @lucene.experimental
   * @see IndexUpgrader
@@ -53,7 +53,7 @@ public class UpgradeIndexMergePolicy ext
 
   protected final MergePolicy base;
 
-  /** Wrap the given {@link MergePolicy} and intercept optimize requests to
+  /** Wrap the given {@link MergePolicy} and intercept forceMerge requests to
    * only upgrade segments written with previous Lucene versions. */
   public UpgradeIndexMergePolicy(MergePolicy base) {
     this.base = base;
@@ -80,22 +80,22 @@ public class UpgradeIndexMergePolicy ext
   }
   
   @Override
-  public MergeSpecification findMergesForOptimize(SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentInfo,Boolean> segmentsToOptimize) throws CorruptIndexException, IOException {
+  public MergeSpecification findForcedMerges(SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentInfo,Boolean> segmentsToMerge) throws CorruptIndexException, IOException {
     // first find all old segments
     final Map<SegmentInfo,Boolean> oldSegments = new HashMap<SegmentInfo,Boolean>();
     for (final SegmentInfo si : segmentInfos) {
-      final Boolean v =segmentsToOptimize.get(si);
+      final Boolean v = segmentsToMerge.get(si);
       if (v != null && shouldUpgradeSegment(si)) {
         oldSegments.put(si, v);
       }
     }
     
-    if (verbose()) message("findMergesForOptimize: segmentsToUpgrade=" + oldSegments);
+    if (verbose()) message("findForcedMerges: segmentsToUpgrade=" + oldSegments);
       
     if (oldSegments.isEmpty())
       return null;
 
-    MergeSpecification spec = base.findMergesForOptimize(segmentInfos, maxSegmentCount, oldSegments);
+    MergeSpecification spec = base.findForcedMerges(segmentInfos, maxSegmentCount, oldSegments);
     
     if (spec != null) {
       // remove all segments that are in merge specification from oldSegments,
@@ -108,7 +108,7 @@ public class UpgradeIndexMergePolicy ext
 
     if (!oldSegments.isEmpty()) {
       if (verbose())
-        message("findMergesForOptimize: " +  base.getClass().getSimpleName() +
+        message("findForcedMerges: " +  base.getClass().getSimpleName() +
         " does not want to merge all old segments, merge remaining ones into new segment: " + oldSegments);
       final List<SegmentInfo> newInfos = new ArrayList<SegmentInfo>();
       for (final SegmentInfo si : segmentInfos) {

Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/search/MultiTermQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/search/MultiTermQuery.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/search/MultiTermQuery.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/search/MultiTermQuery.java Fri Nov 11 21:13:51 2011
@@ -273,14 +273,14 @@ public abstract class MultiTermQuery ext
   /**
    * Expert: Return the number of unique terms visited during execution of the query.
    * If there are many of them, you may consider using another query type
-   * or optimize your total term count in index.
+   * or reduce your total term count in index.
    * <p>This method is not thread safe, be sure to only call it when no query is running!
    * If you re-use the same query instance for another
    * search, be sure to first reset the term counter
    * with {@link #clearTotalNumberOfTerms}.
-   * <p>On optimized indexes / no MultiReaders, you get the correct number of
+   * <p>On single-segment indexes / no MultiReaders, you get the correct number of
    * unique terms for the whole index. Use this number to compare different queries.
-   * For non-optimized indexes this number can also be achieved in
+   * For multi-segment indexes this number can also be achieved in
    * non-constant-score mode. In constant-score mode you get the total number of
    * terms seeked for all segments / sub-readers.
    * @see #clearTotalNumberOfTerms

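A usage sketch for the counter described above (the field name, prefix and searcher are made up):

    PrefixQuery q = new PrefixQuery(new Term("body", "luc"));
    searcher.search(q, 10);
    int visited = q.getTotalNumberOfTerms();  // unique terms enumerated for this query
    q.clearTotalNumberOfTerms();              // reset before re-using the instance
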
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/store/FlushInfo.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/store/FlushInfo.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/store/FlushInfo.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/store/FlushInfo.java Fri Nov 11 21:13:51 2011
@@ -18,7 +18,7 @@ package org.apache.lucene.store;
  */
 
 /**
- * <p>A FlushInfo provides information required for a FLUSH context and other optimization operations.
+ * <p>A FlushInfo provides information required for a FLUSH context.
  *  It is used as part of an {@link IOContext} in case of FLUSH context.</p>
  */
 

Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/store/MergeInfo.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/store/MergeInfo.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/store/MergeInfo.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/store/MergeInfo.java Fri Nov 11 21:13:51 2011
@@ -17,7 +17,7 @@ package org.apache.lucene.store;
  */
 
 /**
- * <p>A MergeInfo provides information required for a MERGE context and other optimization operations.
+ * <p>A MergeInfo provides information required for a MERGE context.
  *  It is used as part of an {@link IOContext} in case of MERGE context.</p>
  */
 
@@ -29,7 +29,7 @@ public class MergeInfo {
   
   public final boolean isExternal;
   
-  public final boolean optimize;
+  public final int mergeMaxNumSegments;
   
 
   /**
@@ -40,11 +40,11 @@ public class MergeInfo {
    * 
    */
 
-  public MergeInfo(int totalDocCount, long estimatedMergeBytes, boolean isExternal, boolean optimize) {
+  public MergeInfo(int totalDocCount, long estimatedMergeBytes, boolean isExternal, int mergeMaxNumSegments) {
     this.totalDocCount = totalDocCount;
     this.estimatedMergeBytes = estimatedMergeBytes;
     this.isExternal = isExternal;
-    this.optimize = optimize;
+    this.mergeMaxNumSegments = mergeMaxNumSegments;
   }
 
 
@@ -55,7 +55,7 @@ public class MergeInfo {
     result = prime * result
         + (int) (estimatedMergeBytes ^ (estimatedMergeBytes >>> 32));
     result = prime * result + (isExternal ? 1231 : 1237);
-    result = prime * result + (optimize ? 1231 : 1237);
+    result = prime * result + mergeMaxNumSegments;
     result = prime * result + totalDocCount;
     return result;
   }
@@ -73,7 +73,7 @@ public class MergeInfo {
       return false;
     if (isExternal != other.isExternal)
       return false;
-    if (optimize != other.optimize)
+    if (mergeMaxNumSegments != other.mergeMaxNumSegments)
       return false;
     if (totalDocCount != other.totalDocCount)
       return false;
@@ -84,6 +84,6 @@ public class MergeInfo {
   public String toString() {
     return "MergeInfo [totalDocCount=" + totalDocCount
         + ", estimatedMergeBytes=" + estimatedMergeBytes + ", isExternal="
-        + isExternal + ", optimize=" + optimize + "]";
+        + isExternal + ", mergeMaxNumSegments=" + mergeMaxNumSegments + "]";
   }
-}
\ No newline at end of file
+}

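The boolean optimize flag becomes an int segment cap; a sketch of constructing the new MergeInfo for an IOContext (docCount/mergeBytes are placeholders; the -1 sentinel mirrors the LuceneTestCase change below):

    // -1 means "not a forced merge"; a positive value is the requested max segment count.
    MergeInfo backgroundMerge = new MergeInfo(docCount, mergeBytes, false, -1);
    MergeInfo forcedMerge     = new MergeInfo(docCount, mergeBytes, false, 1);
    IOContext context = new IOContext(forcedMerge);
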
Modified: lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/analysis/CollationTestBase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/analysis/CollationTestBase.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/analysis/CollationTestBase.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/analysis/CollationTestBase.java Fri Nov 11 21:13:51 2011
@@ -216,7 +216,7 @@ public abstract class CollationTestBase 
         doc.add(new TextField("Denmark", denmarkAnalyzer.tokenStream("Denmark", new StringReader(sortData[i][5]))));
       writer.addDocument(doc);
     }
-    writer.optimize();
+    writer.forceMerge(1);
     writer.close();
     IndexSearcher searcher = new IndexSearcher(indexStore, true);
 

Modified: lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/index/MockRandomMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/index/MockRandomMergePolicy.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/index/MockRandomMergePolicy.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/index/MockRandomMergePolicy.java Fri Nov 11 21:13:51 2011
@@ -55,18 +55,18 @@ public class MockRandomMergePolicy exten
   }
 
   @Override
-  public MergeSpecification findMergesForOptimize(
-       SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentInfo,Boolean> segmentsToOptimize)
+  public MergeSpecification findForcedMerges(
+       SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentInfo,Boolean> segmentsToMerge)
     throws CorruptIndexException, IOException {
 
     final List<SegmentInfo> eligibleSegments = new ArrayList<SegmentInfo>();
     for(SegmentInfo info : segmentInfos) {
-      if (segmentsToOptimize.containsKey(info)) {
+      if (segmentsToMerge.containsKey(info)) {
         eligibleSegments.add(info);
       }
     }
 
-    //System.out.println("MRMP: findMergesForOptimize sis=" + segmentInfos + " eligible=" + eligibleSegments);
+    //System.out.println("MRMP: findMerges sis=" + segmentInfos + " eligible=" + eligibleSegments);
     MergeSpecification mergeSpec = null;
     if (eligibleSegments.size() > 1 || (eligibleSegments.size() == 1 && eligibleSegments.get(0).hasDeletions())) {
       mergeSpec = new MergeSpecification();
@@ -85,7 +85,7 @@ public class MockRandomMergePolicy exten
     if (mergeSpec != null) {
       for(OneMerge merge : mergeSpec.merges) {
         for(SegmentInfo info : merge.segments) {
-          assert segmentsToOptimize.containsKey(info);
+          assert segmentsToMerge.containsKey(info);
         }
       }
     }

Modified: lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/index/RandomIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/index/RandomIndexWriter.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/index/RandomIndexWriter.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/index/RandomIndexWriter.java Fri Nov 11 21:13:51 2011
@@ -38,7 +38,7 @@ import org.apache.lucene.util._TestUtil;
 
 /** Silly class that randomizes the indexing experience.  EG
  *  it may swap in a different merge policy/scheduler; may
- *  commit periodically; may or may not optimize in the end,
+ *  commit periodically; may or may not forceMerge in the end,
  *  may flush by doc count instead of RAM, etc. 
  */
 
@@ -323,8 +323,8 @@ public class RandomIndexWriter implement
     return getReader(true);
   }
 
-  private boolean doRandomOptimize = true;
-  private boolean doRandomOptimizeAssert = true;
+  private boolean doRandomForceMerge = true;
+  private boolean doRandomForceMergeAssert = true;
 
   public void expungeDeletes(boolean doWait) throws IOException {
     w.expungeDeletes(doWait);
@@ -334,25 +334,25 @@ public class RandomIndexWriter implement
     w.expungeDeletes();
   }
 
-  public void setDoRandomOptimize(boolean v) {
-    doRandomOptimize = v;
+  public void setDoRandomForceMerge(boolean v) {
+    doRandomForceMerge = v;
   }
 
-  public void setDoRandomOptimizeAssert(boolean v) {
-    doRandomOptimizeAssert = v;
+  public void setDoRandomForceMergeAssert(boolean v) {
+    doRandomForceMergeAssert = v;
   }
 
-  private void doRandomOptimize() throws IOException {
-    if (doRandomOptimize) {
+  private void doRandomForceMerge() throws IOException {
+    if (doRandomForceMerge) {
       final int segCount = w.getSegmentCount();
       if (r.nextBoolean() || segCount == 0) {
-        // full optimize
-        w.optimize();
+        // full forceMerge
+        w.forceMerge(1);
       } else {
-        // partial optimize
+        // partial forceMerge
         final int limit = _TestUtil.nextInt(r, 1, segCount);
-        w.optimize(limit);
-        assert !doRandomOptimizeAssert || w.getSegmentCount() <= limit: "limit=" + limit + " actual=" + w.getSegmentCount();
+        w.forceMerge(limit);
+        assert !doRandomForceMergeAssert || w.getSegmentCount() <= limit: "limit=" + limit + " actual=" + w.getSegmentCount();
       }
     }
     switchDoDocValues();
@@ -361,7 +361,7 @@ public class RandomIndexWriter implement
   public IndexReader getReader(boolean applyDeletions) throws IOException {
     getReaderCalled = true;
     if (r.nextInt(4) == 2) {
-      doRandomOptimize();
+      doRandomForceMerge();
     }
     // If we are writing with PreFlexRW, force a full
     // IndexReader.open so terms are sorted in codepoint
@@ -394,21 +394,21 @@ public class RandomIndexWriter implement
    */
   public void close() throws IOException {
     // if someone isn't using getReader() API, we want to be sure to
-    // maybeOptimize since presumably they might open a reader on the dir.
+    // forceMerge since presumably they might open a reader on the dir.
     if (getReaderCalled == false && r.nextInt(8) == 2) {
-      doRandomOptimize();
+      doRandomForceMerge();
     }
     w.close();
   }
 
   /**
-   * Forces an optimize.
+   * Forces a forceMerge.
    * <p>
    * NOTE: this should be avoided in tests unless absolutely necessary,
    * as it will result in less test coverage.
-   * @see IndexWriter#optimize()
+   * @see IndexWriter#forceMerge(int)
    */
-  public void optimize() throws IOException {
-    w.optimize();
+  public void forceMerge(int maxSegmentCount) throws IOException {
+    w.forceMerge(maxSegmentCount);
   }
 }

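The renamed test hooks in use (a sketch assuming the usual LuceneTestCase random/dir fields):

    RandomIndexWriter w = new RandomIndexWriter(random, dir);
    w.setDoRandomForceMerge(false);  // keep the segment layout deterministic for this test
    // ... index documents ...
    w.forceMerge(1);                 // explicit full merge; replaces the old optimize()
    w.close();
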
Modified: lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java Fri Nov 11 21:13:51 2011
@@ -54,7 +54,7 @@ import org.apache.lucene.util.NamedThrea
 import org.apache.lucene.util._TestUtil;
 
 // TODO
-//   - mix in optimize, addIndexes
+//   - mix in forceMerge, addIndexes
 //   - randomly mix in non-congruent docs
 
 /** Utility class that spawns multiple indexing and

Modified: lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/util/LuceneTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/util/LuceneTestCase.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/util/LuceneTestCase.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test-framework/java/org/apache/lucene/util/LuceneTestCase.java Fri Nov 11 21:13:51 2011
@@ -32,7 +32,6 @@ import java.util.Map.Entry;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
-import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.lucene.analysis.Analyzer;
@@ -41,17 +40,8 @@ import org.apache.lucene.document.FieldT
 import org.apache.lucene.index.*;
 import org.apache.lucene.index.codecs.Codec;
 import org.apache.lucene.index.codecs.PostingsFormat;
-import org.apache.lucene.index.codecs.lucene3x.Lucene3xCodec;
-import org.apache.lucene.index.codecs.lucene3x.Lucene3xPostingsFormat;
 import org.apache.lucene.index.codecs.lucene40.Lucene40Codec;
-import org.apache.lucene.index.codecs.mockintblock.MockFixedIntBlockPostingsFormat;
-import org.apache.lucene.index.codecs.mockintblock.MockVariableIntBlockPostingsFormat;
-import org.apache.lucene.index.codecs.mocksep.MockSepPostingsFormat;
-import org.apache.lucene.index.codecs.mockrandom.MockRandomPostingsFormat;
-import org.apache.lucene.index.codecs.perfield.PerFieldPostingsFormat;
 import org.apache.lucene.index.codecs.preflexrw.PreFlexRWCodec;
-import org.apache.lucene.index.codecs.preflexrw.PreFlexRWPostingsFormat;
-import org.apache.lucene.index.codecs.pulsing.PulsingPostingsFormat;
 import org.apache.lucene.index.codecs.simpletext.SimpleTextCodec;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.FieldCache;
@@ -1329,7 +1319,7 @@ public abstract class LuceneTestCase ext
       context = IOContext.READONCE;
       break;
     case 3:
-      context = new IOContext(new MergeInfo(randomNumDocs, size, true, false));
+      context = new IOContext(new MergeInfo(randomNumDocs, size, true, -1));
       break;
     case 4:
       context = new IOContext(new FlushInfo(randomNumDocs, size));

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/TestExternalCodecs.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/TestExternalCodecs.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/TestExternalCodecs.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/TestExternalCodecs.java Fri Nov 11 21:13:51 2011
@@ -25,11 +25,9 @@ import org.apache.lucene.document.*;
 import org.apache.lucene.index.*;
 import org.apache.lucene.index.codecs.*;
 import org.apache.lucene.index.codecs.lucene40.Lucene40Codec;
-import org.apache.lucene.index.codecs.perfield.PerFieldPostingsFormat;
 import org.apache.lucene.search.*;
 import org.apache.lucene.store.*;
 import org.apache.lucene.util.*;
-import org.apache.lucene.util.Bits;
 
 /* Intentionally outside of oal.index to verify fully
    external codecs work fine */
@@ -104,7 +102,7 @@ public class TestExternalCodecs extends 
       System.out.println("\nTEST: now delete 2nd doc");
     }
     w.deleteDocuments(new Term("id", "44"));
-    w.optimize();
+    w.forceMerge(1);
     r = IndexReader.open(w, true);
     assertEquals(NUM_DOCS-2, r.maxDoc());
     assertEquals(NUM_DOCS-2, r.numDocs());

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/TestMergeSchedulerExternal.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/TestMergeSchedulerExternal.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/TestMergeSchedulerExternal.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/TestMergeSchedulerExternal.java Fri Nov 11 21:13:51 2011
@@ -142,7 +142,7 @@ public class TestMergeSchedulerExternal 
     writer.commit(); // trigger flush
     writer.addDocument(new Document());
     writer.commit(); // trigger flush
-    writer.optimize();
+    writer.forceMerge(1);
     writer.close();
     dir.close();
   }

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/Test2BPostings.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/Test2BPostings.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/Test2BPostings.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/Test2BPostings.java Fri Nov 11 21:13:51 2011
@@ -73,7 +73,7 @@ public class Test2BPostings extends Luce
         System.out.println(i + " of " + numDocs + "...");
       }
     }
-    w.optimize();
+    w.forceMerge(1);
     w.close();
     CheckIndex ci = new CheckIndex(dir);
     if (VERBOSE) {

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/Test2BTerms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/Test2BTerms.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/Test2BTerms.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/Test2BTerms.java Fri Nov 11 21:13:51 2011
@@ -195,8 +195,8 @@ public class Test2BTerms extends LuceneT
       }
       savedTerms = ts.savedTerms;
 
-      System.out.println("TEST: optimize");
-      w.optimize();
+      System.out.println("TEST: full merge");
+      w.forceMerge(1);
       System.out.println("TEST: close writer");
       w.close();
     }

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java Fri Nov 11 21:13:51 2011
@@ -41,13 +41,7 @@ import org.apache.lucene.index.codecs.Po
 import org.apache.lucene.index.codecs.SegmentInfosFormat;
 import org.apache.lucene.index.codecs.TermVectorsFormat;
 import org.apache.lucene.index.codecs.lucene40.Lucene40Codec;
-import org.apache.lucene.index.codecs.lucene40.Lucene40PostingsBaseFormat;
-import org.apache.lucene.index.codecs.lucene40.Lucene40PostingsFormat;
-import org.apache.lucene.index.codecs.mocksep.MockSepPostingsFormat;
-import org.apache.lucene.index.codecs.perfield.PerFieldPostingsFormat;
 import org.apache.lucene.index.codecs.pulsing.Pulsing40PostingsFormat;
-import org.apache.lucene.index.codecs.pulsing.PulsingPostingsFormat;
-import org.apache.lucene.index.codecs.simpletext.SimpleTextPostingsFormat;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.PhraseQuery;
 import org.apache.lucene.store.AlreadyClosedException;
@@ -116,7 +110,7 @@ public class TestAddIndexes extends Luce
     assertEquals(40, writer.maxDoc());
     writer.close();
 
-    // test doc count before segments are merged/index is optimized
+    // test doc count before segments are merged
     writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     assertEquals(190, writer.maxDoc());
     writer.addIndexes(aux3);
@@ -130,9 +124,9 @@ public class TestAddIndexes extends Luce
 
     verifyTermDocs(dir, new Term("content", "bbb"), 50);
 
-    // now optimize it.
+    // now fully merge it.
     writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
-    writer.optimize();
+    writer.forceMerge(1);
     writer.close();
 
     // make sure the new index is correct
@@ -188,7 +182,7 @@ public class TestAddIndexes extends Luce
     q.add(new Term("content", "14"));
     writer.deleteDocuments(q);
 
-    writer.optimize();
+    writer.forceMerge(1);
     writer.commit();
 
     verifyNumDocs(dir, 1039);
@@ -226,7 +220,7 @@ public class TestAddIndexes extends Luce
     q.add(new Term("content", "14"));
     writer.deleteDocuments(q);
 
-    writer.optimize();
+    writer.forceMerge(1);
     writer.commit();
 
     verifyNumDocs(dir, 1039);
@@ -264,7 +258,7 @@ public class TestAddIndexes extends Luce
 
     writer.addIndexes(aux);
 
-    writer.optimize();
+    writer.forceMerge(1);
     writer.commit();
 
     verifyNumDocs(dir, 1039);
@@ -731,10 +725,10 @@ public class TestAddIndexes extends Luce
       switch(j%5) {
       case 0:
         if (VERBOSE) {
-          System.out.println(Thread.currentThread().getName() + ": TEST: addIndexes(Dir[]) then optimize");
+          System.out.println(Thread.currentThread().getName() + ": TEST: addIndexes(Dir[]) then full merge");
         }
         writer2.addIndexes(dirs);
-        writer2.optimize();
+        writer2.forceMerge(1);
         break;
       case 1:
         if (VERBOSE) {
@@ -836,10 +830,10 @@ public class TestAddIndexes extends Luce
       switch(j%5) {
       case 0:
         if (VERBOSE) {
-          System.out.println("TEST: " + Thread.currentThread().getName() + ": addIndexes + optimize");
+          System.out.println("TEST: " + Thread.currentThread().getName() + ": addIndexes + full merge");
         }
         writer2.addIndexes(dirs);
-        writer2.optimize();
+        writer2.forceMerge(1);
         break;
       case 1:
         if (VERBOSE) {
@@ -855,9 +849,9 @@ public class TestAddIndexes extends Luce
         break;
       case 3:
         if (VERBOSE) {
-          System.out.println("TEST: " + Thread.currentThread().getName() + ": optimize");
+          System.out.println("TEST: " + Thread.currentThread().getName() + ": full merge");
         }
-        writer2.optimize();
+        writer2.forceMerge(1);
         break;
       case 4:
         if (VERBOSE) {
@@ -1221,7 +1215,7 @@ public class TestAddIndexes extends Luce
     }
 
     try {
-      IndexReader indexReader = IndexReader.open(toAdd);
+      IndexReader.open(toAdd);
       fail("no such codec");
     } catch (IllegalArgumentException ex) {
       // expected

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java Fri Nov 11 21:13:51 2011
@@ -79,16 +79,16 @@ public class TestBackwardsCompatibility 
   
 /*
   // These are only needed for the special upgrade test to verify
-  // that also optimized indexes are correctly upgraded by IndexUpgrader.
+  // that also single-segment indexes are correctly upgraded by IndexUpgrader.
   // You don't need them to be build for non-3.1 (the test is happy with just one
   // "old" segment format, version is unimportant:
   
-  public void testCreateOptimizedCFS() throws IOException {
-    createIndex("index.optimized.cfs", true, true);
+  public void testCreateSingleSegmentCFS() throws IOException {
+    createIndex("index.singlesegment.cfs", true, true);
   }
 
-  public void testCreateOptimizedNoCFS() throws IOException {
-    createIndex("index.optimized.nocfs", false, true);
+  public void testCreateSingleSegmentNoCFS() throws IOException {
+    createIndex("index.singlesegment.nocfs", false, true);
   }
 
 */  
@@ -118,8 +118,8 @@ public class TestBackwardsCompatibility 
                                      "29.nocfs",
   };
   
-  final String[] oldOptimizedNames = {"31.optimized.cfs",
-                                      "31.optimized.nocfs",
+  final String[] oldSingleSegmentNames = {"31.optimized.cfs",
+                                          "31.optimized.nocfs",
   };
   
   /** This test checks that *only* IndexFormatTooOldExceptions are thrown when you open and operate on too old indexes! */
@@ -180,7 +180,7 @@ public class TestBackwardsCompatibility 
     }
   }
   
-  public void testOptimizeOldIndex() throws Exception {
+  public void testFullyMergeOldIndex() throws Exception {
     for(int i=0;i<oldNames.length;i++) {
       if (VERBOSE) {
         System.out.println("\nTEST: index=" + oldNames[i]);
@@ -191,7 +191,7 @@ public class TestBackwardsCompatibility 
 
       IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
           TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-      w.optimize();
+      w.forceMerge(1);
       w.close();
       
       dir.close();
@@ -393,9 +393,9 @@ public class TestBackwardsCompatibility 
     doTestHits(hits, 43, searcher.getIndexReader());
     searcher.close();
 
-    // optimize
+    // fully merge
     writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
-    writer.optimize();
+    writer.forceMerge(1);
     writer.close();
 
     searcher = new IndexSearcher(dir, true);
@@ -439,9 +439,9 @@ public class TestBackwardsCompatibility 
     doTestHits(hits, 33, searcher.getIndexReader());
     searcher.close();
 
-    // optimize
+    // fully merge
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
-    writer.optimize();
+    writer.forceMerge(1);
     writer.close();
 
     searcher = new IndexSearcher(dir, true);
@@ -455,7 +455,7 @@ public class TestBackwardsCompatibility 
     dir.close();
   }
 
-  public File createIndex(String dirName, boolean doCFS, boolean optimized) throws IOException {
+  public File createIndex(String dirName, boolean doCFS, boolean fullyMerged) throws IOException {
     // we use a real directory name that is not cleaned up, because this method is only used to create backwards indexes:
     File indexDir = new File(LuceneTestCase.TEMP_DIR, dirName);
     _TestUtil.rmDir(indexDir);
@@ -472,12 +472,12 @@ public class TestBackwardsCompatibility 
       addDoc(writer, i);
     }
     assertEquals("wrong doc count", 35, writer.maxDoc());
-    if (optimized) {
-      writer.optimize();
+    if (fullyMerged) {
+      writer.forceMerge(1);
     }
     writer.close();
 
-    if (!optimized) {
+    if (!fullyMerged) {
       // open fresh writer so we get no prx file in the added segment
       mp = new LogByteSizeMergePolicy();
       mp.setUseCompoundFile(doCFS);
@@ -743,9 +743,9 @@ public class TestBackwardsCompatibility 
   }
 
   public void testUpgradeOldIndex() throws Exception {
-    List<String> names = new ArrayList<String>(oldNames.length + oldOptimizedNames.length);
+    List<String> names = new ArrayList<String>(oldNames.length + oldSingleSegmentNames.length);
     names.addAll(Arrays.asList(oldNames));
-    names.addAll(Arrays.asList(oldOptimizedNames));
+    names.addAll(Arrays.asList(oldSingleSegmentNames));
     for(String name : names) {
       if (VERBOSE) {
         System.out.println("testUpgradeOldIndex: index=" +name);
@@ -764,16 +764,16 @@ public class TestBackwardsCompatibility 
     }
   }
 
-  public void testUpgradeOldOptimizedIndexWithAdditions() throws Exception {
-    for (String name : oldOptimizedNames) {
+  public void testUpgradeOldSingleSegmentIndexWithAdditions() throws Exception {
+    for (String name : oldSingleSegmentNames) {
       if (VERBOSE) {
-        System.out.println("testUpgradeOldOptimizedIndexWithAdditions: index=" +name);
+        System.out.println("testUpgradeOldSingleSegmentIndexWithAdditions: index=" +name);
       }
       File oldIndxeDir = _TestUtil.getTempDir(name);
       _TestUtil.unzip(getDataFile("index." + name + ".zip"), oldIndxeDir);
       Directory dir = newFSDirectory(oldIndxeDir);
 
-      assertEquals("Original index must be optimized", 1, getNumberOfSegments(dir));
+      assertEquals("Original index must be single segment", 1, getNumberOfSegments(dir));
 
       // create a bunch of dummy segments
       int id = 40;
@@ -791,7 +791,8 @@ public class TestBackwardsCompatibility 
         w.close(false);
       }
       
-      // add dummy segments (which are all in current version) to optimized index
+      // add dummy segments (which are all in current
+      // version) to single segment index
       MergePolicy mp = random.nextBoolean() ? newLogMergePolicy() : newTieredMergePolicy();
       IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, null)
         .setMergePolicy(mp);

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestCheckIndex.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestCheckIndex.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestCheckIndex.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestCheckIndex.java Fri Nov 11 21:13:51 2011
@@ -45,7 +45,7 @@ public class TestCheckIndex extends Luce
     for(int i=0;i<19;i++) {
       writer.addDocument(doc);
     }
-    writer.optimize();
+    writer.forceMerge(1);
     writer.close();
     IndexReader reader = IndexReader.open(dir, false);
     reader.deleteDocument(5);

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestCodecs.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestCodecs.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestCodecs.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestCodecs.java Fri Nov 11 21:13:51 2011
@@ -33,7 +33,6 @@ import org.apache.lucene.index.codecs.Po
 import org.apache.lucene.index.codecs.TermStats;
 import org.apache.lucene.index.codecs.TermsConsumer;
 import org.apache.lucene.index.codecs.lucene3x.Lucene3xCodec;
-import org.apache.lucene.index.codecs.lucene3x.Lucene3xPostingsFormat;
 import org.apache.lucene.index.codecs.mocksep.MockSepPostingsFormat;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
@@ -371,7 +370,7 @@ public class TestCodecs extends LuceneTe
       assertEquals(2, results.length);
       assertEquals(0, results[0].doc);
 
-      writer.optimize();
+      writer.forceMerge(1);
 
       // optimise to merge the segments.
       results = this.search(writer, pq, 5);

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java Fri Nov 11 21:13:51 2011
@@ -77,7 +77,7 @@ public class TestConsistentFieldNumbers 
       assertEquals("f4", fis2.fieldInfo(3).name);
 
       writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-      writer.optimize();
+      writer.forceMerge(1);
       writer.close();
 
       sis = new SegmentInfos();
@@ -141,7 +141,7 @@ public class TestConsistentFieldNumbers 
     assertEquals("f4", fis2.fieldInfo(3).name);
 
     writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-    writer.optimize();
+    writer.forceMerge(1);
     writer.close();
 
     sis = new SegmentInfos();
@@ -252,7 +252,7 @@ public class TestConsistentFieldNumbers 
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
           TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
           new LogByteSizeMergePolicy()).setInfoStream(new FailOnNonBulkMergesInfoStream()));
-      writer.optimize();
+      writer.forceMerge(1);
       writer.close();
 
       SegmentInfos sis = new SegmentInfos();
@@ -293,7 +293,7 @@ public class TestConsistentFieldNumbers 
       writer.addDocument(d);
     }
 
-    writer.optimize();
+    writer.forceMerge(1);
     writer.close();
 
     SegmentInfos sis = new SegmentInfos();

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java Fri Nov 11 21:13:51 2011
@@ -74,7 +74,7 @@ public class TestDeletionPolicy extends 
     public void onCommit(List<? extends IndexCommit> commits) throws IOException {
       IndexCommit lastCommit =  commits.get(commits.size()-1);
       IndexReader r = IndexReader.open(dir, true);
-      assertEquals("lastCommit.isOptimized()=" + lastCommit.isOptimized() + " vs IndexReader.isOptimized=" + r.isOptimized(), r.isOptimized(), lastCommit.isOptimized());
+      assertEquals("lastCommit.segmentCount()=" + lastCommit.getSegmentCount() + " vs IndexReader.segmentCount=" + r.getSequentialSubReaders().length, r.getSequentialSubReaders().length, lastCommit.getSegmentCount());
       r.close();
       verifyCommitOrder(commits);
       numOnCommit++;
@@ -317,13 +317,13 @@ public class TestDeletionPolicy extends 
       }
       writer.close();
 
-      final boolean isOptimized;
+      final boolean needsMerging;
       {
         IndexReader r = IndexReader.open(dir);
-        isOptimized = r.isOptimized();
+        needsMerging = r.getSequentialSubReaders().length != 1;
         r.close();
       }
-      if (!isOptimized) {
+      if (needsMerging) {
         conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
                                     new MockAnalyzer(random)).setOpenMode(
                                                                     OpenMode.APPEND).setIndexDeletionPolicy(policy);
@@ -332,22 +332,22 @@ public class TestDeletionPolicy extends 
           ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
         }
         if (VERBOSE) {
-          System.out.println("TEST: open writer for optimize");
+          System.out.println("TEST: open writer for forceMerge");
         }
         writer = new IndexWriter(dir, conf);
-        writer.optimize();
+        writer.forceMerge(1);
         writer.close();
       }
-      assertEquals(isOptimized ? 0:1, policy.numOnInit);
+      assertEquals(needsMerging ? 1:0, policy.numOnInit);
 
       // If we are not auto committing then there should
       // be exactly 2 commits (one per close above):
-      assertEquals(1 + (isOptimized ? 0:1), policy.numOnCommit);
+      assertEquals(1 + (needsMerging ? 1:0), policy.numOnCommit);
 
       // Test listCommits
       Collection<IndexCommit> commits = IndexReader.listCommits(dir);
       // 2 from closing writer
-      assertEquals(1 + (isOptimized ? 0:1), commits.size());
+      assertEquals(1 + (needsMerging ? 1:0), commits.size());
 
       // Make sure we can open a reader on each commit:
       for (final IndexCommit commit : commits) {
@@ -418,16 +418,16 @@ public class TestDeletionPolicy extends 
     }
     assertTrue(lastCommit != null);
 
-    // Now add 1 doc and optimize
+    // Now add 1 doc and merge
     writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(policy));
     addDoc(writer);
     assertEquals(11, writer.numDocs());
-    writer.optimize();
+    writer.forceMerge(1);
     writer.close();
 
     assertEquals(6, IndexReader.listCommits(dir).size());
 
-    // Now open writer on the commit just before optimize:
+    // Now open writer on the commit just before merge:
     writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
     assertEquals(10, writer.numDocs());
@@ -436,8 +436,8 @@ public class TestDeletionPolicy extends 
     writer.rollback();
 
     IndexReader r = IndexReader.open(dir, true);
-    // Still optimized, still 11 docs
-    assertTrue(r.isOptimized());
+    // Still fully merged, still 11 docs
+    assertEquals(1, r.getSequentialSubReaders().length);
     assertEquals(11, r.numDocs());
     r.close();
 
@@ -451,39 +451,39 @@ public class TestDeletionPolicy extends 
     assertEquals(7, IndexReader.listCommits(dir).size());
     
     r = IndexReader.open(dir, true);
-    // Not optimized because we rolled it back, and now only
+    // Not fully merged because we rolled it back, and now only
     // 10 docs
-    assertTrue(!r.isOptimized());
+    assertTrue(r.getSequentialSubReaders().length > 1);
     assertEquals(10, r.numDocs());
     r.close();
 
-    // Reoptimize
+    // Re-merge
     writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(policy));
-    writer.optimize();
+    writer.forceMerge(1);
     writer.close();
 
     r = IndexReader.open(dir, true);
-    assertTrue(r.isOptimized());
+    assertEquals(1, r.getSequentialSubReaders().length);
     assertEquals(10, r.numDocs());
     r.close();
 
-    // Now open writer on the commit just before optimize,
+    // Now open writer on the commit just before merging,
     // but this time keeping only the last commit:
     writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexCommit(lastCommit));
     assertEquals(10, writer.numDocs());
     
-    // Reader still sees optimized index, because writer
+    // Reader still sees fully merged index, because writer
     // opened on the prior commit has not yet committed:
     r = IndexReader.open(dir, true);
-    assertTrue(r.isOptimized());
+    assertEquals(1, r.getSequentialSubReaders().length);
     assertEquals(10, r.numDocs());
     r.close();
 
     writer.close();
 
-    // Now reader sees unoptimized index:
+    // Now reader sees not-fully-merged index:
     r = IndexReader.open(dir, true);
-    assertTrue(!r.isOptimized());
+    assertTrue(r.getSequentialSubReaders().length > 1);
     assertEquals(10, r.numDocs());
     r.close();
 
@@ -525,7 +525,7 @@ public class TestDeletionPolicy extends 
         ((LogMergePolicy) mp).setUseCompoundFile(true);
       }
       writer = new IndexWriter(dir, conf);
-      writer.optimize();
+      writer.forceMerge(1);
       writer.close();
 
       assertEquals(1, policy.numOnInit);
@@ -569,7 +569,7 @@ public class TestDeletionPolicy extends 
         for(int i=0;i<17;i++) {
           addDoc(writer);
         }
-        writer.optimize();
+        writer.forceMerge(1);
         writer.close();
       }
 
@@ -673,15 +673,15 @@ public class TestDeletionPolicy extends 
         ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
       }
       IndexReader r = IndexReader.open(dir);
-      final boolean wasOptimized = r.isOptimized();
+      final boolean wasFullyMerged = r.getSequentialSubReaders().length == 1 && !r.hasDeletions();
       r.close();
       writer = new IndexWriter(dir, conf);
-      writer.optimize();
+      writer.forceMerge(1);
       // this is a commit
       writer.close();
 
       assertEquals(2*(N+1)+1, policy.numOnInit);
-      assertEquals(2*(N+2) - (wasOptimized ? 1:0), policy.numOnCommit);
+      assertEquals(2*(N+2) - (wasFullyMerged ? 1:0), policy.numOnCommit);
 
       IndexSearcher searcher = new IndexSearcher(dir, false);
       ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;

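The TestDeletionPolicy hunks above replace IndexReader.isOptimized() with a check on the number of sequential sub-readers (one per segment), in one place combined with hasDeletions(). A rough sketch of that idiom, assuming dir already holds an index and that the reader opened on it is a composite (directory-level) reader; the helper class and method name are illustrative, not part of the patch:

  import java.io.IOException;

  import org.apache.lucene.index.IndexReader;
  import org.apache.lucene.store.Directory;

  public final class FullyMergedCheck {
    public static boolean isFullyMerged(Directory dir) throws IOException {
      IndexReader r = IndexReader.open(dir, true);   // read-only, as in the tests above
      try {
        // One sub-reader per segment; the stricter variant used above also
        // requires that no deletions are pending.
        return r.getSequentialSubReaders().length == 1 && !r.hasDeletions();
      } finally {
        r.close();
      }
    }
  }
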
Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDirectoryReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDirectoryReader.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDirectoryReader.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDirectoryReader.java Fri Nov 11 21:13:51 2011
@@ -184,7 +184,7 @@ public class TestDirectoryReader extends
     while (td.nextDoc() != td.NO_MORE_DOCS) ret += td.docID();
 
     // really a dummy assert to ensure that we got some docs and to ensure that
-    // nothing is optimized out.
+    // nothing is eliminated by the HotSpot JIT.
     assertTrue(ret > 0);
     readers1[0].close();
     readers1[1].close();

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDocCount.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDocCount.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDocCount.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDocCount.java Fri Nov 11 21:13:51 2011
@@ -42,7 +42,7 @@ public class TestDocCount extends Lucene
     IndexReader ir = iw.getReader();
     verifyCount(ir);
     ir.close();
-    iw.optimize();
+    iw.forceMerge(1);
     ir = iw.getReader();
     verifyCount(ir);
     ir.close();

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java Fri Nov 11 21:13:51 2011
@@ -321,7 +321,7 @@ public class TestDocumentWriter extends 
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer.addDocument(doc);
-    writer.optimize(); // be sure to have a single segment
+    writer.forceMerge(1); // be sure to have a single segment
     writer.close();
 
     _TestUtil.checkIndex(dir);

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java Fri Nov 11 21:13:51 2011
@@ -203,7 +203,7 @@ public class TestFieldsReader extends Lu
           TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
       for(int i=0;i<2;i++)
         writer.addDocument(testDoc);
-      writer.optimize();
+      writer.forceMerge(1);
       writer.close();
 
       IndexReader reader = IndexReader.open(dir, true);

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestFlex.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestFlex.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestFlex.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestFlex.java Fri Nov 11 21:13:51 2011
@@ -48,7 +48,7 @@ public class TestFlex extends LuceneTest
           w.addDocument(doc);
         }
       } else {
-        w.optimize();
+        w.forceMerge(1);
       }
 
       IndexReader r = w.getReader();

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestGlobalFieldNumbers.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestGlobalFieldNumbers.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestGlobalFieldNumbers.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestGlobalFieldNumbers.java Fri Nov 11 21:13:51 2011
@@ -100,7 +100,7 @@ public class TestGlobalFieldNumbers exte
 
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
           TEST_VERSION_CURRENT, new MockAnalyzer(random)).setInfoStream(new FailOnNonBulkMergesInfoStream()));
-      writer.optimize();
+      writer.forceMerge(1);
       writer.close();
       assertFNXFiles(dir, "_2.fnx");
 
@@ -140,7 +140,7 @@ public class TestGlobalFieldNumbers exte
 
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
           TEST_VERSION_CURRENT, new MockAnalyzer(random)).setInfoStream(new FailOnNonBulkMergesInfoStream()));
-      writer.optimize();
+      writer.forceMerge(1);
       writer.close();
       assertFNXFiles(dir, "_2.fnx");
 
@@ -187,7 +187,7 @@ public class TestGlobalFieldNumbers exte
       }
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
           TEST_VERSION_CURRENT, new MockAnalyzer(random)).setInfoStream(new FailOnNonBulkMergesInfoStream()));
-      writer.optimize();
+      writer.forceMerge(1);
       writer.close();
       assertFNXFiles(dir, "_2.fnx");
       dir.close();
@@ -270,7 +270,7 @@ public class TestGlobalFieldNumbers exte
     return dir;
   }
 
-  public void testOptimize() throws IOException {
+  public void testForceMerge() throws IOException {
     for (int i = 0; i < 2*RANDOM_MULTIPLIER; i++) {
       Set<String> fieldNames = new HashSet<String>();
       final int numFields = 2 + (TEST_NIGHTLY ? random.nextInt(200) : random.nextInt(20));
@@ -285,7 +285,7 @@ public class TestGlobalFieldNumbers exte
       FieldNumberBiMap globalFieldMap = writer.segmentInfos
           .getOrLoadGlobalFieldNumberMap(base);
       Set<Entry<String, Integer>> entries = globalFieldMap.entries();
-      writer.optimize();
+      writer.forceMerge(1);
       writer.commit();
       writer.close();
       Set<Entry<String, Integer>> afterOptmize = globalFieldMap.entries();
@@ -352,7 +352,7 @@ public class TestGlobalFieldNumbers exte
       IndexWriter w = new IndexWriter(base, newIndexWriterConfig(
           TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
           new LogByteSizeMergePolicy()));
-      w.optimize();
+      w.forceMerge(1);
       w.close();
       SegmentInfos sis = new SegmentInfos();
       sis.read(base);

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexCommit.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexCommit.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexCommit.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexCommit.java Fri Nov 11 21:13:51 2011
@@ -42,7 +42,7 @@ public class TestIndexCommit extends Luc
       @Override public long getTimestamp() throws IOException { return 1;}
       @Override public Map<String, String> getUserData() throws IOException { return null; }
       @Override public boolean isDeleted() { return false; }
-      @Override public boolean isOptimized() { return false; }
+      @Override public int getSegmentCount() { return 2; }
     };
     
     IndexCommit ic2 = new IndexCommit() {
@@ -55,7 +55,7 @@ public class TestIndexCommit extends Luc
       @Override public long getTimestamp() throws IOException { return 1;}
       @Override public Map<String, String> getUserData() throws IOException { return null; }
       @Override public boolean isDeleted() { return false; }
-      @Override public boolean isOptimized() { return false; }
+      @Override public int getSegmentCount() { return 2; }
     };
 
     assertEquals(ic1, ic2);

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReader.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReader.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReader.java Fri Nov 11 21:13:51 2011
@@ -95,18 +95,18 @@ public class TestIndexReader extends Luc
       IndexReader r3 = IndexReader.openIfChanged(r2);
       assertNotNull(r3);
       assertFalse(c.equals(r3.getIndexCommit()));
-      assertFalse(r2.getIndexCommit().isOptimized());
+      assertFalse(r2.getIndexCommit().getSegmentCount() == 1);
       r3.close();
 
       writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
         new MockAnalyzer(random))
         .setOpenMode(OpenMode.APPEND));
-      writer.optimize();
+      writer.forceMerge(1);
       writer.close();
 
       r3 = IndexReader.openIfChanged(r2);
       assertNotNull(r3);
-      assertTrue(r3.getIndexCommit().isOptimized());
+      assertEquals(1, r3.getIndexCommit().getSegmentCount());
       r2.close();
       r3.close();
       d.close();
@@ -381,11 +381,11 @@ public class TestIndexReader extends Luc
           assertEquals(bin[i], bytesRef.bytes[i + bytesRef.offset]);
         }
         reader.close();
-        // force optimize
+        // force merge
 
 
         writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
-        writer.optimize();
+        writer.forceMerge(1);
         writer.close();
         reader = IndexReader.open(dir, false);
         doc2 = reader.document(reader.maxDoc() - 1);
@@ -721,7 +721,7 @@ public class TestIndexReader extends Luc
       // [incorrectly] hit a "docs out of order"
       // IllegalStateException because above out-of-bounds
       // deleteDocument corrupted the index:
-      writer.optimize();
+      writer.forceMerge(1);
       writer.close();
       if (!gotException) {
         fail("delete of out-of-bounds doc number failed to hit exception");
@@ -846,7 +846,9 @@ public class TestIndexReader extends Luc
       assertEquals("IndexReaders have different values for numDocs.", index1.numDocs(), index2.numDocs());
       assertEquals("IndexReaders have different values for maxDoc.", index1.maxDoc(), index2.maxDoc());
       assertEquals("Only one IndexReader has deletions.", index1.hasDeletions(), index2.hasDeletions());
-      assertEquals("Only one index is optimized.", index1.isOptimized(), index2.isOptimized());
+      if (!(index1 instanceof ParallelReader)) {
+        assertEquals("Single segment test differs.", index1.getSequentialSubReaders().length == 1, index2.getSequentialSubReaders().length == 1);
+      }
       
       // check field names
       Collection<String> fields1 = index1.getFieldNames(FieldOption.ALL);
@@ -970,19 +972,19 @@ public class TestIndexReader extends Luc
       IndexReader r2 = IndexReader.openIfChanged(r);
       assertNotNull(r2);
       assertFalse(c.equals(r2.getIndexCommit()));
-      assertFalse(r2.getIndexCommit().isOptimized());
+      assertFalse(r2.getIndexCommit().getSegmentCount() == 1);
       r2.close();
 
       writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
         new MockAnalyzer(random))
         .setOpenMode(OpenMode.APPEND));
-      writer.optimize();
+      writer.forceMerge(1);
       writer.close();
 
       r2 = IndexReader.openIfChanged(r);
       assertNotNull(r2);
       assertNull(IndexReader.openIfChanged(r2));
-      assertTrue(r2.getIndexCommit().isOptimized());
+      assertEquals(1, r2.getIndexCommit().getSegmentCount());
 
       r.close();
       r2.close();
@@ -1032,7 +1034,7 @@ public class TestIndexReader extends Luc
       writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
         new MockAnalyzer(random))
         .setOpenMode(OpenMode.APPEND));
-      writer.optimize();
+      writer.forceMerge(1);
       writer.close();
 
       // Make sure reopen to a single segment is still readonly:

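At the commit level, the assertions above use IndexCommit.getSegmentCount() where isOptimized() was called before. A short sketch of inspecting commit points that way, assuming dir contains at least one commit; the class name is an illustrative placeholder:

  import java.io.IOException;
  import java.util.Collection;

  import org.apache.lucene.index.IndexCommit;
  import org.apache.lucene.index.IndexReader;
  import org.apache.lucene.store.Directory;

  public final class ListCommitSegments {
    public static void print(Directory dir) throws IOException {
      Collection<IndexCommit> commits = IndexReader.listCommits(dir);
      for (IndexCommit commit : commits) {
        // A commit produced by forceMerge(1) reports a single segment.
        System.out.println(commit.getSegmentsFileName() + ": segments="
            + commit.getSegmentCount());
      }
    }
  }
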
Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderClone.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderClone.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderClone.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderClone.java Fri Nov 11 21:13:51 2011
@@ -192,15 +192,15 @@ public class TestIndexReaderClone extend
   }
 
   // open non-readOnly reader1 on multi-segment index, then
-  // optimize the index, then clone to readOnly reader2
-  public void testReadOnlyCloneAfterOptimize() throws Exception {
+  // fully merge the index, then clone to readOnly reader2
+  public void testReadOnlyCloneAfterFullMerge() throws Exception {
     final Directory dir1 = newDirectory();
 
     TestIndexReaderReopen.createIndex(random, dir1, true);
     IndexReader reader1 = IndexReader.open(dir1, false);
     IndexWriter w = new IndexWriter(dir1, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-    w.optimize();
+    w.forceMerge(1);
     w.close();
     IndexReader reader2 = reader1.clone(true);
     assertTrue(isReadOnly(reader2));

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java Fri Nov 11 21:13:51 2011
@@ -80,7 +80,7 @@ public class TestIndexReaderCloneNorms e
   /**
    * Test that norms values are preserved as the index is maintained. Including
   * separate norms. Including merging indexes with separate norms. Including
-   * optimize.
+   * full merge.
    */
   public void testNorms() throws IOException {
     // test with a single index: index1
@@ -112,7 +112,7 @@ public class TestIndexReaderCloneNorms e
 
     createIndex(random, dir3);
     if (VERBOSE) {
-      System.out.println("TEST: now addIndexes/optimize");
+      System.out.println("TEST: now addIndexes/full merge");
     }
     IndexWriter iw = new IndexWriter(
         dir3,
@@ -122,7 +122,7 @@ public class TestIndexReaderCloneNorms e
         setMergePolicy(newLogMergePolicy(3))
     );
     iw.addIndexes(dir1, dir2);
-    iw.optimize();
+    iw.forceMerge(1);
     iw.close();
 
     norms1.addAll(norms);
@@ -135,7 +135,7 @@ public class TestIndexReaderCloneNorms e
     verifyIndex(dir3);
     doTestNorms(random, dir3);
 
-    // now with optimize
+    // now with full merge
     iw = new IndexWriter(
         dir3,
         newIndexWriterConfig(TEST_VERSION_CURRENT, anlzr).
@@ -143,7 +143,7 @@ public class TestIndexReaderCloneNorms e
             setMaxBufferedDocs(5).
             setMergePolicy(newLogMergePolicy(3))
     );
-    iw.optimize();
+    iw.forceMerge(1);
     iw.close();
     verifyIndex(dir3);
 

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderDelete.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderDelete.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderDelete.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderDelete.java Fri Nov 11 21:13:51 2011
@@ -33,7 +33,7 @@ import static org.apache.lucene.index.Te
 import static org.apache.lucene.index.TestIndexReader.createDocument;
 
 public class TestIndexReaderDelete extends LuceneTestCase {
-  private void deleteReaderReaderConflict(boolean optimize) throws IOException {
+  private void deleteReaderReaderConflict(boolean doFullMerge) throws IOException {
     Directory dir = newDirectory();
 
     Term searchTerm1 = new Term("content", "aaa");
@@ -49,8 +49,9 @@ public class TestIndexReaderDelete exten
         addDoc(writer, searchTerm2.text());
         addDoc(writer, searchTerm3.text());
     }
-    if(optimize)
-      writer.optimize();
+    if (doFullMerge) {
+      writer.forceMerge(1);
+    }
     writer.close();
 
     // OPEN TWO READERS
@@ -131,7 +132,7 @@ public class TestIndexReaderDelete exten
     dir.close();
   }
 
-  private void deleteReaderWriterConflict(boolean optimize) throws IOException {
+  private void deleteReaderWriterConflict(boolean doFullMerge) throws IOException {
     //Directory dir = new RAMDirectory();
     Directory dir = newDirectory();
 
@@ -159,13 +160,14 @@ public class TestIndexReaderDelete exten
         addDoc(writer, searchTerm2.text());
     }
 
-    // REQUEST OPTIMIZATION
+    // REQUEST FULL MERGE
     // This causes a new segment to become current for all subsequent
     // searchers. Because of this, deletions made via a previously open
     // reader, which would be applied to that reader's segment, are lost
     // for subsequent searchers/readers
-    if(optimize)
-      writer.optimize();
+    if (doFullMerge) {
+      writer.forceMerge(1);
+    }
     writer.close();
 
     // The reader should not see the new data
@@ -255,19 +257,19 @@ public class TestIndexReaderDelete exten
     dir.close();
   }
 
-  public void testDeleteReaderReaderConflictUnoptimized() throws IOException {
+  public void testDeleteReaderReaderConflictNoFullMerge() throws IOException {
     deleteReaderReaderConflict(false);
   }
   
-  public void testDeleteReaderReaderConflictOptimized() throws IOException {
+  public void testDeleteReaderReaderConflictFullMerge() throws IOException {
     deleteReaderReaderConflict(true);
   }
   
-  public void testDeleteReaderWriterConflictUnoptimized() throws IOException {
+  public void testDeleteReaderWriterConflictNoFullMerge() throws IOException {
     deleteReaderWriterConflict(false);
   }
   
-  public void testDeleteReaderWriterConflictOptimized() throws IOException {
+  public void testDeleteReaderWriterConflictFullMerge() throws IOException {
     deleteReaderWriterConflict(true);
   }
   

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java Fri Nov 11 21:13:51 2011
@@ -711,7 +711,7 @@ public class TestIndexReaderReopen exten
     for (int i = 0; i < n; i++) {
       writer.addDocument(createDocument(i, 3));
     }
-    writer.optimize();
+    writer.forceMerge(1);
     writer.close();
 
     final TestReopen test = new TestReopen() {      
@@ -961,7 +961,7 @@ public class TestIndexReaderReopen exten
     }
     
     if (!multiSegment) {
-      w.optimize();
+      w.forceMerge(1);
     }
     
     w.close();
@@ -1019,14 +1019,14 @@ public class TestIndexReaderReopen exten
       }
       case 2: {
         IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-        w.optimize();
+        w.forceMerge(1);
         w.close();
         break;
       }
       case 3: {
         IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
         w.addDocument(createDocument(101, 4));
-        w.optimize();
+        w.forceMerge(1);
         w.addDocument(createDocument(102, 4));
         w.addDocument(createDocument(103, 4));
         w.close();

Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java?rev=1201053&r1=1201052&r2=1201053&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java Fri Nov 11 21:13:51 2011
@@ -19,7 +19,6 @@ package org.apache.lucene.index;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-import java.io.PrintStream;
 import java.io.Reader;
 import java.io.StringReader;
 import java.util.ArrayList;
@@ -55,16 +54,13 @@ import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.spans.SpanTermQuery;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.Lock;
 import org.apache.lucene.store.LockFactory;
 import org.apache.lucene.store.LockObtainFailedException;
 import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.NativeFSLockFactory;
 import org.apache.lucene.store.NoLockFactory;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.store.SimpleFSDirectory;
 import org.apache.lucene.store.SimpleFSLockFactory;
 import org.apache.lucene.store.SingleInstanceLockFactory;
 import org.apache.lucene.util.BytesRef;
@@ -109,10 +105,10 @@ public class TestIndexWriter extends Luc
         assertEquals(60, reader.numDocs());
         reader.close();
 
-        // optimize the index and check that the new doc count is correct
+        // merge the index down and check that the new doc count is correct
         writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
         assertEquals(60, writer.numDocs());
-        writer.optimize();
+        writer.forceMerge(1);
         assertEquals(60, writer.maxDoc());
         assertEquals(60, writer.numDocs());
         writer.close();
@@ -734,7 +730,7 @@ public class TestIndexWriter extends Luc
         writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
         //LogMergePolicy lmp2 = (LogMergePolicy) writer.getConfig().getMergePolicy();
         //lmp2.setUseCompoundFile(false);
-        writer.optimize();
+        writer.forceMerge(1);
         writer.close();
       }
     }
@@ -1303,7 +1299,7 @@ public class TestIndexWriter extends Luc
 
     w.addDocument(doc);
     w.commit();
-    w.optimize();   // force segment merge.
+    w.forceMerge(1);   // force segment merge.
     w.close();
 
     IndexReader ir = IndexReader.open(dir, true);
@@ -1440,7 +1436,7 @@ public class TestIndexWriter extends Luc
       List<String> files = Arrays.asList(dir.listAll());
       assertTrue(files.contains("_0.cfs"));
       w.addDocument(doc);
-      w.optimize();
+      w.forceMerge(1);
       if (iter == 1) {
         w.commit();
       }
@@ -1451,10 +1447,10 @@ public class TestIndexWriter extends Luc
 
       // NOTE: here we rely on "Windows" behavior, ie, even
       // though IW wanted to delete _0.cfs since it was
-      // optimized away, because we have a reader open
+      // merged away, because we have a reader open
       // against this file, it should still be here:
       assertTrue(files.contains("_0.cfs"));
-      // optimize created this
+      // forceMerge created this
       //assertTrue(files.contains("_2.cfs"));
       w.deleteUnusedFiles();
 
@@ -1698,7 +1694,7 @@ public class TestIndexWriter extends Luc
         }
         s.close();
         r.close();
-        w.optimize();
+        w.forceMerge(1);
       }
     }
     w.close();