Posted to commits@lucene.apache.org by mi...@apache.org on 2011/05/10 20:28:20 UTC

svn commit: r1101583 - in /lucene/dev/branches/branch_3x: ./ lucene/ lucene/backwards/ lucene/backwards/src/test/org/apache/lucene/index/ lucene/contrib/analyzers/common/src/java/org/apache/lucene/analysis/path/ lucene/src/java/org/apache/lucene/index/...

Author: mikemccand
Date: Tue May 10 18:28:19 2011
New Revision: 1101583

URL: http://svn.apache.org/viewvc?rev=1101583&view=rev
Log:
LUCENE-3084: cutover to List<SegmentInfo> not SegmentInfos for MergePolicy.OneMerge.segments

Removed:
    lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java
Modified:
    lucene/dev/branches/branch_3x/   (props changed)
    lucene/dev/branches/branch_3x/lucene/   (props changed)
    lucene/dev/branches/branch_3x/lucene/CHANGES.txt
    lucene/dev/branches/branch_3x/lucene/backwards/   (props changed)
    lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriter.java
    lucene/dev/branches/branch_3x/lucene/contrib/analyzers/common/src/java/org/apache/lucene/analysis/path/   (props changed)
    lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java
    lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java
    lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/IndexWriter.java
    lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/LogMergePolicy.java
    lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergeDocIDRemapper.java
    lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergePolicy.java
    lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TieredMergePolicy.java
    lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java
    lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/index/MockRandomMergePolicy.java
    lucene/dev/branches/branch_3x/solr/   (props changed)

Modified: lucene/dev/branches/branch_3x/lucene/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/CHANGES.txt?rev=1101583&r1=1101582&r2=1101583&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/CHANGES.txt (original)
+++ lucene/dev/branches/branch_3x/lucene/CHANGES.txt Tue May 10 18:28:19 2011
@@ -12,6 +12,11 @@ Changes in backwards compatibility polic
 * LUCENE-1076: IndexWriter.setInfoStream now throws IOException
   (Mike McCandless, Shai Erera)
 
+* LUCENE-3084: MergePolicy.OneMerge.segments was changed from
+  SegmentInfos to a List<SegmentInfo>; this is actually a minor change
+  because SegmentInfos itself extends Vector<SegmentInfo>.  (Uwe
+  Schindler, Mike McCandless)
+
 Changes in runtime behavior
 
 * LUCENE-3065: When a NumericField is retrieved from a Document loaded

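For code outside these files that builds or inspects MergePolicy.OneMerge, the cutover is mostly mechanical: SegmentInfos becomes List<SegmentInfo>, info(i) becomes get(i), and range(start, end) becomes subList(start, end). A rough sketch of the pattern follows; the helper class and method names are invented for illustration and are not part of this commit.

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.lucene.index.MergePolicy;
    import org.apache.lucene.index.SegmentInfo;
    import org.apache.lucene.index.SegmentInfos;

    // Hypothetical helper showing the pattern used throughout this commit:
    // collect SegmentInfo instances into a plain List instead of a
    // SegmentInfos, then hand that List to the OneMerge constructor.
    class ExampleMergeSelection {
      static MergePolicy.OneMerge firstN(SegmentInfos infos, int n) {
        final List<SegmentInfo> selected = new ArrayList<SegmentInfo>();
        for (int i = 0; i < n && i < infos.size(); i++) {
          selected.add(infos.info(i));   // SegmentInfos itself still exposes info(i)
        }
        // Previously OneMerge took a SegmentInfos; now it takes the List.
        return new MergePolicy.OneMerge(selected);
      }
    }
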
Modified: lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriter.java?rev=1101583&r1=1101582&r2=1101583&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriter.java (original)
+++ lucene/dev/branches/branch_3x/lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriter.java Tue May 10 18:28:19 2011
@@ -1251,47 +1251,6 @@ public class TestIndexWriter extends Luc
     }
   }
 
-  // Just intercepts all merges & verifies that we are never
-  // merging a segment with >= 20 (maxMergeDocs) docs
-  private class MyMergeScheduler extends MergeScheduler {
-    @Override
-    synchronized public void merge(IndexWriter writer)
-      throws CorruptIndexException, IOException {
-
-      while(true) {
-        MergePolicy.OneMerge merge = writer.getNextMerge();
-        if (merge == null)
-          break;
-        for(int i=0;i<merge.segments.size();i++)
-          assert merge.segments.info(i).docCount < 20;
-        writer.merge(merge);
-      }
-    }
-
-    @Override
-    public void close() {}
-  }
-
-  // LUCENE-1013
-  public void testSetMaxMergeDocs() throws IOException {
-    Directory dir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT))
-        .setMergeScheduler(new MyMergeScheduler()).setMaxBufferedDocs(2);
-    LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
-    lmp.setMaxMergeDocs(20);
-    lmp.setMergeFactor(2);
-    IndexWriter iw = new IndexWriter(dir, conf);
-    Document document = new Document();
-    document.add(newField("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED,
-                           Field.TermVector.YES));
-    for(int i=0;i<177;i++)
-      iw.addDocument(document);
-    iw.close();
-    dir.close();
-  }
-
-
   public void testVariableSchema() throws Exception {
     Directory dir = newDirectory();
     int delID = 0;

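The deleted MyMergeScheduler/testSetMaxMergeDocs pair exercised the old SegmentInfos-based segments field. For reference only, the per-merge check adapted to the new API would read roughly as below (a sketch, kept in the org.apache.lucene.index package so that getNextMerge() stays visible, just as in the deleted test); the backwards test itself stays removed.

    // Sketch of the deleted merge check against the new List-based API; the
    // only substantive change is segments.info(i) -> segments.get(i).
    private class MyMergeScheduler extends MergeScheduler {
      @Override
      public synchronized void merge(IndexWriter writer)
        throws CorruptIndexException, IOException {
        while (true) {
          MergePolicy.OneMerge merge = writer.getNextMerge();
          if (merge == null) {
            break;
          }
          for (int i = 0; i < merge.segments.size(); i++) {
            assert merge.segments.get(i).docCount < 20;
          }
          writer.merge(merge);
        }
      }

      @Override
      public void close() {}
    }
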
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java?rev=1101583&r1=1101582&r2=1101583&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java Tue May 10 18:28:19 2011
@@ -121,9 +121,9 @@ class BufferedDeletesStream {
     public final long gen;
 
     // If non-null, contains segments that are 100% deleted
-    public final SegmentInfos allDeleted;
+    public final List<SegmentInfo> allDeleted;
 
-    ApplyDeletesResult(boolean anyDeletes, long gen, SegmentInfos allDeleted) {
+    ApplyDeletesResult(boolean anyDeletes, long gen, List<SegmentInfo> allDeleted) {
       this.anyDeletes = anyDeletes;
       this.gen = gen;
       this.allDeleted = allDeleted;
@@ -153,7 +153,7 @@ class BufferedDeletesStream {
   /** Resolves the buffered deleted Term/Query/docIDs, into
    *  actual deleted docIDs in the deletedDocs BitVector for
    *  each SegmentReader. */
-  public synchronized ApplyDeletesResult applyDeletes(IndexWriter.ReaderPool readerPool, SegmentInfos infos) throws IOException {
+  public synchronized ApplyDeletesResult applyDeletes(IndexWriter.ReaderPool readerPool, List<SegmentInfo> infos) throws IOException {
     final long t0 = System.currentTimeMillis();
 
     if (infos.size() == 0) {
@@ -171,7 +171,7 @@ class BufferedDeletesStream {
       message("applyDeletes: infos=" + infos + " packetCount=" + deletes.size());
     }
 
-    SegmentInfos infos2 = new SegmentInfos();
+    List<SegmentInfo> infos2 = new ArrayList<SegmentInfo>();
     infos2.addAll(infos);
     Collections.sort(infos2, sortByDelGen);
 
@@ -181,7 +181,7 @@ class BufferedDeletesStream {
     int infosIDX = infos2.size()-1;
     int delIDX = deletes.size()-1;
 
-    SegmentInfos allDeleted = null;
+    List<SegmentInfo> allDeleted = null;
 
     while (infosIDX >= 0) {
       //System.out.println("BD: cycle delIDX=" + delIDX + " infoIDX=" + infosIDX);
@@ -223,7 +223,7 @@ class BufferedDeletesStream {
 
         if (segAllDeletes) {
           if (allDeleted == null) {
-            allDeleted = new SegmentInfos();
+            allDeleted = new ArrayList<SegmentInfo>();
           }
           allDeleted.add(info);
         }
@@ -260,7 +260,7 @@ class BufferedDeletesStream {
 
           if (segAllDeletes) {
             if (allDeleted == null) {
-              allDeleted = new SegmentInfos();
+              allDeleted = new ArrayList<SegmentInfo>();
             }
             allDeleted.add(info);
           }

Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java?rev=1101583&r1=1101582&r2=1101583&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java Tue May 10 18:28:19 2011
@@ -143,8 +143,8 @@ public class ConcurrentMergeScheduler ex
       final MergePolicy.OneMerge m1 = t1.getCurrentMerge();
       final MergePolicy.OneMerge m2 = t2.getCurrentMerge();
       
-      final int c1 = m1 == null ? Integer.MAX_VALUE : m1.segments.totalDocCount();
-      final int c2 = m2 == null ? Integer.MAX_VALUE : m2.segments.totalDocCount();
+      final int c1 = m1 == null ? Integer.MAX_VALUE : m1.totalDocCount;
+      final int c2 = m2 == null ? Integer.MAX_VALUE : m2.totalDocCount;
 
       return c2 - c1;
     }

Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/IndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/IndexWriter.java?rev=1101583&r1=1101582&r2=1101583&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/IndexWriter.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/IndexWriter.java Tue May 10 18:28:19 2011
@@ -485,7 +485,7 @@ public class IndexWriter implements Clos
     private final Map<SegmentInfo,SegmentReader> readerMap = new HashMap<SegmentInfo,SegmentReader>();
 
     /** Forcefully clear changes for the specified segments.  This is called on successful merge. */
-    synchronized void clear(SegmentInfos infos) throws IOException {
+    synchronized void clear(List<SegmentInfo> infos) throws IOException {
       if (infos == null) {
         for (Map.Entry<SegmentInfo,SegmentReader> ent: readerMap.entrySet()) {
           ent.getValue().hasChanges = false;
@@ -575,7 +575,7 @@ public class IndexWriter implements Clos
       return false;
     }
 
-    public synchronized void drop(SegmentInfos infos) throws IOException {
+    public synchronized void drop(List<SegmentInfo> infos) throws IOException {
       for(SegmentInfo info : infos) {
         drop(info);
       }
@@ -3477,7 +3477,7 @@ public class IndexWriter implements Clos
 
     assert testPoint("startCommitMergeDeletes");
 
-    final SegmentInfos sourceSegments = merge.segments;
+    final List<SegmentInfo> sourceSegments = merge.segments;
 
     if (infoStream != null)
       message("commitMergeDeletes " + merge.segString(directory));
@@ -3489,7 +3489,7 @@ public class IndexWriter implements Clos
     long minGen = Long.MAX_VALUE;
 
     for(int i=0; i < sourceSegments.size(); i++) {
-      SegmentInfo info = sourceSegments.info(i);
+      SegmentInfo info = sourceSegments.get(i);
       minGen = Math.min(info.getBufferedDeletesGen(), minGen);
       int docCount = info.docCount;
       final SegmentReader previousReader = merge.readerClones.get(i);
@@ -3937,7 +3937,7 @@ public class IndexWriter implements Clos
     // It's possible we are called twice, eg if there was an
     // exception inside mergeInit
     if (merge.registerDone) {
-      final SegmentInfos sourceSegments = merge.segments;
+      final List<SegmentInfo> sourceSegments = merge.segments;
       for(SegmentInfo info : sourceSegments) {
         mergingSegments.remove(info);
       }
@@ -4008,7 +4008,7 @@ public class IndexWriter implements Clos
     
     int mergedDocCount = 0;
 
-    SegmentInfos sourceSegments = merge.segments;
+    List<SegmentInfo> sourceSegments = merge.segments;
 
     SegmentMerger merger = new SegmentMerger(directory, config.getTermIndexInterval(), mergedName, merge,
                                              payloadProcessorProvider,
@@ -4031,7 +4031,7 @@ public class IndexWriter implements Clos
       int segUpto = 0;
       while(segUpto < sourceSegments.size()) {
 
-        final SegmentInfo info = sourceSegments.info(segUpto);
+        final SegmentInfo info = sourceSegments.get(segUpto);
 
         // Hold onto the "live" reader; we will use this to
         // commit merged deletes
@@ -4223,14 +4223,14 @@ public class IndexWriter implements Clos
   }
 
   /** @lucene.internal */
-  public synchronized String segString(SegmentInfos infos) throws IOException {
+  public synchronized String segString(List<SegmentInfo> infos) throws IOException {
     StringBuilder buffer = new StringBuilder();
     final int count = infos.size();
     for(int i = 0; i < count; i++) {
       if (i > 0) {
         buffer.append(' ');
       }
-      buffer.append(segString(infos.info(i)));
+      buffer.append(segString(infos.get(i)));
     }
     return buffer.toString();
   }

Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/LogMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/LogMergePolicy.java?rev=1101583&r1=1101582&r2=1101583&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/LogMergePolicy.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/LogMergePolicy.java Tue May 10 18:28:19 2011
@@ -576,7 +576,7 @@ public abstract class LogMergePolicy ext
         } else if (!anyTooLarge) {
           if (spec == null)
             spec = new MergeSpecification();
-          final SegmentInfos mergeInfos = new SegmentInfos();
+          final List<SegmentInfo> mergeInfos = new ArrayList<SegmentInfo>();
           for(int i=start;i<end;i++) {
             mergeInfos.add(levels.get(i).info);
             assert infos.contains(levels.get(i).info);

Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergeDocIDRemapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergeDocIDRemapper.java?rev=1101583&r1=1101582&r2=1101583&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergeDocIDRemapper.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergeDocIDRemapper.java Tue May 10 18:28:19 2011
@@ -32,7 +32,7 @@ final class MergeDocIDRemapper {
 
   public MergeDocIDRemapper(SegmentInfos infos, int[][] docMaps, int[] delCounts, MergePolicy.OneMerge merge, int mergedDocCount) {
     this.docMaps = docMaps;
-    SegmentInfo firstSegment = merge.segments.info(0);
+    SegmentInfo firstSegment = merge.segments.get(0);
     int i = 0;
     while(true) {
       SegmentInfo info = infos.info(i);
@@ -45,7 +45,7 @@ final class MergeDocIDRemapper {
     int numDocs = 0;
     for(int j=0;j<docMaps.length;i++,j++) {
       numDocs += infos.info(i).docCount;
-      assert infos.info(i).equals(merge.segments.info(j));
+      assert infos.info(i).equals(merge.segments.get(j));
     }
     maxDocID = minDocID + numDocs;
 
@@ -55,7 +55,7 @@ final class MergeDocIDRemapper {
     starts[0] = minDocID;
     newStarts[0] = minDocID;
     for(i=1;i<docMaps.length;i++) {
-      final int lastDocCount = merge.segments.info(i-1).docCount;
+      final int lastDocCount = merge.segments.get(i-1).docCount;
       starts[i] = starts[i-1] + lastDocCount;
       newStarts[i] = newStarts[i-1] + lastDocCount - delCounts[i-1];
     }
@@ -69,7 +69,7 @@ final class MergeDocIDRemapper {
     // assert docShift > 0;
 
     // Make sure it all adds up:
-    assert docShift == maxDocID - (newStarts[docMaps.length-1] + merge.segments.info(docMaps.length-1).docCount - delCounts[docMaps.length-1]);
+    assert docShift == maxDocID - (newStarts[docMaps.length-1] + merge.segments.get(docMaps.length-1).docCount - delCounts[docMaps.length-1]);
   }
 
   public int remap(int oldDocID) {

Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergePolicy.java?rev=1101583&r1=1101582&r2=1101583&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergePolicy.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/MergePolicy.java Tue May 10 18:28:19 2011
@@ -75,15 +75,21 @@ public abstract class MergePolicy implem
     long estimatedMergeBytes;       // used by IndexWriter
     List<SegmentReader> readers;        // used by IndexWriter
     List<SegmentReader> readerClones;   // used by IndexWriter
-    public final SegmentInfos segments;
+    public final List<SegmentInfo> segments;
+    public final int totalDocCount;
     boolean aborted;
     Throwable error;
     boolean paused;
 
-    public OneMerge(SegmentInfos segments) {
+    public OneMerge(List<SegmentInfo> segments) {
       if (0 == segments.size())
         throw new RuntimeException("segments must include at least one segment");
       this.segments = segments;
+      int count = 0;
+      for(SegmentInfo info : segments) {
+        count += info.docCount;
+      }
+      totalDocCount = count;
     }
 
     /** Record that an exception occurred while executing
@@ -147,7 +153,7 @@ public abstract class MergePolicy implem
       final int numSegments = segments.size();
       for(int i=0;i<numSegments;i++) {
         if (i > 0) b.append(' ');
-        b.append(segments.info(i).toString(dir, 0));
+        b.append(segments.get(i).toString(dir, 0));
       }
       if (info != null)
         b.append(" into ").append(info.name);

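The OneMerge constructor now sums SegmentInfo.docCount once and exposes the result as the public final totalDocCount field, which is what the ConcurrentMergeScheduler hunk above reads in place of the old segments.totalDocCount(). A minimal sketch of how calling code can pick up the field (the helper name is invented for illustration):

    import java.util.List;

    import org.apache.lucene.index.MergePolicy;
    import org.apache.lucene.index.SegmentInfo;

    // Hypothetical helper: report the size of a pending merge.
    final class MergeSizeUtil {
      static int docsInMerge(List<SegmentInfo> toMerge) {
        MergePolicy.OneMerge merge = new MergePolicy.OneMerge(toMerge);
        // Before this commit: merge.segments.totalDocCount() (a SegmentInfos method)
        // After this commit:  merge.totalDocCount, summed once in the constructor
        return merge.totalDocCount;
      }
    }
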
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TieredMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TieredMergePolicy.java?rev=1101583&r1=1101582&r2=1101583&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TieredMergePolicy.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/TieredMergePolicy.java Tue May 10 18:28:19 2011
@@ -23,6 +23,8 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Comparator;
+import java.util.List;
+import java.util.ArrayList;
 
 /**
  *  Merges segments of approximately equal size, subject to
@@ -249,7 +251,7 @@ public class TieredMergePolicy extends M
     final Collection<SegmentInfo> merging = writer.get().getMergingSegments();
     final Collection<SegmentInfo> toBeMerged = new HashSet<SegmentInfo>();
 
-    final SegmentInfos infosSorted = new SegmentInfos();
+    final List<SegmentInfo> infosSorted = new ArrayList<SegmentInfo>();
     infosSorted.addAll(infos);
 
     Collections.sort(infosSorted, segmentByteSizeDescending);
@@ -277,7 +279,7 @@ public class TieredMergePolicy extends M
     // If we have too-large segments, grace them out
     // of the maxSegmentCount:
     int tooBigCount = 0;
-    while (tooBigCount < infosSorted.size() && size(infosSorted.info(tooBigCount)) >= maxMergedSegmentBytes/2.0) {
+    while (tooBigCount < infosSorted.size() && size(infosSorted.get(tooBigCount)) >= maxMergedSegmentBytes/2.0) {
       totIndexBytes -= size(infosSorted.get(tooBigCount));
       tooBigCount++;
     }
@@ -310,7 +312,7 @@ public class TieredMergePolicy extends M
       // Gather eligible segments for merging, ie segments
       // not already being merged and not already picked (by
       // prior iteration of this loop) for merging:
-      final SegmentInfos eligible = new SegmentInfos();
+      final List<SegmentInfo> eligible = new ArrayList<SegmentInfo>();
       for(int idx = tooBigCount; idx<infosSorted.size(); idx++) {
         final SegmentInfo info = infosSorted.get(idx);
         if (merging.contains(info)) {
@@ -332,7 +334,7 @@ public class TieredMergePolicy extends M
 
         // OK we are over budget -- find best merge!
         MergeScore bestScore = null;
-        SegmentInfos best = null;
+        List<SegmentInfo> best = null;
         boolean bestTooLarge = false;
         long bestMergeBytes = 0;
 
@@ -341,10 +343,10 @@ public class TieredMergePolicy extends M
 
           long totAfterMergeBytes = 0;
 
-          final SegmentInfos candidate = new SegmentInfos();
+          final List<SegmentInfo> candidate = new ArrayList<SegmentInfo>();
           boolean hitTooLarge = false;
           for(int idx = startIdx;idx<eligible.size() && candidate.size() < maxMergeAtOnce;idx++) {
-            final SegmentInfo info = eligible.info(idx);
+            final SegmentInfo info = eligible.get(idx);
             final long segBytes = size(info);
 
             if (totAfterMergeBytes + segBytes > maxMergedSegmentBytes) {
@@ -398,7 +400,7 @@ public class TieredMergePolicy extends M
   }
 
   /** Expert: scores one merge; subclasses can override. */
-  protected MergeScore score(SegmentInfos candidate, boolean hitTooLarge, long mergingBytes) throws IOException {
+  protected MergeScore score(List<SegmentInfo> candidate, boolean hitTooLarge, long mergingBytes) throws IOException {
     long totBeforeMergeBytes = 0;
     long totAfterMergeBytes = 0;
     long totAfterMergeBytesFloored = 0;
@@ -420,7 +422,7 @@ public class TieredMergePolicy extends M
       // over time:
       skew = 1.0/maxMergeAtOnce;
     } else {
-      skew = ((double) floorSize(size(candidate.info(0))))/totAfterMergeBytesFloored;
+      skew = ((double) floorSize(size(candidate.get(0))))/totAfterMergeBytesFloored;
     }
 
     // Strongly favor merges with less skew (smaller
@@ -458,7 +460,8 @@ public class TieredMergePolicy extends M
     if (verbose()) {
       message("findMergesForOptimize maxSegmentCount=" + maxSegmentCount + " infos=" + writer.get().segString(infos) + " segmentsToOptimize=" + segmentsToOptimize);
     }
-    SegmentInfos eligible = new SegmentInfos();
+
+    List<SegmentInfo> eligible = new ArrayList<SegmentInfo>();
     boolean optimizeMergeRunning = false;
     final Collection<SegmentInfo> merging = writer.get().getMergingSegments();
     for(SegmentInfo info : infos) {
@@ -499,7 +502,7 @@ public class TieredMergePolicy extends M
       if (spec == null) {
         spec = new MergeSpecification();
       }
-      final OneMerge merge = new OneMerge(eligible.range(end-maxMergeAtOnceExplicit, end));
+      final OneMerge merge = new OneMerge(eligible.subList(end-maxMergeAtOnceExplicit, end));
       if (verbose()) {
         message("add merge=" + writer.get().segString(merge.segments));
       }
@@ -510,7 +513,7 @@ public class TieredMergePolicy extends M
     if (spec == null && !optimizeMergeRunning) {
       // Do final merge
       final int numToMerge = end - maxSegmentCount + 1;
-      final OneMerge merge = new OneMerge(eligible.range(end-numToMerge, end));
+      final OneMerge merge = new OneMerge(eligible.subList(end-numToMerge, end));
       if (verbose()) {
         message("add final merge=" + merge.segString(writer.get().getDirectory()));
       }
@@ -527,7 +530,7 @@ public class TieredMergePolicy extends M
     if (verbose()) {
       message("findMergesToExpungeDeletes infos=" + writer.get().segString(infos) + " expungeDeletesPctAllowed=" + expungeDeletesPctAllowed);
     }
-    final SegmentInfos eligible = new SegmentInfos();
+    final List<SegmentInfo> eligible = new ArrayList<SegmentInfo>();
     final Collection<SegmentInfo> merging = writer.get().getMergingSegments();
     for(SegmentInfo info : infos) {
       double pctDeletes = 100.*((double) writer.get().numDeletedDocs(info))/info.docCount;
@@ -580,7 +583,7 @@ public class TieredMergePolicy extends M
         spec = new MergeSpecification();
       }
 
-      final OneMerge merge = new OneMerge(eligible.range(start, upto));
+      final OneMerge merge = new OneMerge(eligible.subList(start, upto));
       if (verbose()) {
         message("add merge=" + writer.get().segString(merge.segments));
       }

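The range(start, end) calls become List.subList(start, end) with the same indices; both select the half-open [start, end) slice, so no index arithmetic changes. Note that subList returns a view backed by the original list, which is harmless here because eligible is a local ArrayList that is not modified after the sublists are taken; callers that later mutate the backing list may want an independent copy, as in this illustrative helper (not part of the commit):

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.lucene.index.SegmentInfo;

    final class SliceUtil {
      // Select the same half-open range as the old SegmentInfos.range(start, end),
      // but copy it into an independent list so later changes to "eligible"
      // cannot leak into the selection.
      static List<SegmentInfo> copyRange(List<SegmentInfo> eligible, int start, int end) {
        return new ArrayList<SegmentInfo>(eligible.subList(start, end));
      }
    }
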
Modified: lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java?rev=1101583&r1=1101582&r2=1101583&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/java/org/apache/lucene/index/UpgradeIndexMergePolicy.java Tue May 10 18:28:19 2011
@@ -20,7 +20,9 @@ package org.apache.lucene.index;
 import org.apache.lucene.util.Constants;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Set;
 
 /** This {@link MergePolicy} is used for upgrading all existing segments of
@@ -101,7 +103,7 @@ public class UpgradeIndexMergePolicy ext
       if (verbose())
         message("findMergesForOptimize: " +  base.getClass().getSimpleName() +
         " does not want to merge all old segments, merge remaining ones into new segment: " + oldSegments);
-      final SegmentInfos newInfos = new SegmentInfos();
+      final List<SegmentInfo> newInfos = new ArrayList<SegmentInfo>();
       for (final SegmentInfo si : segmentInfos) {
         if (oldSegments.contains(si)) {
           newInfos.add(si);

Modified: lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/index/MockRandomMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/index/MockRandomMergePolicy.java?rev=1101583&r1=1101582&r2=1101583&view=diff
==============================================================================
--- lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/index/MockRandomMergePolicy.java (original)
+++ lucene/dev/branches/branch_3x/lucene/src/test-framework/org/apache/lucene/index/MockRandomMergePolicy.java Tue May 10 18:28:19 2011
@@ -18,7 +18,9 @@ package org.apache.lucene.index;
  */
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Collections;
+import java.util.List;
 import java.util.Random;
 import java.util.Set;
 
@@ -58,7 +60,7 @@ public class MockRandomMergePolicy exten
       SegmentInfos segmentInfos, int maxSegmentCount, Set<SegmentInfo> segmentsToOptimize)
     throws CorruptIndexException, IOException {
 
-    final SegmentInfos eligibleSegments = new SegmentInfos();
+    final List<SegmentInfo> eligibleSegments = new ArrayList<SegmentInfo>();
     for(SegmentInfo info : segmentInfos) {
       if (segmentsToOptimize.contains(info)) {
         eligibleSegments.add(info);
@@ -76,7 +78,7 @@ public class MockRandomMergePolicy exten
       while(upto < eligibleSegments.size()) {
         int max = Math.min(10, eligibleSegments.size()-upto);
         int inc = max <= 2 ? max : _TestUtil.nextInt(random, 2, max);
-        mergeSpec.add(new OneMerge(eligibleSegments.range(upto, upto+inc)));
+        mergeSpec.add(new OneMerge(eligibleSegments.subList(upto, upto+inc)));
         upto += inc;
       }
     }