Posted to commits@lucene.apache.org by sh...@apache.org on 2010/05/27 17:36:33 UTC

svn commit: r948861 [1/2] - in /lucene/dev/trunk: lucene/ lucene/contrib/misc/src/java/org/apache/lucene/misc/ lucene/src/java/org/apache/lucene/index/ lucene/src/java/org/apache/lucene/index/codecs/preflex/ lucene/src/java/org/apache/lucene/index/code...

Author: shaie
Date: Thu May 27 15:36:32 2010
New Revision: 948861

URL: http://svn.apache.org/viewvc?rev=948861&view=rev
Log:
LUCENE-2455: Some house cleaning in addIndexes* (trunk)

Added:
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java
      - copied, changed from r948393, lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/index.31.cfs.zip   (with props)
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/index.31.nocfs.zip   (with props)
Removed:
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java
Modified:
    lucene/dev/trunk/lucene/CHANGES.txt
    lucene/dev/trunk/lucene/contrib/misc/src/java/org/apache/lucene/misc/IndexMergeTool.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/CompoundFileReader.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/CompoundFileWriter.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/DocFieldProcessor.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/FieldsReader.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/FieldsWriter.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexFileDeleter.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexFileNames.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexWriter.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/NormsWriter.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentInfo.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentInfos.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentMerger.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentReader.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/StoredFieldsWriter.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsReader.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsWriter.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexFields.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/preflex/TermInfosReader.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsReaderImpl.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsWriterImpl.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/standard/SimpleStandardTermsIndexReader.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/standard/SimpleStandardTermsIndexWriter.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/standard/StandardPostingsReaderImpl.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/standard/StandardPostingsWriterImpl.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/standard/StandardTermsDictReader.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/standard/StandardTermsDictWriter.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/store/Directory.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/store/FSDirectory.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/store/RAMDirectory.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriterMerging.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestNorms.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestPayloadProcessorProvider.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestTermVectorsReader.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/search/TestBoolean2.java
    lucene/dev/trunk/solr/src/java/org/apache/solr/core/RefCntRamDirectory.java
    lucene/dev/trunk/solr/src/java/org/apache/solr/update/DirectUpdateHandler2.java

Modified: lucene/dev/trunk/lucene/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/CHANGES.txt?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/CHANGES.txt (original)
+++ lucene/dev/trunk/lucene/CHANGES.txt Thu May 27 15:36:32 2010
@@ -248,6 +248,17 @@ Changes in runtime behavior
   it cannot delete the lock file, since obtaining the lock does not fail if the 
   file is there. (Shai Erera)
 
+* LUCENE-2455: IndexWriter.addIndexes no longer optimizes the target index
+  before adding the new indexes, and the existing segments are no longer merged,
+  so the index will not end up with a single segment (unless it was empty before).
+  In addition, addIndexesNoOptimize was renamed to addIndexes and no longer
+  invokes a merge on the incoming and target segments; instead it copies the
+  incoming segments to the target index. You can call maybeMerge or optimize
+  after this method completes, if you need to.
+  Also, Directory.copyTo* were removed in favor of copy, which takes the target
+  Directory and the source and target file names as arguments, and copies the
+  source file to the target Directory under the target file name. (Shai Erera)
+
 API Changes
 
 * LUCENE-2076: Rename FSDirectory.getFile -> getDirectory.  (George
@@ -334,6 +345,11 @@ API Changes
   (such as SnapshotDeletionPolicy), you can call this method to remove those
   commit points when they are not needed anymore (instead of waiting for the 
   next commit). (Shai Erera)
+
+* LUCENE-2455: IndexWriter.addIndexesNoOptimize was renamed to addIndexes.
+  IndexFileNames.segmentFileName now takes another parameter to accommodate
+  custom file names. You should use this method to name all your files.
+  (Shai Erera) 
   
 Bug fixes
 

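For callers migrating to the renamed method, a minimal sketch (the writer and
directory variables here are hypothetical):

    // Before: addIndexesNoOptimize merged incoming segments with the target.
    // writer.addIndexesNoOptimize(dir1, dir2);

    // After LUCENE-2455: incoming segments are copied as-is, no merging.
    writer.addIndexes(dir1, dir2);

    // Call one of these afterwards if you still want the segments merged:
    writer.maybeMerge();   // let the merge policy decide
    // writer.optimize();  // or force down to a single segment
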
Modified: lucene/dev/trunk/lucene/contrib/misc/src/java/org/apache/lucene/misc/IndexMergeTool.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/contrib/misc/src/java/org/apache/lucene/misc/IndexMergeTool.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/contrib/misc/src/java/org/apache/lucene/misc/IndexMergeTool.java (original)
+++ lucene/dev/trunk/lucene/contrib/misc/src/java/org/apache/lucene/misc/IndexMergeTool.java Thu May 27 15:36:32 2010
@@ -49,7 +49,7 @@ public class IndexMergeTool {
     }
 
     System.out.println("Merging...");
-    writer.addIndexesNoOptimize(indexes);
+    writer.addIndexes(indexes);
 
     System.out.println("Optimizing...");
     writer.optimize();

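The tool's command-line behavior is unchanged by the rename; it still merges
the given source indexes into the target index and then optimizes, invoked
roughly like this (paths are placeholders):

    java org.apache.lucene.misc.IndexMergeTool /path/to/merged /path/to/index1 /path/to/index2
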
Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/CompoundFileReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/CompoundFileReader.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/CompoundFileReader.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/CompoundFileReader.java Thu May 27 15:36:32 2010
@@ -56,9 +56,7 @@ public class CompoundFileReader extends 
     this(dir, name, BufferedIndexInput.BUFFER_SIZE);
   }
 
-  public CompoundFileReader(Directory dir, String name, int readBufferSize)
-    throws IOException
-    {
+  public CompoundFileReader(Directory dir, String name, int readBufferSize) throws IOException {
         directory = dir;
         fileName = name;
         this.readBufferSize = readBufferSize;
@@ -68,13 +66,37 @@ public class CompoundFileReader extends 
         try {
             stream = dir.openInput(name, readBufferSize);
 
+            // Read the first VInt. If it is negative, it's the version number;
+            // otherwise it's the count (pre-3.1 indexes).
+            int firstInt = stream.readVInt();
+            
+            final int count;
+            final boolean stripSegmentName;
+            if (firstInt < CompoundFileWriter.FORMAT_PRE_VERSION) {
+              if (firstInt < CompoundFileWriter.FORMAT_CURRENT) {
+                throw new CorruptIndexException("Incompatible format version: "
+                    + firstInt + " expected " + CompoundFileWriter.FORMAT_CURRENT);
+              }
+              // It's a post-3.1 index, read the count.
+              count = stream.readVInt();
+              stripSegmentName = false;
+            } else {
+              count = firstInt;
+              stripSegmentName = true;
+            }
+
             // read the directory and init files
-            int count = stream.readVInt();
             FileEntry entry = null;
             for (int i=0; i<count; i++) {
                 long offset = stream.readLong();
                 String id = stream.readString();
 
+                if (stripSegmentName) {
+                  // Fix the id so it does not include the segment name. This is
+                  // relevant for pre-3.1 indexes.
+                  id = IndexFileNames.stripSegmentName(id);
+                }
+                
                 if (entry != null) {
                     // set length of the previous entry
                     entry.length = offset - entry.offset;
@@ -93,7 +115,7 @@ public class CompoundFileReader extends 
             success = true;
 
         } finally {
-            if (! success && (stream != null)) {
+            if (!success && (stream != null)) {
                 try {
                     stream.close();
                 } catch (IOException e) { }
@@ -133,7 +155,8 @@ public class CompoundFileReader extends 
     {
         if (stream == null)
             throw new IOException("Stream closed");
-
+        
+        id = IndexFileNames.stripSegmentName(id);
         FileEntry entry = entries.get(id);
         if (entry == null)
             throw new IOException("No sub-file with id " + id + " found");
@@ -144,14 +167,19 @@ public class CompoundFileReader extends 
     /** Returns an array of strings, one for each file in the directory. */
     @Override
     public String[] listAll() {
-        String res[] = new String[entries.size()];
-        return entries.keySet().toArray(res);
+        String[] res = entries.keySet().toArray(new String[entries.size()]);
+        // Add the segment name
+        String seg = fileName.substring(0, fileName.indexOf('.'));
+        for (int i = 0; i < res.length; i++) {
+          res[i] = seg + res[i];
+        }
+        return res;
     }
 
     /** Returns true iff a file with the given name exists. */
     @Override
     public boolean fileExists(String name) {
-        return entries.containsKey(name);
+        return entries.containsKey(IndexFileNames.stripSegmentName(name));
     }
 
     /** Returns the time the compound file was last modified. */
@@ -185,7 +213,7 @@ public class CompoundFileReader extends 
      * @throws IOException if the file does not exist */
     @Override
     public long fileLength(String name) throws IOException {
-        FileEntry e = entries.get(name);
+        FileEntry e = entries.get(IndexFileNames.stripSegmentName(name));
         if (e == null)
             throw new FileNotFoundException(name);
         return e.length;

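Concretely, lookups now strip the segment prefix before consulting the entries
map, while listAll() adds it back, so callers keep seeing full file names. A
hypothetical illustration:

    CompoundFileReader cfr = new CompoundFileReader(dir, "_0.cfs");
    cfr.fileExists("_0.fnm");  // true: "_0.fnm" is stripped to ".fnm" for the lookup
    cfr.listAll();             // { "_0.fnm", ... }: the "_0" prefix is re-added
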
Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/CompoundFileWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/CompoundFileWriter.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/CompoundFileWriter.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/CompoundFileWriter.java Thu May 27 15:36:32 2010
@@ -59,6 +59,16 @@ final class CompoundFileWriter {
         long dataOffset;
     }
 
+    // Before versioning started.
+    static final int FORMAT_PRE_VERSION = 0;
+    
+    // Segment name is not written in the file names.
+    static final int FORMAT_NO_SEGMENT_PREFIX = -1;
+
+    // NOTE: if you introduce a new format, make it 1 lower
+    // than the current one, and always change this if you
+    // switch to a new format!
+    static final int FORMAT_CURRENT = FORMAT_NO_SEGMENT_PREFIX;
 
     private Directory directory;
     private String fileName;
@@ -146,6 +156,10 @@ final class CompoundFileWriter {
         try {
             os = directory.createOutput(fileName);
 
+            // Write the Version info - must be a VInt because CFR reads a VInt
+            // in older versions!
+            os.writeVInt(FORMAT_CURRENT);
+            
             // Write the number of entries
             os.writeVInt(entries.size());
 
@@ -156,7 +170,7 @@ final class CompoundFileWriter {
             for (FileEntry fe : entries) {
                 fe.directoryOffset = os.getFilePointer();
                 os.writeLong(0);    // for now
-                os.writeString(fe.file);
+                os.writeString(IndexFileNames.stripSegmentName(fe.file));
                 totalSize += directory.fileLength(fe.file);
             }
 

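Conceptually, the compound file header now looks like this (a sketch, not an
authoritative format spec):

    VInt   format                       // FORMAT_CURRENT (-1); absent in pre-3.1 files
    VInt   count                        // number of sub-files
    count x { Long offset, String id }  // id is stored without the segment-name prefix
    <sub-file data>

A pre-3.1 file starts directly with the non-negative count, and each id still
carries the segment name; since a valid count is never negative, the reader can
distinguish the two layouts from the first VInt alone.
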
Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/DocFieldProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/DocFieldProcessor.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/DocFieldProcessor.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/DocFieldProcessor.java Thu May 27 15:36:32 2010
@@ -67,7 +67,7 @@ final class DocFieldProcessor extends Do
     // consumer can alter the FieldInfo* if necessary.  EG,
     // FreqProxTermsWriter does this with
     // FieldInfo.storePayload.
-    final String fileName = IndexFileNames.segmentFileName(state.segmentName, IndexFileNames.FIELD_INFOS_EXTENSION);
+    final String fileName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.FIELD_INFOS_EXTENSION);
     fieldInfos.write(state.directory, fileName);
     state.flushedFiles.add(fileName);
   }

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java Thu May 27 15:36:32 2010
@@ -670,7 +670,7 @@ final class DocumentsWriter {
   void createCompoundFile(String segment) throws IOException {
     
     CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, 
-        IndexFileNames.segmentFileName(segment, IndexFileNames.COMPOUND_FILE_EXTENSION));
+        IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION));
     for(String fileName : flushState.flushedFiles) {
       cfsWriter.addFile(fileName);
     }

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/FieldsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/FieldsReader.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/FieldsReader.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/FieldsReader.java Thu May 27 15:36:32 2010
@@ -105,8 +105,8 @@ final class FieldsReader implements Clon
     try {
       fieldInfos = fn;
 
-      cloneableFieldsStream = d.openInput(IndexFileNames.segmentFileName(segment, IndexFileNames.FIELDS_EXTENSION), readBufferSize);
-      cloneableIndexStream = d.openInput(IndexFileNames.segmentFileName(segment, IndexFileNames.FIELDS_INDEX_EXTENSION), readBufferSize);
+      cloneableFieldsStream = d.openInput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.FIELDS_EXTENSION), readBufferSize);
+      cloneableIndexStream = d.openInput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.FIELDS_INDEX_EXTENSION), readBufferSize);
       
       // First version of fdx did not include a format
       // header, but, the first int will always be 0 in that

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/FieldsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/FieldsWriter.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/FieldsWriter.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/FieldsWriter.java Thu May 27 15:36:32 2010
@@ -61,7 +61,7 @@ final class FieldsWriter
         fieldInfos = fn;
 
         boolean success = false;
-        final String fieldsName = IndexFileNames.segmentFileName(segment, IndexFileNames.FIELDS_EXTENSION);
+        final String fieldsName = IndexFileNames.segmentFileName(segment, "", IndexFileNames.FIELDS_EXTENSION);
         try {
           fieldsStream = d.createOutput(fieldsName);
           fieldsStream.writeInt(FORMAT_CURRENT);
@@ -82,7 +82,7 @@ final class FieldsWriter
         }
 
         success = false;
-        final String indexName = IndexFileNames.segmentFileName(segment, IndexFileNames.FIELDS_INDEX_EXTENSION);
+        final String indexName = IndexFileNames.segmentFileName(segment, "", IndexFileNames.FIELDS_INDEX_EXTENSION);
         try {
           indexStream = d.createOutput(indexName);
           indexStream.writeInt(FORMAT_CURRENT);

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexFileDeleter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexFileDeleter.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexFileDeleter.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexFileDeleter.java Thu May 27 15:36:32 2010
@@ -616,8 +616,6 @@ final class IndexFileDeleter {
       files = Collections.unmodifiableCollection(segmentInfos.files(directory, true));
       gen = segmentInfos.getGeneration();
       isOptimized = segmentInfos.size() == 1 && !segmentInfos.info(0).hasDeletions();
-
-      assert !segmentInfos.hasExternalSegments(directory);
     }
 
     @Override

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexFileNames.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexFileNames.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexFileNames.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexFileNames.java Thu May 27 15:36:32 2010
@@ -26,7 +26,7 @@ import org.apache.lucene.index.codecs.Co
  * matchesExtension}), as well as generating file names from a segment name,
  * generation and extension (
  * {@link #fileNameFromGeneration(String, String, long) fileNameFromGeneration},
- * {@link #segmentFileName(String, String) segmentFileName}).
+ * {@link #segmentFileName(String, String, String) segmentFileName}).
  *
  * <p><b>NOTE</b>: extensions used by codecs are not
  * listed here.  You must interact with the {@link Codec}
@@ -150,7 +150,7 @@ public final class IndexFileNames {
     if (gen == SegmentInfo.NO) {
       return null;
     } else if (gen == SegmentInfo.WITHOUT_GEN) {
-      return segmentFileName(base, ext);
+      return segmentFileName(base, "", ext);
     } else {
       // The '6' part in the length is: 1 for '.', 1 for '_' and 4 as estimate
       // to the gen length as string (hopefully an upper limit so SB won't
@@ -179,18 +179,32 @@ public final class IndexFileNames {
   }
 
   /**
-   * Returns the file name that matches the given segment name and extension.
-   * This method takes care to return the full file name in the form
-   * &lt;segmentName&gt;.&lt;ext&gt;, therefore you don't need to prefix the
-   * extension with a '.'.<br>
+   * Returns a file name that includes the given segment name, your own custom
+   * name and extension. The format of the filename is:
+   * &lt;segmentName&gt;(_&lt;name&gt;)(.&lt;ext&gt;).
+   * <p>
    * <b>NOTE:</b> .&lt;ext&gt; is added to the result file name only if
    * <code>ext</code> is not empty.
+   * <p>
+   * <b>NOTE:</b> _&lt;name&gt; is added to the result file name only if
+   * <code>name</code> is not empty.
+   * <p>
+   * <b>NOTE:</b> all custom files should be named using this method, or
+   * otherwise some structures may fail to handle them properly (such as if they
+   * are added to compound files).
    */
-  public static final String segmentFileName(String segmentName, String ext) {
-    if (ext.length() > 0) {
+  public static final String segmentFileName(String segmentName, String name, String ext) {
+    if (ext.length() > 0 || name.length() > 0) {
       assert !ext.startsWith(".");
-      return new StringBuilder(segmentName.length() + 1 + ext.length()).append(
-          segmentName).append('.').append(ext).toString();
+      StringBuilder sb = new StringBuilder(segmentName.length() + 2 + name.length() + ext.length());
+      sb.append(segmentName);
+      if (name.length() > 0) {
+        sb.append('_').append(name);
+      }
+      if (ext.length() > 0) {
+        sb.append('.').append(ext);
+      }
+      return sb.toString();
     } else {
       return segmentName;
     }
@@ -205,5 +219,27 @@ public final class IndexFileNames {
     // or not, since there's only 1 '+' operator.
     return filename.endsWith("." + ext);
   }
+
+  /**
+   * Strips the segment name out of the given file name. If you used
+   * {@link #segmentFileName} or {@link #fileNameFromGeneration} to create your
+   * files, then this method simply removes whatever comes before the first '.'
+   * or the second '_' (the '.' or '_' itself is kept).
+   * 
+   * @return the filename with the segment name removed, or the given filename
+   *         if it contains neither a '.' nor a '_'.
+   */
+  public static final String stripSegmentName(String filename) {
+    // If it is a .del file, there's an '_' after the first character
+    int idx = filename.indexOf('_', 1);
+    if (idx == -1) {
+      // If it's not, strip everything that's before the '.'
+      idx = filename.indexOf('.');
+    }
+    if (idx != -1) {
+      filename = filename.substring(idx);
+    }
+    return filename;
+  }
   
 }

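A few worked examples of the new naming scheme, assuming a segment named "_0"
(the custom name "custom" is hypothetical):

    IndexFileNames.segmentFileName("_0", "", "fnm");        // -> "_0.fnm"
    IndexFileNames.segmentFileName("_0", "custom", "tmp");  // -> "_0_custom.tmp"
    IndexFileNames.segmentFileName("_0", "", "");           // -> "_0"

    IndexFileNames.stripSegmentName("_0.fnm");         // -> ".fnm"
    IndexFileNames.stripSegmentName("_0_custom.tmp");  // -> "_custom.tmp"
    IndexFileNames.stripSegmentName("_0_1.del");       // -> "_1.del"
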
Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexWriter.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexWriter.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexWriter.java Thu May 27 15:36:32 2010
@@ -271,9 +271,6 @@ public class IndexWriter implements Clos
   volatile SegmentInfos pendingCommit;            // set when a commit is pending (after prepareCommit() & before commit())
   volatile long pendingCommitChangeCount;
 
-  private SegmentInfos localRollbackSegmentInfos;      // segmentInfos we will fallback to if the commit fails
-  private int localFlushedDocCount;               // saved docWriter.getFlushedDocCount during local transaction
-
   private SegmentInfos segmentInfos = new SegmentInfos();       // the segments
 
   private DocumentsWriter docWriter;
@@ -305,12 +302,7 @@ public class IndexWriter implements Clos
   private int flushCount;
   private int flushDeletesCount;
 
-  // Used to only allow one addIndexes to proceed at once
-  // TODO: use ReadWriteLock once we are on 5.0
-  private int readCount;                          // count of how many threads are holding read lock
-  private Thread writeThread;                     // non-null if any thread holds write lock
   final ReaderPool readerPool = new ReaderPool();
-  private int upgradeCount;
   
   // This is a "write once" variable (like the organic dye
   // on a DVD-R that may or may not be heated by a laser and
@@ -694,52 +686,6 @@ public class IndexWriter implements Clos
     }
   }
   
-  synchronized void acquireWrite() {
-    assert writeThread != Thread.currentThread();
-    while(writeThread != null || readCount > 0)
-      doWait();
-
-    // We could have been closed while we were waiting:
-    ensureOpen();
-
-    writeThread = Thread.currentThread();
-  }
-
-  synchronized void releaseWrite() {
-    assert Thread.currentThread() == writeThread;
-    writeThread = null;
-    notifyAll();
-  }
-
-  synchronized void acquireRead() {
-    final Thread current = Thread.currentThread();
-    while(writeThread != null && writeThread != current)
-      doWait();
-
-    readCount++;
-  }
-
-  // Allows one readLock to upgrade to a writeLock even if
-  // there are other readLocks as long as all other
-  // readLocks are also blocked in this method:
-  synchronized void upgradeReadToWrite() {
-    assert readCount > 0;
-    upgradeCount++;
-    while(readCount > upgradeCount || writeThread != null) {
-      doWait();
-    }
-    
-    writeThread = Thread.currentThread();
-    readCount--;
-    upgradeCount--;
-  }
-
-  synchronized void releaseRead() {
-    readCount--;
-    assert readCount >= 0;
-    notifyAll();
-  }
-
   /**
    * Used internally to throw an {@link
    * AlreadyClosedException} if this IndexWriter has been
@@ -1201,7 +1147,6 @@ public class IndexWriter implements Clos
   
   private synchronized void setRollbackSegmentInfos(SegmentInfos infos) {
     rollbackSegmentInfos = (SegmentInfos) infos.clone();
-    assert !rollbackSegmentInfos.hasExternalSegments(directory);
     rollbackSegments = new HashMap<SegmentInfo,Integer>();
     final int size = rollbackSegmentInfos.size();
     for(int i=0;i<size;i++)
@@ -1863,13 +1808,13 @@ public class IndexWriter implements Clos
       // Now build compound doc store file
 
       if (infoStream != null) {
-        message("create compound file " + IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.COMPOUND_FILE_STORE_EXTENSION));
+        message("create compound file " + IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.COMPOUND_FILE_STORE_EXTENSION));
       }
 
       success = false;
 
       final int numSegments = segmentInfos.size();
-      final String compoundFileName = IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.COMPOUND_FILE_STORE_EXTENSION);
+      final String compoundFileName = IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.COMPOUND_FILE_STORE_EXTENSION);
 
       try {
         CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, compoundFileName);
@@ -2397,10 +2342,7 @@ public class IndexWriter implements Clos
 
     synchronized(this) {
       resetMergeExceptions();
-      segmentsToOptimize = new HashSet<SegmentInfo>();
-      final int numSegments = segmentInfos.size();
-      for(int i=0;i<numSegments;i++)
-        segmentsToOptimize.add(segmentInfos.info(i));
+      segmentsToOptimize = new HashSet<SegmentInfo>(segmentInfos);
       
       // Now mark all pending & running merges as optimize
       // merge:
@@ -2646,169 +2588,6 @@ public class IndexWriter implements Clos
     }
   }
 
-  /** Like getNextMerge() except only returns a merge if it's
-   *  external. */
-  private synchronized MergePolicy.OneMerge getNextExternalMerge() {
-    if (pendingMerges.size() == 0)
-      return null;
-    else {
-      Iterator<MergePolicy.OneMerge> it = pendingMerges.iterator();
-      while(it.hasNext()) {
-        MergePolicy.OneMerge merge = it.next();
-        if (merge.isExternal) {
-          // Advance the merge from pending to running
-          it.remove();
-          runningMerges.add(merge);
-          return merge;
-        }
-      }
-
-      // All existing merges do not involve external segments
-      return null;
-    }
-  }
-
-  /*
-   * Begin a transaction.  During a transaction, any segment
-   * merges that happen (or ram segments flushed) will not
-   * write a new segments file and will not remove any files
-   * that were present at the start of the transaction.  You
-   * must make a matched (try/finally) call to
-   * commitTransaction() or rollbackTransaction() to finish
-   * the transaction.
-   *
-   * Note that buffered documents and delete terms are not handled
-   * within the transactions, so they must be flushed before the
-   * transaction is started.
-   */
-  private synchronized void startTransaction(boolean haveReadLock) throws IOException {
-
-    boolean success = false;
-    try {
-      if (infoStream != null)
-        message("now start transaction");
-
-      assert docWriter.getNumBufferedDeleteTerms() == 0 :
-      "calling startTransaction with buffered delete terms not supported: numBufferedDeleteTerms=" + docWriter.getNumBufferedDeleteTerms();
-      assert docWriter.getNumDocsInRAM() == 0 :
-      "calling startTransaction with buffered documents not supported: numDocsInRAM=" + docWriter.getNumDocsInRAM();
-
-      ensureOpen();
-
-      // If a transaction is trying to roll back (because
-      // addIndexes hit an exception) then wait here until
-      // that's done:
-      synchronized(this) {
-        while(stopMerges)
-          doWait();
-      }
-      success = true;
-    } finally {
-      // Release the write lock if our caller held it, on
-      // hitting an exception
-      if (!success && haveReadLock)
-        releaseRead();
-    }
-
-    if (haveReadLock) {
-      upgradeReadToWrite();
-    } else {
-      acquireWrite();
-    }
-
-    success = false;
-    try {
-      localRollbackSegmentInfos = (SegmentInfos) segmentInfos.clone();
-
-      assert !hasExternalSegments();
-
-      localFlushedDocCount = docWriter.getFlushedDocCount();
-
-      // We must "protect" our files at this point from
-      // deletion in case we need to rollback:
-      deleter.incRef(segmentInfos, false);
-
-      success = true;
-    } finally {
-      if (!success)
-        finishAddIndexes();
-    }
-  }
-
-  /*
-   * Rolls back the transaction and restores state to where
-   * we were at the start.
-   */
-  private synchronized void rollbackTransaction() throws IOException {
-
-    if (infoStream != null)
-      message("now rollback transaction");
-
-    if (docWriter != null) {
-      docWriter.setFlushedDocCount(localFlushedDocCount);
-    }
-
-    // Must finish merges before rolling back segmentInfos
-    // so merges don't hit exceptions on trying to commit
-    // themselves, don't get files deleted out from under
-    // them, etc:
-    finishMerges(false);
-
-    // Keep the same segmentInfos instance but replace all
-    // of its SegmentInfo instances.  This is so the next
-    // attempt to commit using this instance of IndexWriter
-    // will always write to a new generation ("write once").
-    segmentInfos.clear();
-    segmentInfos.addAll(localRollbackSegmentInfos);
-    localRollbackSegmentInfos = null;
-
-    // This must come after we rollback segmentInfos, so
-    // that if a commit() kicks off it does not see the
-    // segmentInfos with external segments
-    finishAddIndexes();
-
-    // Ask deleter to locate unreferenced files we had
-    // created & remove them:
-    deleter.checkpoint(segmentInfos, false);
-
-    // Remove the incRef we did in startTransaction:
-    deleter.decRef(segmentInfos);
-
-    // Also ask deleter to remove any newly created files
-    // that were never incref'd; this "garbage" is created
-    // when a merge kicks off but aborts part way through
-    // before it had a chance to incRef the files it had
-    // partially created
-    deleter.refresh();
-    
-    notifyAll();
-
-    assert !hasExternalSegments();
-  }
-
-  /*
-   * Commits the transaction.  This will write the new
-   * segments file and remove and pending deletions we have
-   * accumulated during the transaction
-   */
-  private synchronized void commitTransaction() throws IOException {
-
-    if (infoStream != null)
-      message("now commit transaction");
-
-    // Give deleter a chance to remove files now:
-    checkpoint();
-
-    // Remove the incRef we did in startTransaction.
-    deleter.decRef(localRollbackSegmentInfos);
-
-    localRollbackSegmentInfos = null;
-
-    assert !hasExternalSegments();
-
-    finishAddIndexes();
-  }
-
   /**
    * Close the <code>IndexWriter</code> without committing
    * any changes that have occurred since the last commit
@@ -2860,8 +2639,6 @@ public class IndexWriter implements Clos
         segmentInfos.clear();
         segmentInfos.addAll(rollbackSegmentInfos);
 
-        assert !hasExternalSegments();
-        
         docWriter.abort();
 
         assert testPoint("rollback before checkpoint");
@@ -2963,12 +2740,6 @@ public class IndexWriter implements Clos
         merge.abort();
       }
 
-      // Ensure any running addIndexes finishes.  It's fine
-      // if a new one attempts to start because its merges
-      // will quickly see the stopMerges == true and abort.
-      acquireRead();
-      releaseRead();
-
       // These merges periodically check whether they have
       // been aborted, and stop if so.  We wait here to make
       // sure they all stop.  It should not take very long
@@ -3005,10 +2776,6 @@ public class IndexWriter implements Clos
    *    will have completed once this method completes.</p>
    */
   public synchronized void waitForMerges() {
-    // Ensure any running addIndexes finishes.
-    acquireRead();
-    releaseRead();
-
     while(pendingMerges.size() > 0 || runningMerges.size() > 0) {
       doWait();
     }
@@ -3017,7 +2784,7 @@ public class IndexWriter implements Clos
     assert 0 == mergingSegments.size();
   }
 
-  /*
+  /**
    * Called whenever the SegmentInfos has been updated and
    * the index files referenced exist (correctly) in the
    * index directory.
@@ -3027,31 +2794,6 @@ public class IndexWriter implements Clos
     deleter.checkpoint(segmentInfos, false);
   }
 
-  private void finishAddIndexes() {
-    releaseWrite();
-  }
-
-  private void blockAddIndexes() {
-
-    acquireRead();
-
-    boolean success = false;
-    try {
-
-      // Make sure we are still open since we could have
-      // waited quite a while for last addIndexes to finish
-      ensureOpen(false);
-      success = true;
-    } finally {
-      if (!success)
-        releaseRead();
-    }
-  }
-
-  private void resumeAddIndexes() {
-    releaseRead();
-  }
-
   private synchronized void resetMergeExceptions() {
     mergeExceptions = new ArrayList<MergePolicy.OneMerge>();
     mergeGen++;
@@ -3069,208 +2811,127 @@ public class IndexWriter implements Clos
   }
 
   /**
-   * Merges all segments from an array of indexes into this
-   * index.
+   * Adds all segments from an array of indexes into this index.
    *
-   * <p>This may be used to parallelize batch indexing.  A large document
-   * collection can be broken into sub-collections.  Each sub-collection can be
-   * indexed in parallel, on a different thread, process or machine.  The
+   * <p>This may be used to parallelize batch indexing. A large document
+   * collection can be broken into sub-collections. Each sub-collection can be
+   * indexed in parallel, on a different thread, process or machine. The
    * complete index can then be created by merging sub-collection indexes
    * with this method.
    *
-   * <p><b>NOTE:</b> the index in each Directory must not be
+   * <p>
+   * <b>NOTE:</b> the index in each {@link Directory} must not be
    * changed (opened by a writer) while this method is
    * running.  This method does not acquire a write lock in
    * each input Directory, so it is up to the caller to
    * enforce this.
    *
-   * <p><b>NOTE:</b> while this is running, any attempts to
-   * add or delete documents (with another thread) will be
-   * paused until this method completes.
-   *
    * <p>This method is transactional in how Exceptions are
    * handled: it does not commit a new segments_N file until
    * all indexes are added.  This means if an Exception
    * occurs (for example disk full), then either no indexes
-   * will have been added or they all will have been.</p>
+   * will have been added or they all will have been.
    *
    * <p>Note that this requires temporary free space in the
-   * Directory up to 2X the sum of all input indexes
-   * (including the starting index).  If readers/searchers
+   * {@link Directory} up to 2X the sum of all input indexes
+   * (including the starting index). If readers/searchers
    * are open against the starting index, then temporary
    * free space required will be higher by the size of the
    * starting index (see {@link #optimize()} for details).
-   * </p>
    *
-   * <p>Once this completes, the final size of the index
-   * will be less than the sum of all input index sizes
-   * (including the starting index).  It could be quite a
-   * bit smaller (if there were many pending deletes) or
-   * just slightly smaller.</p>
-   * 
    * <p>
-   * This requires this index not be among those to be added.
+   * <b>NOTE:</b> this method only copies the segments of the incoming indexes
+   * and does not merge them. Therefore deleted documents are not removed and
+   * the new segments are not merged with the existing ones. Also, the segments
+   * are copied as-is, meaning a non-CFS segment is not converted to CFS and
+   * vice versa. If you wish to change that, you can call {@link #maybeMerge}
+   * or {@link #optimize} afterwards.
+   * 
+   * <p>This requires this index not be among those to be added.
    *
-   * <p><b>NOTE</b>: if this method hits an OutOfMemoryError
-   * you should immediately close the writer.  See <a
-   * href="#OOME">above</a> for details.</p>
+   * <p>
+   * <b>NOTE</b>: if this method hits an OutOfMemoryError
+   * you should immediately close the writer. See <a
+   * href="#OOME">above</a> for details.
    *
    * @throws CorruptIndexException if the index is corrupt
    * @throws IOException if there is a low-level IO error
    */
-  public void addIndexesNoOptimize(Directory... dirs)
-      throws CorruptIndexException, IOException {
-
+  public void addIndexes(Directory... dirs) throws CorruptIndexException, IOException {
     ensureOpen();
 
     noDupDirs(dirs);
 
-    // Do not allow add docs or deletes while we are running:
-    docWriter.pauseAllThreads();
-
     try {
       if (infoStream != null)
-        message("flush at addIndexesNoOptimize");
+        message("flush at addIndexes(Directory...)");
       flush(true, false, true);
 
-      boolean success = false;
-
-      startTransaction(false);
+      int docCount = 0;
+      List<SegmentInfo> infos = new ArrayList<SegmentInfo>();
+      for (Directory dir : dirs) {
+        if (infoStream != null) {
+          message("process directory " + dir);
+        }
+        SegmentInfos sis = new SegmentInfos(); // read infos from dir
+        sis.read(dir);
+        Map<String, String> dsNames = new HashMap<String, String>();
+        for (SegmentInfo info : sis) {
+          assert !infos.contains(info): "dup info dir=" + info.dir + " name=" + info.name;
 
-      try {
+          if (infoStream != null) {
+            message("process segment=" + info.name);
+          }
+          docCount += info.docCount;
+          String newSegName = newSegmentName();
+          String dsName = info.getDocStoreSegment();
+
+          // Determine if the doc store of this segment needs to be copied. It's
+          // only relevant for segments that share a doc store with others, because
+          // the DS might have been copied already, in which case we just want
+          // to update the DS name of this SegmentInfo.
+          // NOTE: pre-3.x segments include a null DSName if they don't share a doc
+          // store. So the following code ensures we don't accidentally insert
+          // 'null' into the map.
+          String newDsName = newSegName;
+          boolean docStoreCopied = false;
+          if (dsNames.containsKey(dsName)) {
+            newDsName = dsNames.get(dsName);
+            docStoreCopied = true;
+          } else if (dsName != null) {
+            dsNames.put(dsName, newSegName);
+            docStoreCopied = false;
+          }
 
-        int docCount = 0;
-        synchronized(this) {
-          ensureOpen();
+          // Copy the segment files
+          for (String file : info.files()) {
+            if (docStoreCopied && IndexFileNames.isDocStoreFile(file)) {
+              continue;
+            } 
+            dir.copy(directory, file, newSegName + IndexFileNames.stripSegmentName(file));
+          }
 
-          for (int i = 0; i < dirs.length; i++) {
-            if (directory == dirs[i]) {
-              // cannot add this index: segments may be deleted in merge before added
-              throw new IllegalArgumentException("Cannot add this index to itself");
-            }
+          // Update SI appropriately
+          info.setDocStore(info.getDocStoreOffset(), newDsName, info.getDocStoreIsCompoundFile());
+          info.dir = directory;
+          info.name = newSegName;
 
-            SegmentInfos sis = new SegmentInfos(); // read infos from dir
-            sis.read(dirs[i], codecs);
-            for (int j = 0; j < sis.size(); j++) {
-              SegmentInfo info = sis.info(j);
-              assert !segmentInfos.contains(info): "dup info dir=" + info.dir + " name=" + info.name;
-              docCount += info.docCount;
-              segmentInfos.add(info); // add each info
-            }
-          }
+          infos.add(info);
         }
+      }      
 
+      synchronized (this) {
+        ensureOpen();
+        segmentInfos.addAll(infos);
         // Notify DocumentsWriter that the flushed count just increased
         docWriter.updateFlushedDocCount(docCount);
 
-        maybeMerge();
-
-        ensureOpen();
-
-        // If after merging there remain segments in the index
-        // that are in a different directory, just copy these
-        // over into our index.  This is necessary (before
-        // finishing the transaction) to avoid leaving the
-        // index in an unusable (inconsistent) state.
-        resolveExternalSegments();
-
-        ensureOpen();
-
-        success = true;
-
-      } finally {
-        if (success) {
-          commitTransaction();
-        } else {
-          rollbackTransaction();
-        }
-      }
-    } catch (OutOfMemoryError oom) {
-      handleOOM(oom, "addIndexesNoOptimize");
-    } finally {
-      if (docWriter != null) {
-        docWriter.resumeAllThreads();
-      }
-    }
-  }
-
-  private boolean hasExternalSegments() {
-    return segmentInfos.hasExternalSegments(directory);
-  }
-
-  /* If any of our segments are using a directory != ours
-   * then we have to either copy them over one by one, merge
-   * them (if merge policy has chosen to) or wait until
-   * currently running merges (in the background) complete.
-   * We don't return until the SegmentInfos has no more
-   * external segments.  Currently this is only used by
-   * addIndexesNoOptimize(). */
-  private void resolveExternalSegments() throws CorruptIndexException, IOException {
-
-    boolean any = false;
-
-    boolean done = false;
-
-    while(!done) {
-      SegmentInfo info = null;
-      MergePolicy.OneMerge merge = null;
-      synchronized(this) {
-
-        if (stopMerges)
-          throw new MergePolicy.MergeAbortedException("rollback() was called or addIndexes* hit an unhandled exception");
-
-        final int numSegments = segmentInfos.size();
-
-        done = true;
-        for(int i=0;i<numSegments;i++) {
-          info = segmentInfos.info(i);
-          if (info.dir != directory) {
-            done = false;
-            final MergePolicy.OneMerge newMerge = new MergePolicy.OneMerge(segmentInfos.range(i, 1+i), mergePolicy instanceof LogMergePolicy && getUseCompoundFile());
-
-            // Returns true if no running merge conflicts
-            // with this one (and, records this merge as
-            // pending), ie, this segment is not currently
-            // being merged:
-            if (registerMerge(newMerge)) {
-              merge = newMerge;
-
-              // If this segment is not currently being
-              // merged, then advance it to running & run
-              // the merge ourself (below):
-              pendingMerges.remove(merge);
-              runningMerges.add(merge);
-              break;
-            }
-          }
-        }
-
-        if (!done && merge == null)
-          // We are not yet done (external segments still
-          // exist in segmentInfos), yet, all such segments
-          // are currently "covered" by a pending or running
-          // merge.  We now try to grab any pending merge
-          // that involves external segments:
-          merge = getNextExternalMerge();
-
-        if (!done && merge == null)
-          // We are not yet done, and, all external segments
-          // fall under merges that the merge scheduler is
-          // currently running.  So, we now wait and check
-          // back to see if the merge has completed.
-          doWait();
+        checkpoint();
       }
 
-      if (merge != null) {
-        any = true;
-        merge(merge);
-      }
+    } catch (OutOfMemoryError oom) {
+      handleOOM(oom, "addIndexes(Directory...)");
     }
-
-    if (any)
-      // Sometimes, on copying an external segment over,
-      // more merges may become necessary:
-      mergeScheduler.merge(this);
   }
 
   /** Merges the provided indexes into this index.
@@ -3281,10 +2942,9 @@ public class IndexWriter implements Clos
    * add or delete documents (with another thread) will be
    * paused until this method completes.
    *
-   * <p>See {@link #addIndexesNoOptimize} for
-   * details on transactional semantics, temporary free
-   * space required in the Directory, and non-CFS segments
-   * on an Exception.</p>
+   * <p>See {@link #addIndexes} for details on transactional 
+   * semantics, temporary free space required in the Directory, 
+   * and non-CFS segments on an Exception.</p>
    *
    * <p><b>NOTE</b>: if this method hits an OutOfMemoryError
    * you should immediately close the writer.  See <a
@@ -3295,95 +2955,31 @@ public class IndexWriter implements Clos
    */
   public void addIndexes(IndexReader... readers)
     throws CorruptIndexException, IOException {
-
     ensureOpen();
 
-    // Do not allow add docs or deletes while we are running:
-    docWriter.pauseAllThreads();
-
-    // We must pre-acquire a read lock here (and upgrade to
-    // write lock in startTransaction below) so that no
-    // other addIndexes is allowed to start up after we have
-    // flushed & optimized but before we then start our
-    // transaction.  This is because the merging below
-    // requires that only one segment is present in the
-    // index:
-    acquireRead();
-
     try {
-
+      String mergedName = newSegmentName();
+      SegmentMerger merger = new SegmentMerger(directory, termIndexInterval,
+          mergedName, null, codecs, payloadProcessorProvider);
+      
+      for (IndexReader reader : readers)      // add new indexes
+        merger.add(reader);
+      
+      int docCount = merger.merge();                // merge 'em
+      
       SegmentInfo info = null;
-      String mergedName = null;
-      SegmentMerger merger = null;
-
-      boolean success = false;
-
-      try {
-        flush(true, false, true);
-        optimize();					  // start with zero or 1 seg
-        success = true;
-      } finally {
-        // Take care to release the read lock if we hit an
-        // exception before starting the transaction
-        if (!success)
-          releaseRead();
-      }
-
-      // true means we already have a read lock; if this
-      // call hits an exception it will release the write
-      // lock:
-      startTransaction(true);
-
-      try {
-        mergedName = newSegmentName();
-        merger = new SegmentMerger(directory, termIndexInterval, mergedName, null, codecs, payloadProcessorProvider);
-
-        SegmentReader sReader = null;
-        synchronized(this) {
-          if (segmentInfos.size() == 1) { // add existing index, if any
-            sReader = readerPool.get(segmentInfos.info(0), true, BufferedIndexInput.BUFFER_SIZE, -1);
-          }
-        }
+      synchronized(this) {
+        info = new SegmentInfo(mergedName, docCount, directory, false, true,
+            -1, null, false, merger.hasProx(), merger.getCodec());
+        setDiagnostics(info, "addIndexes(IndexReader...)");
+        segmentInfos.add(info);
+        checkpoint();
         
-        success = false;
-
-        try {
-          if (sReader != null)
-            merger.add(sReader);
-
-          for (int i = 0; i < readers.length; i++)      // add new indexes
-            merger.add(readers[i]);
-
-          int docCount = merger.merge();                // merge 'em
-
-          synchronized(this) {
-            segmentInfos.clear();                      // pop old infos & add new
-            info = new SegmentInfo(mergedName, docCount, directory, false, true,
-                                   -1, null, false, merger.hasProx(), merger.getCodec());
-            setDiagnostics(info, "addIndexes(IndexReader...)");
-            segmentInfos.add(info);
-          }
-
-          // Notify DocumentsWriter that the flushed count just increased
-          docWriter.updateFlushedDocCount(docCount);
-
-          success = true;
-
-        } finally {
-          if (sReader != null) {
-            readerPool.release(sReader);
-          }
-        }
-      } finally {
-        if (!success) {
-          if (infoStream != null)
-            message("hit exception in addIndexes during merge");
-          rollbackTransaction();
-        } else {
-          commitTransaction();
-        }
+        // Notify DocumentsWriter that the flushed count just increased
+        docWriter.updateFlushedDocCount(docCount);
       }
-    
+      
+      // Now create the compound file if needed
       if (mergePolicy instanceof LogMergePolicy && getUseCompoundFile()) {
 
         List<String> files = null;
@@ -3391,7 +2987,7 @@ public class IndexWriter implements Clos
         synchronized(this) {
           // Must incRef our files so that if another thread
           // is running merge/optimize, it doesn't delete our
-          // segment's files before we have a change to
+          // segment's files before we have a chance to
           // finish making the compound file.
           if (segmentInfos.contains(info)) {
             files = info.files();
@@ -3400,40 +2996,18 @@ public class IndexWriter implements Clos
         }
 
         if (files != null) {
-
-          success = false;
-
-          startTransaction(false);
-
           try {
             merger.createCompoundFile(mergedName + ".cfs", info);
             synchronized(this) {
               info.setUseCompoundFile(true);
             }
-          
-            success = true;
-          
           } finally {
-
             deleter.decRef(files);
-
-            if (!success) {
-              if (infoStream != null)
-                message("hit exception building compound file in addIndexes during merge");
-
-              rollbackTransaction();
-            } else {
-              commitTransaction();
-            }
           }
         }
       }
     } catch (OutOfMemoryError oom) {
       handleOOM(oom, "addIndexes(IndexReader...)");
-    } finally {
-      if (docWriter != null) {
-        docWriter.resumeAllThreads();
-      }
     }
   }
 
@@ -3787,7 +3361,7 @@ public class IndexWriter implements Clos
           if (!success) {
             if (infoStream != null)
               message("hit exception creating compound file for newly flushed segment " + segment);
-            deleter.deleteFile(IndexFileNames.segmentFileName(segment, IndexFileNames.COMPOUND_FILE_EXTENSION));
+            deleter.deleteFile(IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION));
           }
         }
 
@@ -4526,7 +4100,7 @@ public class IndexWriter implements Clos
     if (merge.useCompoundFile) {
 
       success = false;
-      final String compoundFileName = IndexFileNames.segmentFileName(mergedName, IndexFileNames.COMPOUND_FILE_EXTENSION);
+      final String compoundFileName = IndexFileNames.segmentFileName(mergedName, "", IndexFileNames.COMPOUND_FILE_EXTENSION);
 
       try {
         merger.createCompoundFile(compoundFileName, merge.info);
@@ -4697,51 +4271,36 @@ public class IndexWriter implements Clos
 
       synchronized(this) {
 
-        // Wait for any running addIndexes to complete
-        // first, then block any from running until we've
-        // copied the segmentInfos we intend to sync:
-        blockAddIndexes();
-
-        // On commit the segmentInfos must never
-        // reference a segment in another directory:
-        assert !hasExternalSegments();
-
-        try {
-
-          assert lastCommitChangeCount <= changeCount;
-
-          if (changeCount == lastCommitChangeCount) {
-            if (infoStream != null)
-              message("  skip startCommit(): no changes pending");
-            return;
-          }
-
-          // First, we clone & incref the segmentInfos we intend
-          // to sync, then, without locking, we sync() each file
-          // referenced by toSync, in the background.  Multiple
-          // threads can be doing this at once, if say a large
-          // merge and a small merge finish at the same time:
-
+        assert lastCommitChangeCount <= changeCount;
+        
+        if (changeCount == lastCommitChangeCount) {
           if (infoStream != null)
-            message("startCommit index=" + segString(segmentInfos) + " changeCount=" + changeCount);
-          
-          readerPool.commit();
-          
-          toSync = (SegmentInfos) segmentInfos.clone();
-
-          if (commitUserData != null)
-            toSync.setUserData(commitUserData);
-
-          deleter.incRef(toSync, false);
-          myChangeCount = changeCount;
-
-          Collection<String> files = toSync.files(directory, false);
-          for(final String fileName: files) {
-            assert directory.fileExists(fileName): "file " + fileName + " does not exist";
-          }
-
-        } finally {
-          resumeAddIndexes();
+            message("  skip startCommit(): no changes pending");
+          return;
+        }
+        
+        // First, we clone & incref the segmentInfos we intend
+        // to sync, then, without locking, we sync() each file
+        // referenced by toSync, in the background.  Multiple
+        // threads can be doing this at once, if say a large
+        // merge and a small merge finish at the same time:
+        
+        if (infoStream != null)
+          message("startCommit index=" + segString(segmentInfos) + " changeCount=" + changeCount);
+        
+        readerPool.commit();
+        
+        toSync = (SegmentInfos) segmentInfos.clone();
+        
+        if (commitUserData != null)
+          toSync.setUserData(commitUserData);
+        
+        deleter.incRef(toSync, false);
+        myChangeCount = changeCount;
+        
+        Collection<String> files = toSync.files(directory, false);
+        for(final String fileName: files) {
+          assert directory.fileExists(fileName): "file " + fileName + " does not exist";
         }
       }
 
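The comment kept in the rewritten startCommit describes a two-phase pattern:
take a point-in-time snapshot of the index state while holding the writer
lock, then fsync the snapshot's files outside the lock so that, say, a large
merge and a small merge can commit concurrently. A self-contained toy (not
Lucene code; names follow the hunk, details simplified) of that shape:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class SnapshotCommitDemo {
      private long changeCount = 1, lastCommitChangeCount = 0;
      private final List<String> segmentFiles =
          new ArrayList<String>(Arrays.asList("_3.cfs", "segments_4"));

      // Under the lock: decide whether anything is pending and, if so, copy
      // (clone & incref in the real code) what we intend to sync so later
      // index changes cannot touch it.
      public synchronized List<String> snapshotOrNull() {
        if (changeCount == lastCommitChangeCount) {
          return null; // "skip startCommit(): no changes pending"
        }
        return new ArrayList<String>(segmentFiles);
      }

      public static void main(String[] args) {
        SnapshotCommitDemo w = new SnapshotCommitDemo();
        List<String> toSync = w.snapshotOrNull();
        // Outside the lock: sync each file; multiple threads can be doing
        // this at once for different merges' commits.
        System.out.println("would sync: " + toSync);
      }
    }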
@@ -5013,10 +4572,10 @@ public class IndexWriter implements Clos
    * Sets the {@link PayloadProcessorProvider} to use when merging payloads.
    * Note that the given <code>pcp</code> will be invoked for every segment that
    * is merged, not only external ones that are given through
-   * {@link IndexWriter#addIndexes} or {@link IndexWriter#addIndexesNoOptimize}.
-   * If you want only the payloads of the external segments to be processed, you
-   * can return <code>null</code> whenever a {@link DirPayloadProcessor} is
-   * requested for the {@link Directory} of the {@link IndexWriter}.
+   * {@link #addIndexes}. If you want only the payloads of the external segments
+   * to be processed, you can return <code>null</code> whenever a
+   * {@link DirPayloadProcessor} is requested for the {@link Directory} of the
+   * {@link IndexWriter}.
    * <p>
    * The default is <code>null</code> which means payloads are processed
    * normally (copied) during segment merges. You can also unset it by passing

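The reworked javadoc suggests one concrete usage: to rewrite only payloads
coming from external indexes, answer null whenever the provider is asked
about the writer's own Directory. A minimal sketch, assuming the abstract
factory method is getDirProcessor(Directory) returning the
DirPayloadProcessor named above (that signature is not shown in this diff):

    import java.io.IOException;

    import org.apache.lucene.index.PayloadProcessorProvider;
    import org.apache.lucene.store.Directory;

    public abstract class ExternalOnlyProvider extends PayloadProcessorProvider {
      private final Directory writerDir;

      protected ExternalOnlyProvider(Directory writerDir) {
        this.writerDir = writerDir;
      }

      @Override
      public DirPayloadProcessor getDirProcessor(Directory dir) throws IOException {
        if (dir == writerDir) {
          return null; // the writer's own payloads are copied unchanged
        }
        return externalProcessor(dir);
      }

      // Hypothetical hook: how to build a DirPayloadProcessor for an
      // external segment is outside the scope of this commit.
      protected abstract DirPayloadProcessor externalProcessor(Directory dir)
          throws IOException;
    }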
Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/NormsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/NormsWriter.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/NormsWriter.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/NormsWriter.java Thu May 27 15:36:32 2010
@@ -88,7 +88,7 @@ final class NormsWriter extends Inverted
       }
     }
 
-    final String normsFileName = IndexFileNames.segmentFileName(state.segmentName, IndexFileNames.NORMS_EXTENSION);
+    final String normsFileName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.NORMS_EXTENSION);
     state.flushedFiles.add(normsFileName);
     IndexOutput normsOut = state.directory.createOutput(normsFileName);
 

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentInfo.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentInfo.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentInfo.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentInfo.java Thu May 27 15:36:32 2010
@@ -490,7 +490,7 @@ public final class SegmentInfo {
     } else if (isCompoundFile == YES) {
       return true;
     } else {
-      return dir.fileExists(IndexFileNames.segmentFileName(name, IndexFileNames.COMPOUND_FILE_EXTENSION));
+      return dir.fileExists(IndexFileNames.segmentFileName(name, "", IndexFileNames.COMPOUND_FILE_EXTENSION));
     }
   }
 
@@ -537,6 +537,7 @@ public final class SegmentInfo {
     docStoreOffset = offset;
     docStoreSegment = segment;
     docStoreIsCompoundFile = isCompoundFile;
+    clearFiles();
   }
   
   /**
@@ -615,10 +616,10 @@ public final class SegmentInfo {
     boolean useCompoundFile = getUseCompoundFile();
 
     if (useCompoundFile) {
-      fileSet.add(IndexFileNames.segmentFileName(name, IndexFileNames.COMPOUND_FILE_EXTENSION));
+      fileSet.add(IndexFileNames.segmentFileName(name, "", IndexFileNames.COMPOUND_FILE_EXTENSION));
     } else {
       for(String ext : IndexFileNames.NON_STORE_INDEX_EXTENSIONS) {
-        addIfExists(fileSet, IndexFileNames.segmentFileName(name, ext));
+        addIfExists(fileSet, IndexFileNames.segmentFileName(name, "", ext));
       }
       codec.files(dir, this, fileSet);
     }
@@ -628,14 +629,14 @@ public final class SegmentInfo {
       // vectors) with other segments
       assert docStoreSegment != null;
       if (docStoreIsCompoundFile) {
-        fileSet.add(IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.COMPOUND_FILE_STORE_EXTENSION));
+        fileSet.add(IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.COMPOUND_FILE_STORE_EXTENSION));
       } else {
         for (String ext : IndexFileNames.STORE_INDEX_EXTENSIONS)
-          addIfExists(fileSet, IndexFileNames.segmentFileName(docStoreSegment, ext));
+          addIfExists(fileSet, IndexFileNames.segmentFileName(docStoreSegment, "", ext));
       }
     } else if (!useCompoundFile) {
       for (String ext : IndexFileNames.STORE_INDEX_EXTENSIONS)
-        addIfExists(fileSet, IndexFileNames.segmentFileName(name, ext));
+        addIfExists(fileSet, IndexFileNames.segmentFileName(name, "", ext));
     }
 
     String delFileName = IndexFileNames.fileNameFromGeneration(name, IndexFileNames.DELETES_EXTENSION, delGen);
@@ -654,7 +655,7 @@ public final class SegmentInfo {
           // No separate norms but maybe plain norms
           // in the non compound file case:
           if (!hasSingleNormFile && !useCompoundFile) {
-            String fileName = IndexFileNames.segmentFileName(name, IndexFileNames.PLAIN_NORMS_EXTENSION + i);
+            String fileName = IndexFileNames.segmentFileName(name, "", IndexFileNames.PLAIN_NORMS_EXTENSION + i);
             if (dir.fileExists(fileName)) {
               fileSet.add(fileName);
             }
@@ -663,9 +664,9 @@ public final class SegmentInfo {
           // Pre-2.1: we have to check file existence
           String fileName = null;
           if (useCompoundFile) {
-            fileName = IndexFileNames.segmentFileName(name, IndexFileNames.SEPARATE_NORMS_EXTENSION + i);
+            fileName = IndexFileNames.segmentFileName(name, "", IndexFileNames.SEPARATE_NORMS_EXTENSION + i);
           } else if (!hasSingleNormFile) {
-            fileName = IndexFileNames.segmentFileName(name, IndexFileNames.PLAIN_NORMS_EXTENSION + i);
+            fileName = IndexFileNames.segmentFileName(name, "", IndexFileNames.PLAIN_NORMS_EXTENSION + i);
           }
           if (fileName != null && dir.fileExists(fileName)) {
             fileSet.add(fileName);
@@ -677,9 +678,9 @@ public final class SegmentInfo {
       // matching _X.sN/_X.fN files for our segment:
       String prefix;
       if (useCompoundFile) {
-        prefix = IndexFileNames.segmentFileName(name, IndexFileNames.SEPARATE_NORMS_EXTENSION);
+        prefix = IndexFileNames.segmentFileName(name, "", IndexFileNames.SEPARATE_NORMS_EXTENSION);
       } else {
-        prefix = IndexFileNames.segmentFileName(name, IndexFileNames.PLAIN_NORMS_EXTENSION);
+        prefix = IndexFileNames.segmentFileName(name, "", IndexFileNames.PLAIN_NORMS_EXTENSION);
       }
       final String pattern = prefix + "\\d+";
 

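For context on the pattern matching at the end of that hunk: the loop index i
appended to the extension is a field number, so the probed names look like
_3.s7 (separate norms, compound-file case) or _3.f7 (plain norms, non-CFS
case) for field 7 of segment _3, and the regex prefix + "\\d+" sweeps up
every such per-field norms file for the segment in one pass.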
Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentInfos.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentInfos.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentInfos.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentInfos.java Thu May 27 15:36:32 2010
@@ -935,15 +935,6 @@ public final class SegmentInfos extends 
     lastGeneration = other.lastGeneration;
   }
 
-  // Used only for testing
-  public boolean hasExternalSegments(Directory dir) {
-    final int numSegments = size();
-    for(int i=0;i<numSegments;i++)
-      if (info(i).dir != dir)
-        return true;
-    return false;
-  }
-
   /** Returns the sum of all segments' docCounts.  Note that
    *  this does not include deletions */
   public int totalDocCount() {

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentMerger.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentMerger.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentMerger.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentMerger.java Thu May 27 15:36:32 2010
@@ -181,7 +181,7 @@ final class SegmentMerger {
     for (String ext : IndexFileNames.COMPOUND_EXTENSIONS_NOT_CODEC) {
       if (mergeDocStores || (!ext.equals(IndexFileNames.FIELDS_EXTENSION) &&
                              !ext.equals(IndexFileNames.FIELDS_INDEX_EXTENSION)))
-        fileSet.add(IndexFileNames.segmentFileName(segment, ext));
+        fileSet.add(IndexFileNames.segmentFileName(segment, "", ext));
     }
 
     codec.files(directory, info, fileSet);
@@ -191,7 +191,7 @@ final class SegmentMerger {
     for (int i = 0; i < numFIs; i++) {
       FieldInfo fi = fieldInfos.fieldInfo(i);
       if (fi.isIndexed && !fi.omitNorms) {
-        fileSet.add(IndexFileNames.segmentFileName(segment, IndexFileNames.NORMS_EXTENSION));
+        fileSet.add(IndexFileNames.segmentFileName(segment, "", IndexFileNames.NORMS_EXTENSION));
         break;
       }
     }
@@ -199,7 +199,7 @@ final class SegmentMerger {
     // Vector files
     if (fieldInfos.hasVectors() && mergeDocStores) {
       for (String ext : IndexFileNames.VECTOR_EXTENSIONS) {
-        fileSet.add(IndexFileNames.segmentFileName(segment, ext));
+        fileSet.add(IndexFileNames.segmentFileName(segment, "", ext));
       }
     }
 
@@ -337,7 +337,7 @@ final class SegmentMerger {
         fieldsWriter.close();
       }
 
-      final String fileName = IndexFileNames.segmentFileName(segment, IndexFileNames.FIELDS_INDEX_EXTENSION);
+      final String fileName = IndexFileNames.segmentFileName(segment, "", IndexFileNames.FIELDS_INDEX_EXTENSION);
       final long fdxFileLength = directory.fileLength(fileName);
 
       if (4+((long) docCount)*8 != fdxFileLength)
@@ -468,7 +468,7 @@ final class SegmentMerger {
       termVectorsWriter.close();
     }
 
-    final String fileName = IndexFileNames.segmentFileName(segment, IndexFileNames.VECTORS_INDEX_EXTENSION);
+    final String fileName = IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_INDEX_EXTENSION);
     final long tvxSize = directory.fileLength(fileName);
 
     if (4+((long) mergedDocs)*16 != tvxSize)
@@ -681,7 +681,7 @@ final class SegmentMerger {
         FieldInfo fi = fieldInfos.fieldInfo(i);
         if (fi.isIndexed && !fi.omitNorms) {
           if (output == null) { 
-            output = directory.createOutput(IndexFileNames.segmentFileName(segment, IndexFileNames.NORMS_EXTENSION));
+            output = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.NORMS_EXTENSION));
             output.writeBytes(NORMS_HEADER,NORMS_HEADER.length);
           }
           for ( IndexReader reader : readers) {

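The two length checks in this file follow directly from the fixed layout of
the stored-fields index (.fdx) and the term-vectors index (.tvx): a 4-byte
format header, then one fixed-width entry per document, 8 bytes per doc in
.fdx (presumably one long file pointer) and 16 bytes per doc in .tvx
(presumably two long pointers). A merge that writes 1,000 documents must
therefore leave an .fdx of exactly 4 + 1000*8 = 8,004 bytes and a .tvx of
4 + 1000*16 = 16,004 bytes; anything else means a document was lost or
duplicated along the way.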
Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentReader.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentReader.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentReader.java Thu May 27 15:36:32 2010
@@ -127,12 +127,12 @@ public class SegmentReader extends Index
       try {
         Directory dir0 = dir;
         if (si.getUseCompoundFile()) {
-          cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
+          cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
           dir0 = cfsReader;
         }
         cfsDir = dir0;
 
-        fieldInfos = new FieldInfos(cfsDir, IndexFileNames.segmentFileName(segment, IndexFileNames.FIELD_INFOS_EXTENSION));
+        fieldInfos = new FieldInfos(cfsDir, IndexFileNames.segmentFileName(segment, "", IndexFileNames.FIELD_INFOS_EXTENSION));
 
         this.termsIndexDivisor = termsIndexDivisor;
 
@@ -216,7 +216,7 @@ public class SegmentReader extends Index
           if (si.getDocStoreIsCompoundFile()) {
             assert storeCFSReader == null;
             storeCFSReader = new CompoundFileReader(dir,
-                IndexFileNames.segmentFileName(si.getDocStoreSegment(), IndexFileNames.COMPOUND_FILE_STORE_EXTENSION),
+                IndexFileNames.segmentFileName(si.getDocStoreSegment(), "", IndexFileNames.COMPOUND_FILE_STORE_EXTENSION),
                                                     readBufferSize);
             storeDir = storeCFSReader;
             assert storeDir != null;
@@ -229,7 +229,7 @@ public class SegmentReader extends Index
           // was not used, but then we are asked to open doc
           // stores after the segment has switched to CFS
           if (cfsReader == null) {
-            cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
+            cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
           }
           storeDir = cfsReader;
           assert storeDir != null;

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/StoredFieldsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/StoredFieldsWriter.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/StoredFieldsWriter.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/StoredFieldsWriter.java Thu May 27 15:36:32 2010
@@ -67,8 +67,8 @@ final class StoredFieldsWriter {
         fieldsWriter = new FieldsWriter(docWriter.directory,
                                         docStoreSegment,
                                         fieldInfos);
-        docWriter.addOpenFile(IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.FIELDS_EXTENSION));
-        docWriter.addOpenFile(IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.FIELDS_INDEX_EXTENSION));
+        docWriter.addOpenFile(IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.FIELDS_EXTENSION));
+        docWriter.addOpenFile(IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.FIELDS_INDEX_EXTENSION));
         lastDocID = 0;
       }
     }
@@ -88,8 +88,8 @@ final class StoredFieldsWriter {
       assert state.docStoreSegmentName != null;
       assert docStoreSegment.equals(state.docStoreSegmentName): "fieldsWriter wrote to segment=" + docStoreSegment + " vs SegmentWriteState segment=" + state.docStoreSegmentName;
       lastDocID = 0;
-      String fieldsName = IndexFileNames.segmentFileName(state.docStoreSegmentName, IndexFileNames.FIELDS_EXTENSION);
-      String fieldsIdxName = IndexFileNames.segmentFileName(state.docStoreSegmentName, IndexFileNames.FIELDS_INDEX_EXTENSION);
+      String fieldsName = IndexFileNames.segmentFileName(state.docStoreSegmentName, "", IndexFileNames.FIELDS_EXTENSION);
+      String fieldsIdxName = IndexFileNames.segmentFileName(state.docStoreSegmentName, "", IndexFileNames.FIELDS_INDEX_EXTENSION);
       state.flushedFiles.add(fieldsName);
       state.flushedFiles.add(fieldsIdxName);
 

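Both hunks in this file key the stored-fields file names off the doc-store
segment rather than the segment being flushed: when several in-memory
segments share a doc store, the .fdt/.fdx files belong to docStoreSegment,
which is why the open-file tracking uses docStoreSegment, the flush
bookkeeping uses state.docStoreSegmentName, and the assert checks that the
two agree.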
Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsReader.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsReader.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsReader.java Thu May 27 15:36:32 2010
@@ -75,13 +75,13 @@ class TermVectorsReader implements Clone
     boolean success = false;
 
     try {
-      String idxName = IndexFileNames.segmentFileName(segment, IndexFileNames.VECTORS_INDEX_EXTENSION);
+      String idxName = IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_INDEX_EXTENSION);
       if (d.fileExists(idxName)) {
         tvx = d.openInput(idxName, readBufferSize);
         format = checkValidFormat(tvx);
-        tvd = d.openInput(IndexFileNames.segmentFileName(segment, IndexFileNames.VECTORS_DOCUMENTS_EXTENSION), readBufferSize);
+        tvd = d.openInput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION), readBufferSize);
         final int tvdFormat = checkValidFormat(tvd);
-        tvf = d.openInput(IndexFileNames.segmentFileName(segment, IndexFileNames.VECTORS_FIELDS_EXTENSION), readBufferSize);
+        tvf = d.openInput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION), readBufferSize);
         final int tvfFormat = checkValidFormat(tvf);
 
         assert format == tvdFormat;

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java Thu May 27 15:36:32 2010
@@ -85,12 +85,12 @@ final class TermVectorsTermsWriter exten
       tvd.close();
       tvx = null;
       assert state.docStoreSegmentName != null;
-      String idxName = IndexFileNames.segmentFileName(state.docStoreSegmentName, IndexFileNames.VECTORS_INDEX_EXTENSION);
+      String idxName = IndexFileNames.segmentFileName(state.docStoreSegmentName, "", IndexFileNames.VECTORS_INDEX_EXTENSION);
       if (4+((long) state.numDocsInStore)*16 != state.directory.fileLength(idxName))
         throw new RuntimeException("after flush: tvx size mismatch: " + state.numDocsInStore + " docs vs " + state.directory.fileLength(idxName) + " length in bytes of " + idxName + " file exists?=" + state.directory.fileExists(idxName));
 
-      String fldName = IndexFileNames.segmentFileName(state.docStoreSegmentName, IndexFileNames.VECTORS_FIELDS_EXTENSION);
-      String docName = IndexFileNames.segmentFileName(state.docStoreSegmentName, IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
+      String fldName = IndexFileNames.segmentFileName(state.docStoreSegmentName, "", IndexFileNames.VECTORS_FIELDS_EXTENSION);
+      String docName = IndexFileNames.segmentFileName(state.docStoreSegmentName, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
       state.flushedFiles.add(idxName);
       state.flushedFiles.add(fldName);
       state.flushedFiles.add(docName);
@@ -148,9 +148,9 @@ final class TermVectorsTermsWriter exten
       // vector output files, we must abort this segment
       // because those files will be in an unknown
       // state:
-      String idxName = IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.VECTORS_INDEX_EXTENSION);
-      String docName = IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
-      String fldName = IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.VECTORS_FIELDS_EXTENSION);
+      String idxName = IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_INDEX_EXTENSION);
+      String docName = IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
+      String fldName = IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION);
       tvx = docWriter.directory.createOutput(idxName);
       tvd = docWriter.directory.createOutput(docName);
       tvf = docWriter.directory.createOutput(fldName);

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsWriter.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsWriter.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsWriter.java Thu May 27 15:36:32 2010
@@ -35,11 +35,11 @@ final class TermVectorsWriter {
                            FieldInfos fieldInfos)
     throws IOException {
     // Open files for TermVector storage
-    tvx = directory.createOutput(IndexFileNames.segmentFileName(segment, IndexFileNames.VECTORS_INDEX_EXTENSION));
+    tvx = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
     tvx.writeInt(TermVectorsReader.FORMAT_CURRENT);
-    tvd = directory.createOutput(IndexFileNames.segmentFileName(segment, IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
+    tvd = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
     tvd.writeInt(TermVectorsReader.FORMAT_CURRENT);
-    tvf = directory.createOutput(IndexFileNames.segmentFileName(segment, IndexFileNames.VECTORS_FIELDS_EXTENSION));
+    tvf = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
     tvf.writeInt(TermVectorsReader.FORMAT_CURRENT);
 
     this.fieldInfos = fieldInfos;

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexFields.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexFields.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexFields.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexFields.java Thu May 27 15:36:32 2010
@@ -95,16 +95,16 @@ public class PreFlexFields extends Field
   }
 
   static void files(Directory dir, SegmentInfo info, Collection<String> files) throws IOException {
-    files.add(IndexFileNames.segmentFileName(info.name, PreFlexCodec.TERMS_EXTENSION));
-    files.add(IndexFileNames.segmentFileName(info.name, PreFlexCodec.TERMS_INDEX_EXTENSION));
-    files.add(IndexFileNames.segmentFileName(info.name, PreFlexCodec.FREQ_EXTENSION));
+    files.add(IndexFileNames.segmentFileName(info.name, "", PreFlexCodec.TERMS_EXTENSION));
+    files.add(IndexFileNames.segmentFileName(info.name, "", PreFlexCodec.TERMS_INDEX_EXTENSION));
+    files.add(IndexFileNames.segmentFileName(info.name, "", PreFlexCodec.FREQ_EXTENSION));
     if (info.getHasProx()) {
       // LUCENE-1739: for certain versions of 2.9-dev,
       // hasProx would be incorrectly computed during
       // indexing as true, and then stored into the segments
       // file, when it should have been false.  So we do the
       // extra check, here:
-      final String prx = IndexFileNames.segmentFileName(info.name, PreFlexCodec.PROX_EXTENSION);
+      final String prx = IndexFileNames.segmentFileName(info.name, "", PreFlexCodec.PROX_EXTENSION);
       if (dir.fileExists(prx)) {
         files.add(prx);
       }
@@ -145,7 +145,7 @@ public class PreFlexFields extends Field
         // to CFS
 
         if (!(dir instanceof CompoundFileReader)) {
-          dir0 = cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(si.name, IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
+          dir0 = cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(si.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
         } else {
           dir0 = dir;
         }

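The extra fileExists check above matters because files() feeds the deleter's
reference counting and the commit-time assert seen earlier in this diff
("file ... does not exist"): listing a .prx file that a buggy 2.9-dev writer
never actually produced would presumably fail at commit time, so existence is
verified instead of trusting the stored hasProx flag.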
Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/preflex/TermInfosReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/preflex/TermInfosReader.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/preflex/TermInfosReader.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/preflex/TermInfosReader.java Thu May 27 15:36:32 2010
@@ -102,7 +102,7 @@ public final class TermInfosReader {
       segment = seg;
       fieldInfos = fis;
 
-      origEnum = new SegmentTermEnum(directory.openInput(IndexFileNames.segmentFileName(segment, PreFlexCodec.TERMS_EXTENSION),
+      origEnum = new SegmentTermEnum(directory.openInput(IndexFileNames.segmentFileName(segment, "", PreFlexCodec.TERMS_EXTENSION),
                                                          readBufferSize), fieldInfos, false);
       size = origEnum.size;
 
@@ -110,7 +110,7 @@ public final class TermInfosReader {
       if (indexDivisor != -1) {
         // Load terms index
         totalIndexInterval = origEnum.indexInterval * indexDivisor;
-        final SegmentTermEnum indexEnum = new SegmentTermEnum(directory.openInput(IndexFileNames.segmentFileName(segment, PreFlexCodec.TERMS_INDEX_EXTENSION),
+        final SegmentTermEnum indexEnum = new SegmentTermEnum(directory.openInput(IndexFileNames.segmentFileName(segment, "", PreFlexCodec.TERMS_INDEX_EXTENSION),
                                                                                   readBufferSize), fieldInfos, true);
 
         try {

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsReaderImpl.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsReaderImpl.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsReaderImpl.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsReaderImpl.java Thu May 27 15:36:32 2010
@@ -59,15 +59,15 @@ public class SepPostingsReaderImpl exten
     boolean success = false;
     try {
 
-      final String docFileName = IndexFileNames.segmentFileName(segmentInfo.name, SepCodec.DOC_EXTENSION);
+      final String docFileName = IndexFileNames.segmentFileName(segmentInfo.name, "", SepCodec.DOC_EXTENSION);
       docIn = intFactory.openInput(dir, docFileName);
 
-      skipIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, SepCodec.SKIP_EXTENSION), readBufferSize);
+      skipIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, "", SepCodec.SKIP_EXTENSION), readBufferSize);
 
       if (segmentInfo.getHasProx()) {
-        freqIn = intFactory.openInput(dir, IndexFileNames.segmentFileName(segmentInfo.name, SepCodec.FREQ_EXTENSION));
-        posIn = intFactory.openInput(dir, IndexFileNames.segmentFileName(segmentInfo.name, SepCodec.POS_EXTENSION), readBufferSize);
-        payloadIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, SepCodec.PAYLOAD_EXTENSION), readBufferSize);
+        freqIn = intFactory.openInput(dir, IndexFileNames.segmentFileName(segmentInfo.name, "", SepCodec.FREQ_EXTENSION));
+        posIn = intFactory.openInput(dir, IndexFileNames.segmentFileName(segmentInfo.name, "", SepCodec.POS_EXTENSION), readBufferSize);
+        payloadIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, "", SepCodec.PAYLOAD_EXTENSION), readBufferSize);
       } else {
         posIn = null;
         payloadIn = null;
@@ -82,13 +82,13 @@ public class SepPostingsReaderImpl exten
   }
 
   public static void files(SegmentInfo segmentInfo, Collection<String> files) {
-    files.add(IndexFileNames.segmentFileName(segmentInfo.name, SepCodec.DOC_EXTENSION));
-    files.add(IndexFileNames.segmentFileName(segmentInfo.name, SepCodec.SKIP_EXTENSION));
+    files.add(IndexFileNames.segmentFileName(segmentInfo.name, "", SepCodec.DOC_EXTENSION));
+    files.add(IndexFileNames.segmentFileName(segmentInfo.name, "", SepCodec.SKIP_EXTENSION));
 
     if (segmentInfo.getHasProx()) {
-      files.add(IndexFileNames.segmentFileName(segmentInfo.name, SepCodec.FREQ_EXTENSION));
-      files.add(IndexFileNames.segmentFileName(segmentInfo.name, SepCodec.POS_EXTENSION));
-      files.add(IndexFileNames.segmentFileName(segmentInfo.name, SepCodec.PAYLOAD_EXTENSION));
+      files.add(IndexFileNames.segmentFileName(segmentInfo.name, "", SepCodec.FREQ_EXTENSION));
+      files.add(IndexFileNames.segmentFileName(segmentInfo.name, "", SepCodec.POS_EXTENSION));
+      files.add(IndexFileNames.segmentFileName(segmentInfo.name, "", SepCodec.PAYLOAD_EXTENSION));
     }
   }
 

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsWriterImpl.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsWriterImpl.java?rev=948861&r1=948860&r2=948861&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsWriterImpl.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsWriterImpl.java Thu May 27 15:36:32 2010
@@ -76,24 +76,24 @@ public final class SepPostingsWriterImpl
   public SepPostingsWriterImpl(SegmentWriteState state, IntStreamFactory factory) throws IOException {
     super();
 
-    final String docFileName = IndexFileNames.segmentFileName(state.segmentName, SepCodec.DOC_EXTENSION);
+    final String docFileName = IndexFileNames.segmentFileName(state.segmentName, "", SepCodec.DOC_EXTENSION);
     state.flushedFiles.add(docFileName);
     docOut = factory.createOutput(state.directory, docFileName);
     docIndex = docOut.index();
 
     if (state.fieldInfos.hasProx()) {
-      final String frqFileName = IndexFileNames.segmentFileName(state.segmentName, SepCodec.FREQ_EXTENSION);
+      final String frqFileName = IndexFileNames.segmentFileName(state.segmentName, "", SepCodec.FREQ_EXTENSION);
       state.flushedFiles.add(frqFileName);
       freqOut = factory.createOutput(state.directory, frqFileName);
       freqIndex = freqOut.index();
 
-      final String posFileName = IndexFileNames.segmentFileName(state.segmentName, SepCodec.POS_EXTENSION);
+      final String posFileName = IndexFileNames.segmentFileName(state.segmentName, "", SepCodec.POS_EXTENSION);
       posOut = factory.createOutput(state.directory, posFileName);
       state.flushedFiles.add(posFileName);
       posIndex = posOut.index();
 
       // TODO: -- only if at least one field stores payloads?
-      final String payloadFileName = IndexFileNames.segmentFileName(state.segmentName, SepCodec.PAYLOAD_EXTENSION);
+      final String payloadFileName = IndexFileNames.segmentFileName(state.segmentName, "", SepCodec.PAYLOAD_EXTENSION);
       state.flushedFiles.add(payloadFileName);
       payloadOut = state.directory.createOutput(payloadFileName);
 
@@ -105,7 +105,7 @@ public final class SepPostingsWriterImpl
       payloadOut = null;
     }
 
-    final String skipFileName = IndexFileNames.segmentFileName(state.segmentName, SepCodec.SKIP_EXTENSION);
+    final String skipFileName = IndexFileNames.segmentFileName(state.segmentName, "", SepCodec.SKIP_EXTENSION);
     state.flushedFiles.add(skipFileName);
     skipOut = state.directory.createOutput(skipFileName);