Posted to commits@lucene.apache.org by si...@apache.org on 2011/05/31 13:25:50 UTC

svn commit: r1129631 [3/7] - in /lucene/dev/branches/docvalues: ./ dev-tools/eclipse/ dev-tools/idea/.idea/ dev-tools/idea/lucene/contrib/spellchecker/ dev-tools/idea/modules/suggest/ dev-tools/maven/lucene/contrib/ dev-tools/maven/lucene/contrib/spell...

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java Tue May 31 11:25:37 2011
@@ -228,14 +228,19 @@ final class DocumentsWriter {
       }
 
       final Iterator<ThreadState> threadsIterator = perThreadPool.getActivePerThreadsIterator();
-
       while (threadsIterator.hasNext()) {
-        ThreadState perThread = threadsIterator.next();
+        final ThreadState perThread = threadsIterator.next();
         perThread.lock();
         try {
           if (perThread.isActive()) { // we might be closed
-            perThread.perThread.abort();
-            perThread.perThread.checkAndResetHasAborted();
+            try {
+              perThread.perThread.abort();
+            } catch (IOException ex) {
+              // continue
+            } finally {
+              perThread.perThread.checkAndResetHasAborted();
+              flushControl.doOnAbort(perThread);
+            }
           } else {
             assert closed;
           }
@@ -243,7 +248,6 @@ final class DocumentsWriter {
           perThread.unlock();
         }
       }
-
       success = true;
     } finally {
       if (infoStream != null) {
@@ -274,11 +278,9 @@ final class DocumentsWriter {
     flushControl.setClosed();
   }
 
-  boolean updateDocument(final Document doc, final Analyzer analyzer,
-      final Term delTerm) throws CorruptIndexException, IOException {
+  private boolean preUpdate() throws CorruptIndexException, IOException {
     ensureOpen();
     boolean maybeMerge = false;
-    final boolean isUpdate = delTerm != null;
     if (flushControl.anyStalledThreads() || flushControl.numQueuedFlushes() > 0) {
       // Help out flushing any queued DWPTs so we can un-stall:
       if (infoStream != null) {
@@ -303,13 +305,30 @@ final class DocumentsWriter {
         message("continue indexing after helpling out flushing DocumentsWriter is healthy");
       }
     }
+    return maybeMerge;
+  }
+
+  private boolean postUpdate(DocumentsWriterPerThread flushingDWPT, boolean maybeMerge) throws IOException {
+    if (flushingDWPT != null) {
+      maybeMerge |= doFlush(flushingDWPT);
+    } else {
+      final DocumentsWriterPerThread nextPendingFlush = flushControl.nextPendingFlush();
+      if (nextPendingFlush != null) {
+        maybeMerge |= doFlush(nextPendingFlush);
+      }
+    }
 
-    final ThreadState perThread = perThreadPool.getAndLock(Thread.currentThread(),
-        this, doc);
+    return maybeMerge;
+  }
+
+  boolean updateDocuments(final Iterable<Document> docs, final Analyzer analyzer,
+                          final Term delTerm) throws CorruptIndexException, IOException {
+    boolean maybeMerge = preUpdate();
+
+    final ThreadState perThread = perThreadPool.getAndLock(Thread.currentThread(), this);
     final DocumentsWriterPerThread flushingDWPT;
     
     try {
-
       if (!perThread.isActive()) {
         ensureOpen();
         assert false: "perThread is not active but we are still open";
@@ -317,27 +336,53 @@ final class DocumentsWriter {
        
       final DocumentsWriterPerThread dwpt = perThread.perThread;
       try {
-        dwpt.updateDocument(doc, analyzer, delTerm); 
-        numDocsInRAM.incrementAndGet();
+        final int docCount = dwpt.updateDocuments(docs, analyzer, delTerm);
+        numDocsInRAM.addAndGet(docCount);
       } finally {
         if (dwpt.checkAndResetHasAborted()) {
           flushControl.doOnAbort(perThread);
         }
       }
+      final boolean isUpdate = delTerm != null;
       flushingDWPT = flushControl.doAfterDocument(perThread, isUpdate);
     } finally {
       perThread.unlock();
     }
+
+    return postUpdate(flushingDWPT, maybeMerge);
+  }
+
+  boolean updateDocument(final Document doc, final Analyzer analyzer,
+      final Term delTerm) throws CorruptIndexException, IOException {
+
+    boolean maybeMerge = preUpdate();
+
+    final ThreadState perThread = perThreadPool.getAndLock(Thread.currentThread(), this);
+    final DocumentsWriterPerThread flushingDWPT;
     
-    if (flushingDWPT != null) {
-      maybeMerge |= doFlush(flushingDWPT);
-    } else {
-      final DocumentsWriterPerThread nextPendingFlush = flushControl.nextPendingFlush();
-      if (nextPendingFlush != null) {
-        maybeMerge |= doFlush(nextPendingFlush);
+    try {
+
+      if (!perThread.isActive()) {
+        ensureOpen();
+        assert false: "perThread is not active but we are still open";
       }
+       
+      final DocumentsWriterPerThread dwpt = perThread.perThread;
+      try {
+        dwpt.updateDocument(doc, analyzer, delTerm); 
+        numDocsInRAM.incrementAndGet();
+      } finally {
+        if (dwpt.checkAndResetHasAborted()) {
+          flushControl.doOnAbort(perThread);
+        }
+      }
+      final boolean isUpdate = delTerm != null;
+      flushingDWPT = flushControl.doAfterDocument(perThread, isUpdate);
+    } finally {
+      perThread.unlock();
     }
-    return maybeMerge;
+
+    return postUpdate(flushingDWPT, maybeMerge);
   }
 
   private  boolean doFlush(DocumentsWriterPerThread flushingDWPT) throws IOException {
@@ -541,4 +586,20 @@ final class DocumentsWriter {
       return (!isSegmentFlush || segment != null);  
     }
   }
+  
+  // used by IW during close to assert all DWPTs are inactive after the final flush
+  boolean assertNoActiveDWPT() {
+    Iterator<ThreadState> activePerThreadsIterator = perThreadPool.getAllPerThreadsIterator();
+    while(activePerThreadsIterator.hasNext()) {
+      ThreadState next = activePerThreadsIterator.next();
+      next.lock();
+      try {
+        assert !next.isActive();
+      } finally  {
+        next.unlock();
+      }
+    }
+    return true;
+  }
+ 
 }
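
The refactor above extracts the shared pre/post logic so that updateDocument and
updateDocuments follow one skeleton: help out pending flushes, index under the
thread-state lock, then flush the checked-out (or next pending) DWPT. A compilable
sketch of that skeleton (names mirror the patch; bodies are stubs, not Lucene API):

    import java.io.IOException;

    // Sketch only: the common shape of updateDocument/updateDocuments.
    abstract class UpdateSkeleton<S, D> {
      abstract boolean preUpdate() throws IOException;          // help out queued flushes, un-stall
      abstract boolean postUpdate(D flushingDWPT, boolean maybeMerge) throws IOException;
      abstract S lockThreadState();                             // perThreadPool.getAndLock(...)
      abstract void unlock(S state);
      abstract D indexUnderLock(S state) throws IOException;    // index one doc or a whole block

      final boolean update() throws IOException {
        boolean maybeMerge = preUpdate();
        final S state = lockThreadState();
        final D flushingDWPT;
        try {
          flushingDWPT = indexUnderLock(state);
        } finally {
          unlock(state);
        }
        return postUpdate(flushingDWPT, maybeMerge);
      }
    }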

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java Tue May 31 11:25:37 2011
@@ -16,6 +16,7 @@ package org.apache.lucene.index;
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -68,7 +69,7 @@ public final class DocumentsWriterFlushC
     this.stallControl = new DocumentsWriterStallControl();
     this.perThreadPool = documentsWriter.perThreadPool;
     this.flushPolicy = documentsWriter.flushPolicy;
-    this.hardMaxBytesPerDWPT = config.getRAMPerThreadHardLimitMB() * 1024 * 1024;;
+    this.hardMaxBytesPerDWPT = config.getRAMPerThreadHardLimitMB() * 1024 * 1024;
     this.config = config;
     this.documentsWriter = documentsWriter;
   }
@@ -162,8 +163,6 @@ public final class DocumentsWriterFlushC
       stallControl.updateStalled(this);
       assert assertMemory();
     }
-    
-    
   }
 
   synchronized void doAfterFlush(DocumentsWriterPerThread dwpt) {
@@ -206,7 +205,7 @@ public final class DocumentsWriterFlushC
     } // don't assert on numDocs since we could hit an abort excp. while selecting that dwpt for flushing
     
   }
-
+  
   synchronized void doOnAbort(ThreadState state) {
     try {
       if (state.flushPending) {
@@ -217,7 +216,7 @@ public final class DocumentsWriterFlushC
       assert assertMemory();
       // Take it out of the loop; this DWPT is stale
       perThreadPool.replaceForFlush(state, closed);
-    }finally {
+    } finally {
       stallControl.updateStalled(this);
     }
   }
@@ -305,6 +304,7 @@ public final class DocumentsWriterFlushC
   synchronized void setClosed() {
     // set by DW to signal that we should not release new DWPT after close
     this.closed = true;
+    perThreadPool.deactivateUnreleasedStates();
   }
 
   /**
@@ -387,8 +387,12 @@ public final class DocumentsWriterFlushC
             toFlush.add(flushingDWPT);
           }
         } else {
-          // get the new delete queue from DW
-          next.perThread.initialize();
+          if (closed) {
+            next.resetWriter(null); // make this state inactive
+          } else {
+            // get the new delete queue from DW
+            next.perThread.initialize();
+          }
         }
       } finally {
         next.unlock();
@@ -451,10 +455,21 @@ public final class DocumentsWriterFlushC
     try {
       for (DocumentsWriterPerThread dwpt : flushQueue) {
         doAfterFlush(dwpt);
+        try {
+          dwpt.abort();
+        } catch (IOException ex) {
+          // continue
+        }
       }
       for (BlockedFlush blockedFlush : blockedFlushes) {
-        flushingWriters.put(blockedFlush.dwpt, Long.valueOf(blockedFlush.bytes));
+        flushingWriters
+            .put(blockedFlush.dwpt, Long.valueOf(blockedFlush.bytes));
         doAfterFlush(blockedFlush.dwpt);
+        try {
+          blockedFlush.dwpt.abort();
+        } catch (IOException ex) {
+          // continue
+        }
       }
     } finally {
       fullFlush = false;
@@ -512,5 +527,4 @@ public final class DocumentsWriterFlushC
   boolean anyStalledThreads() {
     return stallControl.anyStalledThreads();
   }
- 
-}
\ No newline at end of file
+}

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java Tue May 31 11:25:37 2011
@@ -105,7 +105,7 @@ public class DocumentsWriterPerThread {
       // largish:
       doc = null;
       analyzer = null;
-  }
+    }
   }
 
   static class FlushedSegment {
@@ -179,7 +179,7 @@ public class DocumentsWriterPerThread {
     this.parent = parent;
     this.fieldInfos = fieldInfos;
     this.writer = parent.indexWriter;
-    this.infoStream = parent.indexWriter.getInfoStream();
+    this.infoStream = parent.infoStream;
     this.docState = new DocState(this);
     this.docState.similarityProvider = parent.indexWriter.getConfig()
         .getSimilarityProvider();
@@ -255,6 +255,82 @@ public class DocumentsWriterPerThread {
     finishDocument(delTerm);
   }
   
+  public int updateDocuments(Iterable<Document> docs, Analyzer analyzer, Term delTerm) throws IOException {
+    assert writer.testPoint("DocumentsWriterPerThread addDocuments start");
+    assert deleteQueue != null;
+    docState.analyzer = analyzer;
+    if (segment == null) {
+      // this call is synchronized on IndexWriter.segmentInfos
+      segment = writer.newSegmentName();
+      assert numDocsInRAM == 0;
+    }
+
+    int docCount = 0;
+    try {
+      for(Document doc : docs) {
+        docState.doc = doc;
+        docState.docID = numDocsInRAM;
+        docCount++;
+
+        boolean success = false;
+        try {
+          consumer.processDocument(fieldInfos);
+          success = true;
+        } finally {
+          if (!success) {
+            // An exc is being thrown...
+
+            if (!aborting) {
+              // One of the documents hit a non-aborting
+              // exception (eg something happened during
+              // analysis).  We now go and mark any docs
+              // from this batch that we had already indexed
+              // as deleted:
+              int docID = docState.docID;
+              final int endDocID = docID - docCount;
+              while (docID > endDocID) {
+                deleteDocID(docID);
+                docID--;
+              }
+
+              // Incr here because finishDocument will not
+              // be called (because an exc is being thrown):
+              numDocsInRAM++;
+              fieldInfos.revertUncommitted();
+            } else {
+              abort();
+            }
+          }
+        }
+        success = false;
+        try {
+          consumer.finishDocument();
+          success = true;
+        } finally {
+          if (!success) {
+            abort();
+          }
+        }
+
+        finishDocument(null);
+      }
+
+      // Apply delTerm only after all indexing has
+      // succeeded, but apply it only to docs prior to when
+      // this batch started:
+      if (delTerm != null) {
+        deleteQueue.add(delTerm, deleteSlice);
+        assert deleteSlice.isTailItem(delTerm) : "expected the delete term as the tail item";
+        deleteSlice.apply(pendingDeletes, numDocsInRAM-docCount);
+      }
+
+    } finally {
+      docState.clear();
+    }
+
+    return docCount;
+  }
+  
   private void finishDocument(Term delTerm) throws IOException {
     /*
      * here we actually finish the document in two steps 1. push the delete into
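
In updateDocuments above, when a non-aborting exception strikes partway through a
block, the docs of that batch that were already indexed are marked deleted. A worked
example of that rollback window (the values are illustrative, not from the patch):

    // Illustrative values: the third doc of a block fails during analysis.
    public class RollbackWindow {
      public static void main(String[] args) {
        int docCount = 3;                      // docs seen from this batch, including the failing one
        int docID = 41;                        // docState.docID of the failing doc
        final int endDocID = docID - docCount; // 38 = last doc indexed *before* the batch
        while (docID > endDocID) {
          System.out.println("deleteDocID(" + docID + ")"); // deletes 41, 40, 39
          docID--;
        }
      }
    }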

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThreadPool.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThreadPool.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThreadPool.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThreadPool.java Tue May 31 11:25:37 2011
@@ -19,7 +19,6 @@ package org.apache.lucene.index;
 import java.util.Iterator;
 import java.util.concurrent.locks.ReentrantLock;
 
-import org.apache.lucene.document.Document;
 import org.apache.lucene.index.FieldInfos.FieldNumberBiMap;
 import org.apache.lucene.index.SegmentCodecs.SegmentCodecsBuilder;
 import org.apache.lucene.index.codecs.CodecProvider;
@@ -194,6 +193,21 @@ public abstract class DocumentsWriterPer
     return null;
   }
   
+  /**
+   * Deactivates all unreleased thread states.
+   */
+  protected synchronized void deactivateUnreleasedStates() {
+    for (int i = numThreadStatesActive; i < perThreads.length; i++) {
+      final ThreadState threadState = perThreads[i];
+      threadState.lock();
+      try {
+        threadState.resetWriter(null);
+      } finally {
+        threadState.unlock();
+      }
+    }
+  }
+  
   protected DocumentsWriterPerThread replaceForFlush(ThreadState threadState, boolean closed) {
     assert threadState.isHeldByCurrentThread();
     final DocumentsWriterPerThread dwpt = threadState.perThread;
@@ -212,7 +226,7 @@ public abstract class DocumentsWriterPer
     // don't recycle DWPT by default
   }
   
-  public abstract ThreadState getAndLock(Thread requestingThread, DocumentsWriter documentsWriter, Document doc);
+  public abstract ThreadState getAndLock(Thread requestingThread, DocumentsWriter documentsWriter);
 
   /**
    * Returns an iterator providing access to all {@link ThreadState}

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/FieldsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/FieldsWriter.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/FieldsWriter.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/FieldsWriter.java Tue May 31 11:25:37 2011
@@ -113,7 +113,7 @@ final class FieldsWriter {
   void close() throws IOException {
     if (directory != null) {
       try {
-        IOUtils.closeSafely(fieldsStream, indexStream);
+        IOUtils.closeSafely(false, fieldsStream, indexStream);
       } finally {
         fieldsStream = indexStream = null;
       }
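
The closeSafely(false, ...) call above closes both streams even if the first close
throws, then rethrows the first failure; passing true instead suppresses all
exceptions. A minimal sketch of that contract (the real implementation lives in
org.apache.lucene.util.IOUtils):

    import java.io.Closeable;
    import java.io.IOException;

    final class CloseSafelySketch {
      // Close everything; if suppressExceptions is false, rethrow the first failure.
      static void closeSafely(boolean suppressExceptions, Closeable... objects) throws IOException {
        IOException first = null;
        for (Closeable c : objects) {
          try {
            if (c != null) c.close();
          } catch (IOException e) {
            if (first == null) first = e;
          }
        }
        if (!suppressExceptions && first != null) {
          throw first;
        }
      }
    }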

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/FreqProxTermsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/FreqProxTermsWriter.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/FreqProxTermsWriter.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/FreqProxTermsWriter.java Tue May 31 11:25:37 2011
@@ -57,9 +57,10 @@ final class FreqProxTermsWriter extends 
 
     final FieldsConsumer consumer = state.segmentCodecs.codec().fieldsConsumer(state);
 
-    TermsHash termsHash = null;
-
-    /*
+    try {
+      TermsHash termsHash = null;
+      
+      /*
     Current writer chain:
       FieldsConsumer
         -> IMPL: FormatPostingsTermsDictWriter
@@ -69,36 +70,38 @@ final class FreqProxTermsWriter extends 
                 -> IMPL: FormatPostingsDocsWriter
                   -> PositionsConsumer
                     -> IMPL: FormatPostingsPositionsWriter
-    */
-
-    for (int fieldNumber = 0; fieldNumber < numAllFields; fieldNumber++) {
-      final FieldInfo fieldInfo = allFields.get(fieldNumber).fieldInfo;
-
-      final FreqProxTermsWriterPerField fieldWriter = allFields.get(fieldNumber);
-
-      // Aggregate the storePayload as seen by the same
-      // field across multiple threads
-      if (!fieldInfo.omitTermFreqAndPositions) {
-        fieldInfo.storePayloads |= fieldWriter.hasPayloads;
+       */
+      
+      for (int fieldNumber = 0; fieldNumber < numAllFields; fieldNumber++) {
+        final FieldInfo fieldInfo = allFields.get(fieldNumber).fieldInfo;
+        
+        final FreqProxTermsWriterPerField fieldWriter = allFields.get(fieldNumber);
+        
+        // Aggregate the storePayload as seen by the same
+        // field across multiple threads
+        if (!fieldInfo.omitTermFreqAndPositions) {
+          fieldInfo.storePayloads |= fieldWriter.hasPayloads;
+        }
+        
+        // If this field has postings then add them to the
+        // segment
+        fieldWriter.flush(fieldInfo.name, consumer, state);
+        
+        TermsHashPerField perField = fieldWriter.termsHashPerField;
+        assert termsHash == null || termsHash == perField.termsHash;
+        termsHash = perField.termsHash;
+        int numPostings = perField.bytesHash.size();
+        perField.reset();
+        perField.shrinkHash(numPostings);
+        fieldWriter.reset();
       }
-
-      // If this field has postings then add them to the
-      // segment
-      fieldWriter.flush(fieldInfo.name, consumer, state);
-
-      TermsHashPerField perField = fieldWriter.termsHashPerField;
-      assert termsHash == null || termsHash == perField.termsHash;
-      termsHash = perField.termsHash;
-      int numPostings = perField.bytesHash.size();
-      perField.reset();
-      perField.shrinkHash(numPostings);
-      fieldWriter.reset();
-    }
-
-    if (termsHash != null) {
-      termsHash.reset();
+      
+      if (termsHash != null) {
+        termsHash.reset();
+      }
+    } finally {
+      consumer.close();
     }
-    consumer.close();
   }
 
   BytesRef payload;

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/IndexFileNames.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/IndexFileNames.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/IndexFileNames.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/IndexFileNames.java Tue May 31 11:25:37 2011
@@ -17,6 +17,8 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
+import java.util.regex.Pattern;
+
 import org.apache.lucene.index.codecs.Codec;  // for javadocs
 
 /**
@@ -237,5 +239,16 @@ public final class IndexFileNames {
     }
     return filename;
   }
+
+  /**
+   * Returns true if the given filename ends with the separate norms file
+   * pattern: {@code SEPARATE_NORMS_EXTENSION + "[0-9]+"}.
+   */
+  public static boolean isSeparateNormsFile(String filename) {
+    int idx = filename.lastIndexOf('.');
+    if (idx == -1) return false;
+    String ext = filename.substring(idx + 1);
+    return Pattern.matches(SEPARATE_NORMS_EXTENSION + "[0-9]+", ext);
+  }
   
 }
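
The new isSeparateNormsFile recognizes extensions like "s1": SEPARATE_NORMS_EXTENSION
(the literal "s", as the SegmentInfo change below shows) followed by a generation
number. A standalone demo of the check:

    import java.util.regex.Pattern;

    public class SeparateNormsDemo {
      // Mirrors isSeparateNormsFile, with SEPARATE_NORMS_EXTENSION inlined as "s".
      static boolean isSeparateNormsFile(String filename) {
        int idx = filename.lastIndexOf('.');
        if (idx == -1) return false;
        return Pattern.matches("s[0-9]+", filename.substring(idx + 1));
      }

      public static void main(String[] args) {
        System.out.println(isSeparateNormsFile("_3.s1"));  // true: separate norms, gen 1
        System.out.println(isSeparateNormsFile("_3.nrm")); // false: shared norms file
        System.out.println(isSeparateNormsFile("_3.s"));   // false: no generation suffix
      }
    }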

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/IndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/IndexWriter.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/IndexWriter.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/IndexWriter.java Tue May 31 11:25:37 2011
@@ -23,6 +23,7 @@ import java.io.PrintStream;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.Comparator;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -51,6 +52,7 @@ import org.apache.lucene.store.LockObtai
 import org.apache.lucene.util.BitVector;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.Constants;
+import org.apache.lucene.util.StringHelper;
 import org.apache.lucene.util.ThreadInterruptedException;
 import org.apache.lucene.util.MapBackedSet;
 
@@ -1071,7 +1073,8 @@ public class IndexWriter implements Clos
 
       if (infoStream != null)
         message("at close: " + segString());
-
+      // used by assert below
+      final DocumentsWriter oldWriter = docWriter;
       synchronized(this) {
         readerPool.close();
         docWriter = null;
@@ -1085,6 +1088,7 @@ public class IndexWriter implements Clos
       synchronized(this) {
         closed = true;
       }
+      assert oldWriter.assertNoActiveDWPT();
     } catch (OutOfMemoryError oom) {
       handleOOM(oom, "closeInternal");
     } finally {
@@ -1098,6 +1102,8 @@ public class IndexWriter implements Clos
       }
     }
   }
+  
+ 
 
   /** Returns the Directory used by this index. */
   public Directory getDirectory() {
@@ -1228,6 +1234,111 @@ public class IndexWriter implements Clos
   }
 
   /**
+   * Atomically adds a block of documents with sequentially
+   * assigned document IDs, such that an external reader
+   * will see all or none of the documents.
+   *
+   * <p><b>WARNING</b>: the index does not currently record
+   * which documents were added as a block.  Today this is
+   * fine, because merging will preserve the block (as long
+   * as none of them were deleted).  But it's possible in the
+   * future that Lucene may more aggressively re-order
+   * documents (for example, perhaps to obtain better index
+   * compression), in which case you may need to fully
+   * re-index your documents at that time.
+   *
+   * <p>See {@link #addDocument(Document)} for details on
+   * index and IndexWriter state after an Exception, and
+   * flushing/merging temporary free space requirements.</p>
+   *
+   * <p><b>NOTE</b>: tools that do offline splitting of an index
+   * (for example, IndexSplitter in contrib) or
+   * re-sorting of documents (for example, IndexSorter in
+   * contrib) are not aware of these atomically added documents
+   * and will likely break them up.  Use such tools at your
+   * own risk!
+   *
+   * <p><b>NOTE</b>: if this method hits an OutOfMemoryError
+   * you should immediately close the writer.  See <a
+   * href="#OOME">above</a> for details.</p>
+   *
+   * @throws CorruptIndexException if the index is corrupt
+   * @throws IOException if there is a low-level IO error
+   *
+   * @lucene.experimental
+   */
+  public void addDocuments(Iterable<Document> docs) throws CorruptIndexException, IOException {
+    addDocuments(docs, analyzer);
+  }
+
+  /**
+   * Atomically adds a block of documents, analyzed using the
+   * provided analyzer, with sequentially assigned document
+   * IDs, such that an external reader will see all or none
+   * of the documents. 
+   *
+   * @throws CorruptIndexException if the index is corrupt
+   * @throws IOException if there is a low-level IO error
+   *
+   * @lucene.experimental
+   */
+  public void addDocuments(Iterable<Document> docs, Analyzer analyzer) throws CorruptIndexException, IOException {
+    updateDocuments(null, docs, analyzer);
+  }
+
+  /**
+   * Atomically deletes documents matching the provided
+   * delTerm and adds a block of documents with sequentially
+   * assigned document IDs, such that an external reader
+   * will see all or none of the documents. 
+   *
+   * See {@link #addDocuments(Iterable)}.
+   *
+   * @throws CorruptIndexException if the index is corrupt
+   * @throws IOException if there is a low-level IO error
+   *
+   * @lucene.experimental
+   */
+  public void updateDocuments(Term delTerm, Iterable<Document> docs) throws CorruptIndexException, IOException {
+    updateDocuments(delTerm, docs, analyzer);
+  }
+
+  /**
+   * Atomically deletes documents matching the provided
+   * delTerm and adds a block of documents, analyzed  using
+   * the provided analyzer, with sequentially
+   * assigned document IDs, such that an external reader
+   * will see all or none of the documents. 
+   *
+   * See {@link #addDocuments(Iterable)}.
+   *
+   * @throws CorruptIndexException if the index is corrupt
+   * @throws IOException if there is a low-level IO error
+   *
+   * @lucene.experimental
+   */
+  public void updateDocuments(Term delTerm, Iterable<Document> docs, Analyzer analyzer) throws CorruptIndexException, IOException {
+    ensureOpen();
+    try {
+      boolean success = false;
+      boolean anySegmentFlushed = false;
+      try {
+        anySegmentFlushed = docWriter.updateDocuments(docs, analyzer, delTerm);
+        success = true;
+      } finally {
+        if (!success && infoStream != null) {
+          message("hit exception updating document");
+        }
+      }
+      if (anySegmentFlushed) {
+        maybeMerge();
+      }
+    } catch (OutOfMemoryError oom) {
+      handleOOM(oom, "updateDocuments");
+    }
+  }
+
+  /**
    * Deletes the document(s) containing <code>term</code>.
    *
    * <p><b>NOTE</b>: if this method hits an OutOfMemoryError
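
For illustration, a hypothetical caller of the new block APIs (field names and
values are made up): addDocuments makes the whole block visible atomically, and
updateDocuments atomically replaces any documents matching the delete term.

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.Term;

    class BlockIndexing {
      // Illustrative only: index three child docs plus their parent as one block.
      static void indexBlock(IndexWriter writer) throws Exception {
        List<Document> block = new ArrayList<Document>();
        for (int i = 0; i < 3; i++) {
          Document child = new Document();
          child.add(new Field("type", "child", Field.Store.NO, Field.Index.NOT_ANALYZED));
          block.add(child);
        }
        Document parent = new Document();
        parent.add(new Field("id", "record-7", Field.Store.YES, Field.Index.NOT_ANALYZED));
        block.add(parent);

        writer.addDocuments(block); // a reader sees all of these docs, or none

        // Later: atomically delete the old block (by its parent id) and add the new one.
        writer.updateDocuments(new Term("id", "record-7"), block);
      }
    }
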
@@ -2217,10 +2328,10 @@ public class IndexWriter implements Clos
    * <p>
    * <b>NOTE:</b> this method only copies the segments of the incoming indexes
    * and does not merge them. Therefore deleted documents are not removed and
-   * the new segments are not merged with the existing ones. Also, the segments
-   * are copied as-is, meaning they are not converted to CFS if they aren't,
-   * and vice-versa. If you wish to do that, you can call {@link #maybeMerge}
-   * or {@link #optimize} afterwards.
+   * the new segments are not merged with the existing ones. Also, if the merge
+   * policy allows compound files, any segment that is not already compound is
+   * converted to a compound file. However, if a segment is already compound, it
+   * is copied as-is, even if the merge policy does not allow compound files.
    *
    * <p>This requires this index not be among those to be added.
    *
@@ -2244,6 +2355,7 @@ public class IndexWriter implements Clos
 
       int docCount = 0;
       List<SegmentInfo> infos = new ArrayList<SegmentInfo>();
+      Comparator<String> versionComparator = StringHelper.getVersionComparator();
       for (Directory dir : dirs) {
         if (infoStream != null) {
           message("addIndexes: process directory " + dir);
@@ -2263,46 +2375,22 @@ public class IndexWriter implements Clos
             message("addIndexes: process segment origName=" + info.name + " newName=" + newSegName + " dsName=" + dsName + " info=" + info);
           }
 
-          // Determine if the doc store of this segment needs to be copied. It's
-          // only relevant for segments who share doc store with others, because
-          // the DS might have been copied already, in which case we just want
-          // to update the DS name of this SegmentInfo.
-          // NOTE: pre-3x segments include a null DSName if they don't share doc
-          // store. So the following code ensures we don't accidentally insert
-          // 'null' to the map.
-          final String newDsName;
-          if (dsName != null) {
-            if (dsNames.containsKey(dsName)) {
-              newDsName = dsNames.get(dsName);
-            } else {
-              dsNames.put(dsName, newSegName);
-              newDsName = newSegName;
-            }
-          } else {
-            newDsName = newSegName;
+          // create CFS only if the source segment is not CFS, and MP agrees it
+          // should be CFS.
+          boolean createCFS;
+          synchronized (this) { // Guard segmentInfos
+            createCFS = !info.getUseCompoundFile()
+                && mergePolicy.useCompoundFile(segmentInfos, info)
+                // optimize case only for segments that don't share doc stores
+                && versionComparator.compare(info.getVersion(), "3.1") >= 0;
           }
 
-          // Copy the segment files
-          for (String file: info.files()) {
-            final String newFileName;
-            if (IndexFileNames.isDocStoreFile(file)) {
-              newFileName = newDsName + IndexFileNames.stripSegmentName(file);
-              if (dsFilesCopied.contains(newFileName)) {
-                continue;
-              }
-              dsFilesCopied.add(newFileName);
-            } else {
-              newFileName = newSegName + IndexFileNames.stripSegmentName(file);
-            }
-            assert !directory.fileExists(newFileName): "file \"" + newFileName + "\" already exists";
-            dir.copy(directory, file, newFileName);
+          if (createCFS) {
+            copySegmentIntoCFS(info, newSegName);
+          } else {
+            copySegmentAsIs(info, newSegName, dsNames, dsFilesCopied);
           }
 
-          // Update SI appropriately
-          info.setDocStore(info.getDocStoreOffset(), newDsName, info.getDocStoreIsCompoundFile());
-          info.dir = directory;
-          info.name = newSegName;
-
           infos.add(info);
         }
       }
@@ -2391,6 +2479,76 @@ public class IndexWriter implements Clos
     }
   }
 
+  /** Copies the segment into the IndexWriter's directory, as a compound segment. */
+  private void copySegmentIntoCFS(SegmentInfo info, String segName) throws IOException {
+    String segFileName = IndexFileNames.segmentFileName(segName, "", IndexFileNames.COMPOUND_FILE_EXTENSION);
+    Collection<String> files = info.files();
+    CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, segFileName);
+    for (String file : files) {
+      String newFileName = segName + IndexFileNames.stripSegmentName(file);
+      if (!IndexFileNames.matchesExtension(file, IndexFileNames.DELETES_EXTENSION)
+          && !IndexFileNames.isSeparateNormsFile(file)) {
+        cfsWriter.addFile(file, info.dir);
+      } else {
+        assert !directory.fileExists(newFileName): "file \"" + newFileName + "\" already exists";
+        info.dir.copy(directory, file, newFileName);
+      }
+    }
+    
+    // Create the .cfs
+    cfsWriter.close();
+    
+    info.dir = directory;
+    info.name = segName;
+    info.setUseCompoundFile(true);
+  }
+  
+  /** Copies the segment files as-is into the IndexWriter's directory. */
+  private void copySegmentAsIs(SegmentInfo info, String segName,
+      Map<String, String> dsNames, Set<String> dsFilesCopied)
+      throws IOException {
+    // Determine if the doc store of this segment needs to be copied. It's
+    // only relevant for segments that share doc store with others,
+    // because the DS might have been copied already, in which case we
+    // just want to update the DS name of this SegmentInfo.
+    // NOTE: pre-3x segments include a null DSName if they don't share doc
+    // store. The following code ensures we don't accidentally insert
+    // 'null' to the map.
+    String dsName = info.getDocStoreSegment();
+    final String newDsName;
+    if (dsName != null) {
+      if (dsNames.containsKey(dsName)) {
+        newDsName = dsNames.get(dsName);
+      } else {
+        dsNames.put(dsName, segName);
+        newDsName = segName;
+      }
+    } else {
+      newDsName = segName;
+    }
+    
+    // Copy the segment files
+    for (String file: info.files()) {
+      final String newFileName;
+      if (IndexFileNames.isDocStoreFile(file)) {
+        newFileName = newDsName + IndexFileNames.stripSegmentName(file);
+        if (dsFilesCopied.contains(newFileName)) {
+          continue;
+        }
+        dsFilesCopied.add(newFileName);
+      } else {
+        newFileName = segName + IndexFileNames.stripSegmentName(file);
+      }
+      
+      assert !directory.fileExists(newFileName): "file \"" + newFileName + "\" already exists";
+      info.dir.copy(directory, file, newFileName);
+    }
+    
+    info.setDocStore(info.getDocStoreOffset(), newDsName, info.getDocStoreIsCompoundFile());
+    info.dir = directory;
+    info.name = segName;
+  }
+  
   /**
    * A hook for extending classes to execute operations after pending added and
    * deleted documents have been flushed to the Directory but before the change
@@ -3176,50 +3334,50 @@ public class IndexWriter implements Clos
     runningMerges.remove(merge);
   }
 
-  private synchronized void closeMergeReaders(MergePolicy.OneMerge merge, boolean suppressExceptions) throws IOException {
+  private final synchronized void closeMergeReaders(MergePolicy.OneMerge merge, boolean suppressExceptions) throws IOException {
     final int numSegments = merge.readers.size();
-    if (suppressExceptions) {
-      // Suppress any new exceptions so we throw the
-      // original cause
-      boolean anyChanges = false;
-      for (int i=0;i<numSegments;i++) {
-        if (merge.readers.get(i) != null) {
-          try {
-            anyChanges |= readerPool.release(merge.readers.get(i), false);
-          } catch (Throwable t) {
-          }
-          merge.readers.set(i, null);
-        }
-
-        if (i < merge.readerClones.size() && merge.readerClones.get(i) != null) {
-          try {
-            merge.readerClones.get(i).close();
-          } catch (Throwable t) {
-          }
-          // This was a private clone and we had the
-          // only reference
-          assert merge.readerClones.get(i).getRefCount() == 0: "refCount should be 0 but is " + merge.readerClones.get(i).getRefCount();
-          merge.readerClones.set(i, null);
+    Throwable th = null;
+    
+    boolean anyChanges = false;
+    boolean drop = !suppressExceptions;
+    for (int i = 0; i < numSegments; i++) {
+      if (merge.readers.get(i) != null) {
+        try {
+          anyChanges |= readerPool.release(merge.readers.get(i), drop);
+        } catch (Throwable t) {
+          if (th == null) {
+            th = t;
+          }
         }
+        merge.readers.set(i, null);
       }
-      if (anyChanges) {
-        checkpoint();
-      }
-    } else {
-      for (int i=0;i<numSegments;i++) {
-        if (merge.readers.get(i) != null) {
-          readerPool.release(merge.readers.get(i), true);
-          merge.readers.set(i, null);
-        }
-
-        if (i < merge.readerClones.size() && merge.readerClones.get(i) != null) {
+      
+      if (i < merge.readerClones.size() && merge.readerClones.get(i) != null) {
+        try {
           merge.readerClones.get(i).close();
-          // This was a private clone and we had the only reference
-          assert merge.readerClones.get(i).getRefCount() == 0;
-          merge.readerClones.set(i, null);
+        } catch (Throwable t) {
+          if (th == null) {
+            th = t;
+          }
         }
+        // This was a private clone and we had the
+        // only reference
+        assert merge.readerClones.get(i).getRefCount() == 0: "refCount should be 0 but is " + merge.readerClones.get(i).getRefCount();
+        merge.readerClones.set(i, null);
       }
     }
+    
+    if (suppressExceptions && anyChanges) {
+      checkpoint();
+    }
+    
+    // If any error occurred, throw it.
+    if (!suppressExceptions && th != null) {
+      if (th instanceof IOException) throw (IOException) th;
+      if (th instanceof RuntimeException) throw (RuntimeException) th;
+      if (th instanceof Error) throw (Error) th;
+      throw new RuntimeException(th);
+    }
   }
 
   /** Does the actual (time-consuming) work of the merge,
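
The rewritten closeMergeReaders records the first Throwable while still releasing
every reader and clone, then rethrows it with its original type. That rethrow step,
as a reusable helper (a sketch of the pattern, not a Lucene API):

    import java.io.IOException;

    final class Rethrow {
      // Rethrow a captured Throwable, preserving its static type where possible.
      static void rethrow(Throwable th) throws IOException {
        if (th instanceof IOException) throw (IOException) th;
        if (th instanceof RuntimeException) throw (RuntimeException) th;
        if (th instanceof Error) throw (Error) th;
        throw new RuntimeException(th);
      }
    }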

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/NormsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/NormsWriter.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/NormsWriter.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/NormsWriter.java Tue May 31 11:25:37 2011
@@ -22,6 +22,7 @@ import java.util.Collection;
 import java.util.Map;
 
 import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.util.IOUtils;
 
 // TODO FI: norms could actually be stored as doc store
 
@@ -49,9 +50,9 @@ final class NormsWriter extends Inverted
 
     final String normsFileName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.NORMS_EXTENSION);
     IndexOutput normsOut = state.directory.createOutput(normsFileName);
-
+    boolean success = false;
     try {
-      normsOut.writeBytes(SegmentMerger.NORMS_HEADER, 0, SegmentMerger.NORMS_HEADER.length);
+      normsOut.writeBytes(SegmentNorms.NORMS_HEADER, 0, SegmentNorms.NORMS_HEADER.length);
 
       int normCount = 0;
 
@@ -84,9 +85,9 @@ final class NormsWriter extends Inverted
 
         assert 4+normCount*state.numDocs == normsOut.getFilePointer() : ".nrm file size mismatch: expected=" + (4+normCount*state.numDocs) + " actual=" + normsOut.getFilePointer();
       }
-
+      success = true;
     } finally {
-      normsOut.close();
+      IOUtils.closeSafely(!success, normsOut);
     }
   }
 

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/PerFieldCodecWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/PerFieldCodecWrapper.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/PerFieldCodecWrapper.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/PerFieldCodecWrapper.java Tue May 31 11:25:37 2011
@@ -36,6 +36,7 @@ import org.apache.lucene.index.codecs.Te
 import org.apache.lucene.index.codecs.DocValuesConsumer;
 import org.apache.lucene.index.values.DocValues;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.IOUtils;
 
 /**
  * Enables native per field codec support. This class selects the codec used to
@@ -67,7 +68,15 @@ final class PerFieldCodecWrapper extends
       assert segmentCodecs == state.segmentCodecs;
       final Codec[] codecs = segmentCodecs.codecs;
       for (int i = 0; i < codecs.length; i++) {
-        consumers.add(codecs[i].fieldsConsumer(new SegmentWriteState(state, i)));
+        boolean success = false;
+        try {
+          consumers.add(codecs[i].fieldsConsumer(new SegmentWriteState(state, i)));
+          success = true;
+        } finally {
+          if (!success) {
+            IOUtils.closeSafely(true, consumers);
+          }
+        }
       }
     }
 
@@ -80,22 +89,7 @@ final class PerFieldCodecWrapper extends
 
     @Override
     public void close() throws IOException {
-      Iterator<FieldsConsumer> it = consumers.iterator();
-      IOException err = null;
-      while (it.hasNext()) {
-        try {
-          it.next().close();
-        } catch (IOException ioe) {
-          // keep first IOException we hit but keep
-          // closing the rest
-          if (err == null) {
-            err = ioe;
-          }
-        }
-      }
-      if (err != null) {
-        throw err;
-      }
+      IOUtils.closeSafely(false, consumers);
     }
   }
 
@@ -128,14 +122,7 @@ final class PerFieldCodecWrapper extends
           // If we hit exception (eg, IOE because writer was
           // committing, or, for any other reason) we must
           // go back and close all FieldsProducers we opened:
-          for(FieldsProducer fp : producers.values()) {
-            try {
-              fp.close();
-            } catch (Throwable t) {
-              // Suppress all exceptions here so we continue
-              // to throw the original one
-            }
-          }
+          IOUtils.closeSafely(true, producers.values());
         }
       }
     }
@@ -184,22 +171,7 @@ final class PerFieldCodecWrapper extends
     
     @Override
     public void close() throws IOException {
-      Iterator<FieldsProducer> it = codecs.values().iterator();
-      IOException err = null;
-      while (it.hasNext()) {
-        try {
-          it.next().close();
-        } catch (IOException ioe) {
-          // keep first IOException we hit but keep
-          // closing the rest
-          if (err == null) {
-            err = ioe;
-          }
-        }
-      }
-      if (err != null) {
-        throw err;
-      }
+      IOUtils.closeSafely(false, codecs.values());
     }
 
     @Override

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/PersistentSnapshotDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/PersistentSnapshotDeletionPolicy.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/PersistentSnapshotDeletionPolicy.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/PersistentSnapshotDeletionPolicy.java Tue May 31 11:25:37 2011
@@ -59,7 +59,7 @@ public class PersistentSnapshotDeletionP
 
   /**
    * Reads the snapshots information from the given {@link Directory}. This
-   * method does can be used if the snapshots information is needed, however you
+   * method can be used if the snapshots information is needed, however you
    * cannot instantiate the deletion policy (because e.g., some other process
    * keeps a lock on the snapshots directory).
    */
@@ -122,11 +122,19 @@ public class PersistentSnapshotDeletionP
       writer.commit();
     }
 
-    // Initializes the snapshots information. This code should basically run
-    // only if mode != CREATE, but if it is, it's no harm as we only open the
-    // reader once and immediately close it.
-    for (Entry<String, String> e : readSnapshotsInfo(dir).entrySet()) {
-      registerSnapshotInfo(e.getKey(), e.getValue(), null);
+    try {
+      // Initializes the snapshots information. This code should basically run
+      // only if mode != CREATE, but if it is, it's no harm as we only open the
+      // reader once and immediately close it.
+      for (Entry<String, String> e : readSnapshotsInfo(dir).entrySet()) {
+        registerSnapshotInfo(e.getKey(), e.getValue(), null);
+      }
+    } catch (RuntimeException e) {
+      writer.close(); // don't leave any open file handles
+      throw e;
+    } catch (IOException e) {
+      writer.close(); // don't leave any open file handles
+      throw e;
     }
   }
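
As the corrected javadoc notes, readSnapshotsInfo is the way to inspect persisted
snapshots without instantiating the policy (e.g., while another process holds the
snapshots directory). A hedged usage sketch; the directory path is illustrative:

    import java.io.File;
    import java.util.Map;
    import org.apache.lucene.index.PersistentSnapshotDeletionPolicy;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;

    class ListSnapshots {
      static void list() throws Exception {
        Directory dir = FSDirectory.open(new File("/path/to/snapshots")); // illustrative path
        try {
          Map<String, String> snapshots = PersistentSnapshotDeletionPolicy.readSnapshotsInfo(dir);
          for (Map.Entry<String, String> e : snapshots.entrySet()) {
            System.out.println("snapshot id=" + e.getKey() + " segmentsFile=" + e.getValue());
          }
        } finally {
          dir.close();
        }
      }
    }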
 

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentInfo.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentInfo.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentInfo.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentInfo.java Tue May 31 11:25:37 2011
@@ -438,7 +438,7 @@ public final class SegmentInfo implement
    */
   public String getNormFileName(int number) {
     if (hasSeparateNorms(number)) {
-      return IndexFileNames.fileNameFromGeneration(name, "s" + number, normGen.get(number));
+      return IndexFileNames.fileNameFromGeneration(name, IndexFileNames.SEPARATE_NORMS_EXTENSION + number, normGen.get(number));
     } else {
       // single file for all norms
       return IndexFileNames.fileNameFromGeneration(name, IndexFileNames.NORMS_EXTENSION, WITHOUT_GEN);

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentInfos.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentInfos.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentInfos.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentInfos.java Tue May 31 11:25:37 2011
@@ -40,6 +40,7 @@ import org.apache.lucene.store.Directory
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.NoSuchDirectoryException;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.ThreadInterruptedException;
 
 /**
@@ -323,17 +324,13 @@ public final class SegmentInfos implemen
       SegmentInfosWriter infosWriter = codecs.getSegmentInfosWriter();
       segnOutput = infosWriter.writeInfos(directory, segmentFileName, this);
       infosWriter.prepareCommit(segnOutput);
-      success = true;
       pendingSegnOutput = segnOutput;
+      success = true;
     } finally {
       if (!success) {
         // We hit an exception above; try to close the file
         // but suppress any exception:
-        try {
-          segnOutput.close();
-        } catch (Throwable t) {
-          // Suppress so we keep throwing the original exception
-        }
+        IOUtils.closeSafely(true, segnOutput);
         try {
           // Try not to leave a truncated segments_N file in
           // the index:
@@ -945,6 +942,8 @@ public final class SegmentInfos implemen
       } finally {
         genOutput.close();
       }
+    } catch (ThreadInterruptedException t) {
+      throw t;
     } catch (Throwable t) {
       // It's OK if we fail to write this file since it's
       // used only as one of the retry fallbacks.
@@ -962,7 +961,6 @@ public final class SegmentInfos implemen
     prepareCommit(dir);
     finishCommit(dir);
   }
-  
 
   public String toString(Directory directory) {
     StringBuilder buffer = new StringBuilder();

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentMerger.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentMerger.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentMerger.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentMerger.java Tue May 31 11:25:37 2011
@@ -27,7 +27,6 @@ import org.apache.lucene.document.Docume
 import org.apache.lucene.index.IndexReader.FieldOption;
 import org.apache.lucene.index.MergePolicy.MergeAbortedException;
 import org.apache.lucene.index.codecs.Codec;
-import org.apache.lucene.index.codecs.CodecProvider;
 import org.apache.lucene.index.codecs.FieldsConsumer;
 import org.apache.lucene.index.codecs.MergeState;
 import org.apache.lucene.index.codecs.PerDocConsumer;
@@ -36,6 +35,7 @@ import org.apache.lucene.store.Directory
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.MultiBits;
 import org.apache.lucene.util.ReaderUtil;
 
@@ -48,10 +48,6 @@ import org.apache.lucene.util.ReaderUtil
  * @see #add
  */
 final class SegmentMerger {
-
-  /** norms header placeholder */
-  static final byte[] NORMS_HEADER = new byte[]{'N','R','M',-1};
-
   private Directory directory;
   private String segment;
   private int termIndexInterval = IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL;
@@ -125,6 +121,12 @@ final class SegmentMerger {
     return mergedDocs;
   }
 
+  /**
+   * NOTE: this method creates a compound file for all files returned by
+   * info.files(). While, in general, these may include separate norms and
+   * deletion files, this SegmentInfo must not reference such files when this
+   * method is called, because they are not allowed within a compound file.
+   */
   final Collection<String> createCompoundFile(String fileName, final SegmentInfo info)
           throws IOException {
 
@@ -132,6 +134,10 @@ final class SegmentMerger {
     Collection<String> files = info.files();
     CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, fileName, checkAbort);
     for (String file : files) {
+      assert !IndexFileNames.matchesExtension(file, IndexFileNames.DELETES_EXTENSION) 
+                : ".del file is not allowed in .cfs: " + file;
+      assert !IndexFileNames.isSeparateNormsFile(file) 
+                : "separate norms file (.s[0-9]+) is not allowed in .cfs: " + file;
       cfsWriter.addFile(file);
     }
 
@@ -140,7 +146,7 @@ final class SegmentMerger {
 
     return files;
   }
-
+  
   private static void addIndexed(IndexReader reader, FieldInfos fInfos,
       Collection<String> names, boolean storeTermVectors,
       boolean storePositionWithTermVector, boolean storeOffsetWithTermVector,
@@ -557,14 +563,13 @@ final class SegmentMerger {
     }
     codec = segmentWriteState.segmentCodecs.codec();
     final FieldsConsumer consumer = codec.fieldsConsumer(segmentWriteState);
-
-    // NOTE: this is silly, yet, necessary -- we create a
-    // MultiBits as our skip docs only to have it broken
-    // apart when we step through the docs enums in
-    // MultiDocsEnum.
-    mergeState.multiDeletedDocs = new MultiBits(bits, bitsStarts);
-
     try {
+      // NOTE: this is silly, yet, necessary -- we create a
+      // MultiBits as our skip docs only to have it broken
+      // apart when we step through the docs enums in
+      // MultiDocsEnum.
+      mergeState.multiDeletedDocs = new MultiBits(bits, bitsStarts);
+      
       consumer.merge(mergeState,
                      new MultiFields(fields.toArray(Fields.EMPTY_ARRAY),
                                      slices.toArray(ReaderUtil.Slice.EMPTY_ARRAY)));
@@ -604,12 +609,13 @@ final class SegmentMerger {
 
   private void mergeNorms() throws IOException {
     IndexOutput output = null;
+    boolean success = false;
     try {
       for (FieldInfo fi : fieldInfos) {
         if (fi.isIndexed && !fi.omitNorms) {
           if (output == null) {
             output = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.NORMS_EXTENSION));
-            output.writeBytes(NORMS_HEADER,NORMS_HEADER.length);
+            output.writeBytes(SegmentNorms.NORMS_HEADER, SegmentNorms.NORMS_HEADER.length);
           }
           for (IndexReader reader : readers) {
             final int maxDoc = reader.maxDoc();
@@ -637,10 +643,9 @@ final class SegmentMerger {
           }
         }
       }
+      success = true;
     } finally {
-      if (output != null) {
-        output.close();
-      }
+      IOUtils.closeSafely(!success, output);
     }
   }
 }

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentNorms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentNorms.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentNorms.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentNorms.java Tue May 31 11:25:37 2011
@@ -33,6 +33,10 @@ import org.apache.lucene.store.IndexOutp
  */
 
 final class SegmentNorms implements Cloneable {
+
+  /** norms header placeholder */
+  static final byte[] NORMS_HEADER = new byte[]{'N','R','M',-1};
+
   int refCount = 1;
 
   // If this instance is a clone, the originalNorm
@@ -219,7 +223,7 @@ final class SegmentNorms implements Clon
     boolean success = false;
     try {
       try {
-        out.writeBytes(SegmentMerger.NORMS_HEADER, 0, SegmentMerger.NORMS_HEADER.length);
+        out.writeBytes(SegmentNorms.NORMS_HEADER, 0, SegmentNorms.NORMS_HEADER.length);
         out.writeBytes(bytes, owner.maxDoc());
       } finally {
         out.close();

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentReader.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentReader.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/SegmentReader.java Tue May 31 11:25:37 2011
@@ -576,7 +576,7 @@ public class SegmentReader extends Index
   }
 
   private void openNorms(Directory cfsDir, int readBufferSize) throws IOException {
-    long nextNormSeek = SegmentMerger.NORMS_HEADER.length; //skip header (header unused for now)
+    long nextNormSeek = SegmentNorms.NORMS_HEADER.length; //skip header (header unused for now)
     int maxDoc = maxDoc();
     for (FieldInfo fi : core.fieldInfos) {
       if (norms.containsKey(fi.name)) {
@@ -621,7 +621,7 @@ public class SegmentReader extends Index
           if (isUnversioned) {
             normSeek = 0;
           } else {
-            normSeek = SegmentMerger.NORMS_HEADER.length;
+            normSeek = SegmentNorms.NORMS_HEADER.length;
           }
         }
 

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java Tue May 31 11:25:37 2011
@@ -54,9 +54,7 @@ final class TermVectorsTermsWriter exten
       fill(state.numDocs);
       assert state.segmentName != null;
       String idxName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.VECTORS_INDEX_EXTENSION);
-      tvx.close();
-      tvf.close();
-      tvd.close();
+      IOUtils.closeSafely(false, tvx, tvf, tvd);
       tvx = tvd = tvf = null;
       if (4+((long) state.numDocs)*16 != state.directory.fileLength(idxName)) {
         throw new RuntimeException("after flush: tvx size mismatch: " + state.numDocs + " docs vs " + state.directory.fileLength(idxName) + " length in bytes of " + idxName + " file exists?=" + state.directory.fileExists(idxName));
@@ -89,18 +87,25 @@ final class TermVectorsTermsWriter exten
 
   private final void initTermVectorsWriter() throws IOException {
     if (tvx == null) {
-
-      // If we hit an exception while init'ing the term
-      // vector output files, we must abort this segment
-      // because those files will be in an unknown
-      // state:
-      tvx = docWriter.directory.createOutput(IndexFileNames.segmentFileName(docWriter.getSegment(), "", IndexFileNames.VECTORS_INDEX_EXTENSION));
-      tvd = docWriter.directory.createOutput(IndexFileNames.segmentFileName(docWriter.getSegment(), "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
-      tvf = docWriter.directory.createOutput(IndexFileNames.segmentFileName(docWriter.getSegment(), "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
-
-      tvx.writeInt(TermVectorsReader.FORMAT_CURRENT);
-      tvd.writeInt(TermVectorsReader.FORMAT_CURRENT);
-      tvf.writeInt(TermVectorsReader.FORMAT_CURRENT);
+      boolean success = false;
+      try {
+        // If we hit an exception while init'ing the term
+        // vector output files, we must abort this segment
+        // because those files will be in an unknown
+        // state:
+        tvx = docWriter.directory.createOutput(IndexFileNames.segmentFileName(docWriter.getSegment(), "", IndexFileNames.VECTORS_INDEX_EXTENSION));
+        tvd = docWriter.directory.createOutput(IndexFileNames.segmentFileName(docWriter.getSegment(), "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
+        tvf = docWriter.directory.createOutput(IndexFileNames.segmentFileName(docWriter.getSegment(), "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
+
+        tvx.writeInt(TermVectorsReader.FORMAT_CURRENT);
+        tvd.writeInt(TermVectorsReader.FORMAT_CURRENT);
+        tvf.writeInt(TermVectorsReader.FORMAT_CURRENT);
+        success = true;
+      } finally {
+        if (!success) {
+          IOUtils.closeSafely(true, tvx, tvd, tvf);
+        }
+      }
 
       lastDocID = 0;
     }
@@ -139,7 +144,7 @@ final class TermVectorsTermsWriter exten
       }
     }
 
-    assert lastDocID == docState.docID;
+    assert lastDocID == docState.docID: "lastDocID=" + lastDocID + " docState.docID=" + docState.docID;
 
     lastDocID++;
 
@@ -152,21 +157,27 @@ final class TermVectorsTermsWriter exten
   public void abort() {
     hasVectors = false;
     try {
-      IOUtils.closeSafely(tvx, tvd, tvf);
-    } catch (IOException ignored) {
+      IOUtils.closeSafely(true, tvx, tvd, tvf);
+    } catch (IOException e) {
+      // cannot happen since we suppress exceptions
+      throw new RuntimeException(e);
     }
+    
     try {
       docWriter.directory.deleteFile(IndexFileNames.segmentFileName(docWriter.getSegment(), "", IndexFileNames.VECTORS_INDEX_EXTENSION));
     } catch (IOException ignored) {
     }
+    
     try {
       docWriter.directory.deleteFile(IndexFileNames.segmentFileName(docWriter.getSegment(), "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
     } catch (IOException ignored) {
     }
+    
     try {
       docWriter.directory.deleteFile(IndexFileNames.segmentFileName(docWriter.getSegment(), "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
     } catch (IOException ignored) {
     }
+    
     tvx = tvd = tvf = null;
     lastDocID = 0;
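
abort() now closes the three vector outputs through
IOUtils.closeSafely(true, ...) and then deletes each file inside its own
try/catch, so a failure on one file can never prevent cleanup of the
others; rethrowing the "impossible" IOException makes the suppression
assumption explicit. A small sketch of that best-effort style; the
deleteQuietly helper and the file names are illustrative, not Lucene API:

    import java.io.File;

    final class BestEffortCleanup {
      // Attempt every deletion; never let one failure stop the rest.
      static void deleteQuietly(File... files) {
        for (File f : files) {
          try {
            f.delete(); // returns false (rather than throwing) on failure
          } catch (SecurityException ignored) {
            // best effort only: an abort path must not throw
          }
        }
      }

      public static void main(String[] args) {
        deleteQuietly(new File("_0.tvx"), new File("_0.tvd"), new File("_0.tvf"));
      }
    }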
 

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/TermVectorsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/TermVectorsWriter.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/TermVectorsWriter.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/TermVectorsWriter.java Tue May 31 11:25:37 2011
@@ -31,15 +31,22 @@ final class TermVectorsWriter {
   private FieldInfos fieldInfos;
 
   public TermVectorsWriter(Directory directory, String segment,
-                           FieldInfos fieldInfos)
-    throws IOException {
-    // Open files for TermVector storage
-    tvx = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
-    tvx.writeInt(TermVectorsReader.FORMAT_CURRENT);
-    tvd = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
-    tvd.writeInt(TermVectorsReader.FORMAT_CURRENT);
-    tvf = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
-    tvf.writeInt(TermVectorsReader.FORMAT_CURRENT);
+                           FieldInfos fieldInfos) throws IOException {
+    boolean success = false;
+    try {
+      // Open files for TermVector storage
+      tvx = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
+      tvx.writeInt(TermVectorsReader.FORMAT_CURRENT);
+      tvd = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
+      tvd.writeInt(TermVectorsReader.FORMAT_CURRENT);
+      tvf = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
+      tvf.writeInt(TermVectorsReader.FORMAT_CURRENT);
+      success = true;
+    } finally {
+      if (!success) {
+        IOUtils.closeSafely(true, tvx, tvd, tvf);
+      }
+    }
 
     this.fieldInfos = fieldInfos;
   }
@@ -51,8 +58,7 @@ final class TermVectorsWriter {
    * @param vectors
    * @throws IOException
    */
-  public final void addAllDocVectors(TermFreqVector[] vectors)
-      throws IOException {
+  public final void addAllDocVectors(TermFreqVector[] vectors) throws IOException {
 
     tvx.writeLong(tvd.getFilePointer());
     tvx.writeLong(tvf.getFilePointer());
@@ -187,6 +193,6 @@ final class TermVectorsWriter {
   final void close() throws IOException {
     // make an effort to close all streams we can but remember and re-throw
     // the first exception encountered in this process
-    IOUtils.closeSafely(tvx, tvd, tvf);
+    IOUtils.closeSafely(false, tvx, tvd, tvf);
   }
 }
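
The constructor change above follows the same shape: the three outputs
are assigned to fields as they are opened, so when a later createOutput
or writeInt throws, the finally block can close exactly the handles that
exist (nulls are skipped) while the original exception propagates. A
self-contained sketch with plain java.io streams; the nested-finally
close() shows the shape that a single closeSafely(false, ...) call
replaces:

    import java.io.Closeable;
    import java.io.FileOutputStream;
    import java.io.IOException;

    final class ThreeFileWriter implements Closeable {
      private FileOutputStream tvx, tvd, tvf; // each stays null until opened

      ThreeFileWriter(String segment) throws IOException {
        boolean success = false;
        try {
          tvx = new FileOutputStream(segment + ".tvx");
          tvd = new FileOutputStream(segment + ".tvd");
          tvf = new FileOutputStream(segment + ".tvf");
          success = true;
        } finally {
          if (!success) {
            // close whatever was opened; close() failures are
            // suppressed so the original exception wins
            for (Closeable c : new Closeable[]{tvx, tvd, tvf}) {
              try {
                if (c != null) c.close();
              } catch (IOException ignored) {
              }
            }
          }
        }
      }

      @Override
      public void close() throws IOException {
        // the old nested-finally shape: every stream gets a chance
        // to close, and the first exception propagates
        try {
          if (tvx != null) tvx.close();
        } finally {
          try {
            if (tvd != null) tvd.close();
          } finally {
            if (tvf != null) tvf.close();
          }
        }
      }
    }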

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/TermsHash.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/TermsHash.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/TermsHash.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/TermsHash.java Tue May 31 11:25:37 2011
@@ -54,7 +54,6 @@ final class TermsHash extends InvertedDo
 
   final boolean trackAllocations;
 
-
   public TermsHash(final DocumentsWriterPerThread docWriter, final TermsHashConsumer consumer, boolean trackAllocations, final TermsHash nextTermsHash) {
     this.docState = docWriter.docState;
     this.docWriter = docWriter;
@@ -108,11 +107,11 @@ final class TermsHash extends InvertedDo
     }
 
     for (final Map.Entry<FieldInfo,InvertedDocConsumerPerField> entry : fieldsToFlush.entrySet()) {
-        TermsHashPerField perField = (TermsHashPerField) entry.getValue();
-        childFields.put(entry.getKey(), perField.consumer);
-        if (nextTermsHash != null) {
-          nextChildFields.put(entry.getKey(), perField.nextPerField);
-        }
+      TermsHashPerField perField = (TermsHashPerField) entry.getValue();
+      childFields.put(entry.getKey(), perField.consumer);
+      if (nextTermsHash != null) {
+        nextChildFields.put(entry.getKey(), perField.nextPerField);
+      }
     }
 
     consumer.flush(childFields, state);
@@ -134,12 +133,9 @@ final class TermsHash extends InvertedDo
 
   @Override
   void finishDocument() throws IOException {
-    try {
-      consumer.finishDocument(this);
-    } finally {
-      if (nextTermsHash != null) {
-        nextTermsHash.consumer.finishDocument(nextTermsHash);
-      }
+    consumer.finishDocument(this);
+    if (nextTermsHash != null) {
+      nextTermsHash.consumer.finishDocument(nextTermsHash);
     }
   }
 

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/ThreadAffinityDocumentsWriterThreadPool.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/ThreadAffinityDocumentsWriterThreadPool.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/ThreadAffinityDocumentsWriterThreadPool.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/ThreadAffinityDocumentsWriterThreadPool.java Tue May 31 11:25:37 2011
@@ -18,7 +18,6 @@ package org.apache.lucene.index;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.lucene.document.Document;
 import org.apache.lucene.index.DocumentsWriterPerThreadPool.ThreadState; //javadoc
 
 /**
@@ -48,12 +47,10 @@ public class ThreadAffinityDocumentsWrit
   }
 
   @Override
-  public ThreadState getAndLock(Thread requestingThread, DocumentsWriter documentsWriter, Document doc) {
+  public ThreadState getAndLock(Thread requestingThread, DocumentsWriter documentsWriter) {
     ThreadState threadState = threadBindings.get(requestingThread);
-    if (threadState != null) {
-      if (threadState.tryLock()) {
-        return threadState;
-      }
+    if (threadState != null && threadState.tryLock()) {
+      return threadState;
     }
     ThreadState minThreadState = null;
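
getAndLock no longer needs the Document, and the collapsed null check
plus tryLock reads as: reuse the thread's bound state only if it can be
taken without blocking, otherwise fall through and pick another state. A
sketch of that affinity-with-fallback shape, assuming ReentrantLock as
the per-state lock and eliding the real least-contended scan:

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.locks.ReentrantLock;

    final class AffinityPool {
      private final ConcurrentHashMap<Thread, ReentrantLock> bindings =
          new ConcurrentHashMap<Thread, ReentrantLock>();

      ReentrantLock getAndLock(Thread t) {
        ReentrantLock bound = bindings.get(t);
        if (bound != null && bound.tryLock()) {
          return bound; // fast path: our own state was free
        }
        // slow path (elided): scan all states, lock the least loaded
        // one, and rebind it to this thread for next time
        ReentrantLock fresh = new ReentrantLock();
        fresh.lock();
        bindings.put(t, fresh);
        return fresh;
      }
    }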
 

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/BlockTermsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/BlockTermsWriter.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/BlockTermsWriter.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/BlockTermsWriter.java Tue May 31 11:25:37 2011
@@ -31,6 +31,7 @@ import org.apache.lucene.store.RAMOutput
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CodecUtil;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.RamUsageEstimator;
 
 // TODO: currently we encode all terms between two indexed
@@ -66,24 +67,29 @@ public class BlockTermsWriter extends Fi
 
   //private final String segment;
 
-  public BlockTermsWriter(
-      TermsIndexWriterBase termsIndexWriter,
-      SegmentWriteState state,
-      PostingsWriterBase postingsWriter)
-    throws IOException
-  {
+  public BlockTermsWriter(TermsIndexWriterBase termsIndexWriter,
+      SegmentWriteState state, PostingsWriterBase postingsWriter)
+      throws IOException {
     final String termsFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecIdAsString(), TERMS_EXTENSION);
     this.termsIndexWriter = termsIndexWriter;
     out = state.directory.createOutput(termsFileName);
-    fieldInfos = state.fieldInfos;
-    writeHeader(out);
-    currentField = null;
-    this.postingsWriter = postingsWriter;
-    //segment = state.segmentName;
-
-    //System.out.println("BTW.init seg=" + state.segmentName);
-
-    postingsWriter.start(out);                          // have consumer write its format/header
+    boolean success = false;
+    try {
+      fieldInfos = state.fieldInfos;
+      writeHeader(out);
+      currentField = null;
+      this.postingsWriter = postingsWriter;
+      //segment = state.segmentName;
+      
+      //System.out.println("BTW.init seg=" + state.segmentName);
+      
+      postingsWriter.start(out); // have consumer write its format/header
+      success = true;
+    } finally {
+      if (!success) {
+        IOUtils.closeSafely(true, out);
+      }
+    }
   }
   
   protected void writeHeader(IndexOutput out) throws IOException {
@@ -130,20 +136,11 @@ public class BlockTermsWriter extends Fi
       }
       writeTrailer(dirStart);
     } finally {
-      try {
-        out.close();
-      } finally {
-        try {
-          postingsWriter.close();
-        } finally {
-          termsIndexWriter.close();
-        }
-      }
+      IOUtils.closeSafely(false, out, postingsWriter, termsIndexWriter);
     }
   }
 
   protected void writeTrailer(long dirStart) throws IOException {
-    // TODO Auto-generated method stub
     out.seek(CodecUtil.headerLength(CODEC_NAME));
     out.writeLong(dirStart);    
   }

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/CodecProvider.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/CodecProvider.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/CodecProvider.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/CodecProvider.java Tue May 31 11:25:37 2011
@@ -89,6 +89,15 @@ public class CodecProvider {
     return codec;
   }
 
+  /**
+   * Returns <code>true</code> iff a codec with the given name is registered.
+   * @param name codec name
+   * @return <code>true</code> iff a codec with the given name is registered, otherwise <code>false</code>.
+   */
+  public synchronized boolean isCodecRegistered(String name) {
+    return codecs.containsKey(name);
+  }
+
   public SegmentInfosWriter getSegmentInfosWriter() {
     return infosWriter;
   }
@@ -147,6 +156,14 @@ public class CodecProvider {
   }
 
   /**
+   * Returns <code>true</code> if this provider has a Codec registered for this
+   * field.
+   */
+  public synchronized boolean hasFieldCodec(String name) {
+    return perFieldMap.containsKey(name);
+  }
+
+  /**
    * Returns the default {@link Codec} for this {@link CodecProvider}
    * 
    * @return the default {@link Codec} for this {@link CodecProvider}
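
The two new query methods mirror the existing synchronized mutators, so
callers can probe the registry without hitting the failure path of a
lookup. Hypothetical usage, assuming this branch's register(Codec) method
and a PulsingCodec(int freqCutoff) constructor; note that the
check-then-act pair releases the provider's lock between the two calls,
so it only suits single-threaded setup such as tests:

    import org.apache.lucene.index.codecs.CodecProvider;
    import org.apache.lucene.index.codecs.pulsing.PulsingCodec;

    final class RegisterOnce {
      // Probe, then register. Each call synchronizes on the provider,
      // but the pair as a whole is not atomic.
      static void ensurePulsing(CodecProvider codecProvider) {
        if (!codecProvider.isCodecRegistered("Pulsing")) {
          codecProvider.register(new PulsingCodec(1));
        }
      }
    }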

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosWriter.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosWriter.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosWriter.java Tue May 31 11:25:37 2011
@@ -24,6 +24,7 @@ import org.apache.lucene.index.SegmentIn
 import org.apache.lucene.store.ChecksumIndexOutput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.util.IOUtils;
 
 /**
  * Default implementation of {@link SegmentInfosWriter}.
@@ -56,16 +57,24 @@ public class DefaultSegmentInfosWriter e
   public IndexOutput writeInfos(Directory dir, String segmentFileName, SegmentInfos infos)
           throws IOException {
     IndexOutput out = createOutput(dir, segmentFileName);
-    out.writeInt(FORMAT_CURRENT); // write FORMAT
-    out.writeLong(infos.version);
-    out.writeInt(infos.counter); // write counter
-    out.writeLong(infos.getGlobalFieldMapVersion());
-    out.writeInt(infos.size()); // write infos
-    for (SegmentInfo si : infos) {
-      si.write(out);
+    boolean success = false;
+    try {
+      out.writeInt(FORMAT_CURRENT); // write FORMAT
+      out.writeLong(infos.version);
+      out.writeInt(infos.counter); // write counter
+      out.writeLong(infos.getGlobalFieldMapVersion());
+      out.writeInt(infos.size()); // write infos
+      for (SegmentInfo si : infos) {
+        si.write(out);
+      }
+      out.writeStringStringMap(infos.getUserData());
+      success = true;
+      return out;
+    } finally {
+      if (!success) {
+        IOUtils.closeSafely(true, out);
+      }
     }
-    out.writeStringStringMap(infos.getUserData());
-    return out;
   }
   
   protected IndexOutput createOutput(Directory dir, String segmentFileName)
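
writeInfos is a little unusual in that its success path must hand the
output back still open (the caller keeps writing and eventually commits
it), so the success flag guards only the failure path: close and suppress
on error, return the open handle otherwise. The same shape in a
self-contained sketch; the java.io names and the header bytes are
illustrative:

    import java.io.FileOutputStream;
    import java.io.IOException;

    final class OpenOnSuccess {
      // Returns an OPEN stream on success; closes it only on failure.
      static FileOutputStream writePrologue(String name) throws IOException {
        FileOutputStream out = new FileOutputStream(name);
        boolean success = false;
        try {
          out.write(new byte[]{'S', 'I', 'S', 0}); // hypothetical header
          success = true;
          return out; // caller keeps writing, then closes/commits
        } finally {
          if (!success) {
            try {
              out.close();
            } catch (IOException suppressed) {
              // keep the exception that made the write fail
            }
          }
        }
      }
    }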

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/FixedGapTermsIndexReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/FixedGapTermsIndexReader.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/FixedGapTermsIndexReader.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/FixedGapTermsIndexReader.java Tue May 31 11:25:37 2011
@@ -24,6 +24,7 @@ import org.apache.lucene.index.FieldInfo
 import org.apache.lucene.index.SegmentInfo;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CodecUtil;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.PagedBytes;
 import org.apache.lucene.util.packed.PackedInts;
 
@@ -108,6 +109,7 @@ public class FixedGapTermsIndexReader ex
       }
       success = true;
     } finally {
+      if (!success) IOUtils.closeSafely(true, in);
       if (indexDivisor > 0) {
         in.close();
         in = null;

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/FixedGapTermsIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/FixedGapTermsIndexWriter.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/FixedGapTermsIndexWriter.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/FixedGapTermsIndexWriter.java Tue May 31 11:25:37 2011
@@ -25,6 +25,7 @@ import org.apache.lucene.index.SegmentWr
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CodecUtil;
 import org.apache.lucene.util.ArrayUtil;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.packed.PackedInts;
 
 import java.util.List;
@@ -58,9 +59,17 @@ public class FixedGapTermsIndexWriter ex
     final String indexFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecIdAsString(), TERMS_INDEX_EXTENSION);
     termIndexInterval = state.termIndexInterval;
     out = state.directory.createOutput(indexFileName);
-    fieldInfos = state.fieldInfos;
-    writeHeader(out);
-    out.writeInt(termIndexInterval);
+    boolean success = false;
+    try {
+      fieldInfos = state.fieldInfos;
+      writeHeader(out);
+      out.writeInt(termIndexInterval);
+      success = true;
+    } finally {
+      if (!success) {
+        IOUtils.closeSafely(true, out);
+      }
+    }
   }
   
   protected void writeHeader(IndexOutput out) throws IOException {
@@ -202,33 +211,37 @@ public class FixedGapTermsIndexWriter ex
     }
   }
 
-  @Override
   public void close() throws IOException {
-    final long dirStart = out.getFilePointer();
-    final int fieldCount = fields.size();
-
-    int nonNullFieldCount = 0;
-    for(int i=0;i<fieldCount;i++) {
-      SimpleFieldWriter field = fields.get(i);
-      if (field.numIndexTerms > 0) {
-        nonNullFieldCount++;
+    boolean success = false;
+    try {
+      final long dirStart = out.getFilePointer();
+      final int fieldCount = fields.size();
+      
+      int nonNullFieldCount = 0;
+      for(int i=0;i<fieldCount;i++) {
+        SimpleFieldWriter field = fields.get(i);
+        if (field.numIndexTerms > 0) {
+          nonNullFieldCount++;
+        }
       }
-    }
-
-    out.writeVInt(nonNullFieldCount);
-    for(int i=0;i<fieldCount;i++) {
-      SimpleFieldWriter field = fields.get(i);
-      if (field.numIndexTerms > 0) {
-        out.writeVInt(field.fieldInfo.number);
-        out.writeVInt(field.numIndexTerms);
-        out.writeVLong(field.termsStart);
-        out.writeVLong(field.indexStart);
-        out.writeVLong(field.packedIndexStart);
-        out.writeVLong(field.packedOffsetsStart);
+      
+      out.writeVInt(nonNullFieldCount);
+      for(int i=0;i<fieldCount;i++) {
+        SimpleFieldWriter field = fields.get(i);
+        if (field.numIndexTerms > 0) {
+          out.writeVInt(field.fieldInfo.number);
+          out.writeVInt(field.numIndexTerms);
+          out.writeVLong(field.termsStart);
+          out.writeVLong(field.indexStart);
+          out.writeVLong(field.packedIndexStart);
+          out.writeVLong(field.packedOffsetsStart);
+        }
       }
+      writeTrailer(dirStart);
+      success = true;
+    } finally {
+      IOUtils.closeSafely(!success, out);
     }
-    writeTrailer(dirStart);
-    out.close();
   }
 
   protected void writeTrailer(long dirStart) throws IOException {

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/TermsIndexWriterBase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/TermsIndexWriterBase.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/TermsIndexWriterBase.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/TermsIndexWriterBase.java Tue May 31 11:25:37 2011
@@ -19,10 +19,12 @@ package org.apache.lucene.index.codecs;
 
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.util.BytesRef;
+
+import java.io.Closeable;
 import java.io.IOException;
 
 /** @lucene.experimental */
-public abstract class TermsIndexWriterBase {
+public abstract class TermsIndexWriterBase implements Closeable {
 
   public abstract class FieldWriter {
     public abstract boolean checkIndexTerm(BytesRef text, TermStats stats) throws IOException;
@@ -31,6 +33,4 @@ public abstract class TermsIndexWriterBa
   }
 
   public abstract FieldWriter addField(FieldInfo fieldInfo, long termsFilePointer) throws IOException;
-
-  public abstract void close() throws IOException;
 }
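
Making the base class implement Closeable, instead of declaring its own
abstract close(), is what lets BlockTermsWriter above hand the
IndexOutput, the postings writer, and the terms index writer to one
varargs IOUtils.closeSafely(false, ...) call. A sketch of why the shared
interface matters; the writer names here are hypothetical:

    import java.io.Closeable;
    import java.io.IOException;

    abstract class IndexStructureWriter implements Closeable {
      // each concrete writer finishes its trailer, then closes its output
    }

    final class Shutdown {
      static void close(Closeable output, IndexStructureWriter a,
                        IndexStructureWriter b) throws IOException {
        IOException first = null;
        for (Closeable c : new Closeable[]{output, a, b}) {
          try {
            if (c != null) c.close();
          } catch (IOException e) {
            if (first == null) first = e; // remember, keep closing the rest
          }
        }
        if (first != null) throw first;
      }
    }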

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexReader.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexReader.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexReader.java Tue May 31 11:25:37 2011
@@ -33,11 +33,11 @@ import org.apache.lucene.store.Directory
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CodecUtil;
-import org.apache.lucene.util.automaton.fst.Builder;
-import org.apache.lucene.util.automaton.fst.BytesRefFSTEnum;
-import org.apache.lucene.util.automaton.fst.FST;
-import org.apache.lucene.util.automaton.fst.PositiveIntOutputs;
-import org.apache.lucene.util.automaton.fst.Util; // for toDot
+import org.apache.lucene.util.fst.Builder;
+import org.apache.lucene.util.fst.BytesRefFSTEnum;
+import org.apache.lucene.util.fst.FST;
+import org.apache.lucene.util.fst.PositiveIntOutputs;
+import org.apache.lucene.util.fst.Util; // for toDot
 
 /** See {@link VariableGapTermsIndexWriter}
  * 

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexWriter.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexWriter.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexWriter.java Tue May 31 11:25:37 2011
@@ -28,9 +28,10 @@ import org.apache.lucene.index.SegmentWr
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CodecUtil;
-import org.apache.lucene.util.automaton.fst.Builder;
-import org.apache.lucene.util.automaton.fst.FST;
-import org.apache.lucene.util.automaton.fst.PositiveIntOutputs;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.fst.Builder;
+import org.apache.lucene.util.fst.FST;
+import org.apache.lucene.util.fst.PositiveIntOutputs;
 
 /**
  * Selects index terms according to provided pluggable
@@ -159,9 +160,17 @@ public class VariableGapTermsIndexWriter
   public VariableGapTermsIndexWriter(SegmentWriteState state, IndexTermSelector policy) throws IOException {
     final String indexFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecIdAsString(), TERMS_INDEX_EXTENSION);
     out = state.directory.createOutput(indexFileName);
-    fieldInfos = state.fieldInfos;
-    this.policy = policy;
-    writeHeader(out);
+    boolean success = false;
+    try {
+      fieldInfos = state.fieldInfos;
+      this.policy = policy;
+      writeHeader(out);
+      success = true;
+    } finally {
+      if (!success) {
+        IOUtils.closeSafely(true, out);
+      }
+    }
   }
   
   protected void writeHeader(IndexOutput out) throws IOException {
@@ -265,8 +274,8 @@ public class VariableGapTermsIndexWriter
     }
   }
 
-  @Override
   public void close() throws IOException {
+    try {
     final long dirStart = out.getFilePointer();
     final int fieldCount = fields.size();
 
@@ -287,8 +296,10 @@ public class VariableGapTermsIndexWriter
       }
     }
     writeTrailer(dirStart);
+    } finally {
     out.close();
   }
+  }
 
   protected void writeTrailer(long dirStart) throws IOException {
     out.seek(CodecUtil.headerLength(CODEC_NAME));

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/intblock/VariableIntBlockIndexOutput.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/intblock/VariableIntBlockIndexOutput.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/intblock/VariableIntBlockIndexOutput.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/intblock/VariableIntBlockIndexOutput.java Tue May 31 11:25:37 2011
@@ -41,6 +41,7 @@ public abstract class VariableIntBlockIn
   protected final IndexOutput out;
 
   private int upto;
+  private boolean hitExcDuringWrite;
 
   // TODO what Var-Var codecs exist in practice... and what are their block sizes like?
   // if it's less than 128 we should set that as max and use byte?
@@ -105,19 +106,23 @@ public abstract class VariableIntBlockIn
 
   @Override
   public void write(int v) throws IOException {
+    hitExcDuringWrite = true;
     upto -= add(v)-1;
+    hitExcDuringWrite = false;
     assert upto >= 0;
   }
 
   @Override
   public void close() throws IOException {
     try {
-      // stuff 0s in until the "real" data is flushed:
-      int stuffed = 0;
-      while(upto > stuffed) {
-        upto -= add(0)-1;
-        assert upto >= 0;
-        stuffed += 1;
+      if (!hitExcDuringWrite) {
+        // stuff 0s in until the "real" data is flushed:
+        int stuffed = 0;
+        while(upto > stuffed) {
+          upto -= add(0)-1;
+          assert upto >= 0;
+          stuffed += 1;
+        }
       }
     } finally {
       out.close();
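
The new flag brackets each write: it is raised before add() and lowered
only after add() returns, so if add() throws mid-block the flag stays set
and close() skips the zero-stuffing, which would otherwise append padding
after a corrupt, partially written block. The same bracketing in a
self-contained miniature; add() here is a stand-in for the real abstract
method, whose return value feeds the upto bookkeeping:

    import java.io.IOException;

    final class ZeroStuffSketch {
      private boolean hitExcDuringWrite;
      private int upto = 3; // unfilled slots in the current block

      void write(int v) throws IOException {
        hitExcDuringWrite = true;  // pessimistic: stays true if add() throws
        upto -= add(v) - 1;
        hitExcDuringWrite = false; // the write completed
      }

      void close() throws IOException {
        if (!hitExcDuringWrite) {
          // stuff 0s in until the "real" data is flushed
          int stuffed = 0;
          while (upto > stuffed) {
            upto -= add(0) - 1;
            stuffed += 1;
          }
        }
        // the underlying output is closed either way (elided here)
      }

      // Stand-in for the abstract add(): pretend one int fills one slot.
      private int add(int v) throws IOException {
        if (v == Integer.MIN_VALUE) {
          throw new IOException("simulated failure inside add()");
        }
        return 1;
      }
    }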

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingCodec.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingCodec.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingCodec.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingCodec.java Tue May 31 11:25:37 2011
@@ -29,7 +29,6 @@ import org.apache.lucene.index.codecs.Po
 import org.apache.lucene.index.codecs.standard.StandardPostingsWriter;
 import org.apache.lucene.index.codecs.PostingsReaderBase;
 import org.apache.lucene.index.codecs.standard.StandardPostingsReader;
-import org.apache.lucene.index.codecs.DocValuesConsumer;
 import org.apache.lucene.index.codecs.DefaultDocValuesProducer;
 import org.apache.lucene.index.codecs.FieldsConsumer;
 import org.apache.lucene.index.codecs.FieldsProducer;
@@ -45,6 +44,7 @@ import org.apache.lucene.index.codecs.Te
 import org.apache.lucene.index.codecs.standard.StandardCodec;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 
 /** This codec "inlines" the postings for terms that have
  *  low docFreq.  It wraps another codec, which is used for
@@ -88,7 +88,7 @@ public class PulsingCodec extends Codec 
       success = true;
     } finally {
       if (!success) {
-        pulsingWriter.close();
+        IOUtils.closeSafely(true, pulsingWriter);
       }
     }
 
@@ -100,11 +100,7 @@ public class PulsingCodec extends Codec 
       return ret;
     } finally {
       if (!success) {
-        try {
-          pulsingWriter.close();
-        } finally {
-          indexWriter.close();
-        }
+        IOUtils.closeSafely(true, pulsingWriter, indexWriter);
       }
     }
   }
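
Both hunks above are the layered-construction case of the same idiom: the
pulsing writer is built first and the terms index writer second, so a
failure at any later layer must close every layer already built, in one
pass that suppresses secondary exceptions. A self-contained sketch;
open(), compose-by-returning, and the layer names are illustrative:

    import java.io.Closeable;
    import java.io.IOException;

    final class LayeredBuild {
      // Build inner layer first; on any later failure, close everything
      // built so far and let the original exception propagate.
      static Closeable build() throws IOException {
        final Closeable postings = open("postings");
        Closeable index = null;
        boolean success = false;
        try {
          index = open("terms index"); // may throw
          success = true;
          return index;                // stands in for the composed consumer
        } finally {
          if (!success) {
            for (Closeable c : new Closeable[]{postings, index}) {
              try {
                if (c != null) c.close();
              } catch (IOException suppressed) {
                // keep the original failure
              }
            }
          }
        }
      }

      private static Closeable open(final String name) {
        return new Closeable() {
          public void close() {
            System.out.println("closed " + name);
          }
        };
      }
    }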

Modified: lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingPostingsWriterImpl.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingPostingsWriterImpl.java?rev=1129631&r1=1129630&r2=1129631&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingPostingsWriterImpl.java (original)
+++ lucene/dev/branches/docvalues/lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingPostingsWriterImpl.java Tue May 31 11:25:37 2011
@@ -71,8 +71,6 @@ public final class PulsingPostingsWriter
    *  for this term) is <= maxPositions, then the postings are
    *  inlined into terms dict */
   public PulsingPostingsWriterImpl(int maxPositions, PostingsWriterBase wrappedPostingsWriter) throws IOException {
-    super();
-
     pending = new Position[maxPositions];
     for(int i=0;i<maxPositions;i++) {
       pending[i] = new Position();