Posted to commits@lucene.apache.org by sa...@apache.org on 2011/05/30 16:51:37 UTC

svn commit: r1129205 [4/7] - in /lucene/dev/branches/solr2452: ./ dev-tools/eclipse/ dev-tools/idea/.idea/ dev-tools/idea/lucene/contrib/spellchecker/ dev-tools/idea/modules/suggest/ dev-tools/maven/lucene/contrib/ dev-tools/maven/lucene/contrib/spellc...

Modified: lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockVariableIntBlockCodec.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockVariableIntBlockCodec.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockVariableIntBlockCodec.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockVariableIntBlockCodec.java Mon May 30 14:51:25 2011
@@ -46,6 +46,7 @@ import org.apache.lucene.store.Directory
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 
 /**
  * A silly test codec to verify core support for variable
@@ -102,34 +103,42 @@ public class MockVariableIntBlockCodec e
     @Override
     public IntIndexOutput createOutput(Directory dir, String fileName) throws IOException {
       final IndexOutput out = dir.createOutput(fileName);
-      out.writeInt(baseBlockSize);
-      return new VariableIntBlockIndexOutput(out, 2*baseBlockSize) {
-
-        int pendingCount;
-        final int[] buffer = new int[2+2*baseBlockSize];
-
-        @Override
-        protected int add(int value) throws IOException {
-          assert value >= 0;
-          buffer[pendingCount++] = value;
-          // silly variable block length int encoder: if
-          // first value <= 3, we write N vints at once;
-          // else, 2*N
-          final int flushAt = buffer[0] <= 3 ? baseBlockSize : 2*baseBlockSize;
-
-          // intentionally be non-causal here:
-          if (pendingCount == flushAt+1) {
-            for(int i=0;i<flushAt;i++) {
-              out.writeVInt(buffer[i]);
+      boolean success = false;
+      try {
+        out.writeInt(baseBlockSize);
+        VariableIntBlockIndexOutput ret = new VariableIntBlockIndexOutput(out, 2*baseBlockSize) {
+          int pendingCount;
+          final int[] buffer = new int[2+2*baseBlockSize];
+          
+          @Override
+          protected int add(int value) throws IOException {
+            assert value >= 0;
+            buffer[pendingCount++] = value;
+            // silly variable block length int encoder: if
+            // first value <= 3, we write N vints at once;
+            // else, 2*N
+            final int flushAt = buffer[0] <= 3 ? baseBlockSize : 2*baseBlockSize;
+            
+            // intentionally be non-causal here:
+            if (pendingCount == flushAt+1) {
+              for(int i=0;i<flushAt;i++) {
+                out.writeVInt(buffer[i]);
+              }
+              buffer[0] = buffer[flushAt];
+              pendingCount = 1;
+              return flushAt;
+            } else {
+              return 0;
             }
-            buffer[0] = buffer[flushAt];
-            pendingCount = 1;
-            return flushAt;
-          } else {
-            return 0;
           }
+        };
+        success = true;
+        return ret;
+      } finally {
+        if (!success) {
+          IOUtils.closeSafely(true, out);
         }
-      };
+      }
     }
   }
 

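The hunk above (and several others in this commit) applies one idiom: flip a success flag only after every step that follows dir.createOutput() has completed, and close the freshly opened output in a finally block when the flag is still false, so a failure while writing the header does not leak a file handle. A minimal standalone sketch of that idiom, with an illustrative Closeable standing in for the IndexOutput (none of the names below are part of this commit):

    import java.io.Closeable;
    import java.io.IOException;

    final class GuardedOpenSketch {

      /** Illustrative stand-in for createOutput() followed by header writes. */
      static Closeable openAndInit(boolean failDuringInit) throws IOException {
        final Closeable resource = new Closeable() {
          public void close() {
            System.out.println("resource closed");
          }
        };
        boolean success = false;
        try {
          if (failDuringInit) {
            throw new IOException("simulated failure while writing the header");
          }
          success = true;
          return resource;
        } finally {
          if (!success) {
            try {
              // best-effort close of the half-initialized resource; suppress any
              // secondary failure so the caller sees the original exception
              resource.close();
            } catch (Exception suppressed) {
            }
          }
        }
      }

      public static void main(String[] args) throws IOException {
        try {
          openAndInit(true);
        } catch (IOException expected) {
          System.out.println("caught: " + expected.getMessage());
        }
        openAndInit(false).close();
      }
    }
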
Modified: lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/mockrandom/MockRandomCodec.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/mockrandom/MockRandomCodec.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/mockrandom/MockRandomCodec.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/mockrandom/MockRandomCodec.java Mon May 30 14:51:25 2011
@@ -136,8 +136,11 @@ public class MockRandomCodec extends Cod
 
     final String seedFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, SEED_EXT);
     final IndexOutput out = state.directory.createOutput(seedFileName);
-    out.writeLong(seed);
-    out.close();
+    try {
+      out.writeLong(seed);
+    } finally {
+      out.close();
+    }
 
     final Random random = new Random(seed);
     

Modified: lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexOutput.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexOutput.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexOutput.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexOutput.java Mon May 30 14:51:25 2011
@@ -20,6 +20,7 @@ package org.apache.lucene.index.codecs.m
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.CodecUtil;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.index.codecs.sep.IntIndexOutput;
 import java.io.IOException;
 
@@ -36,7 +37,15 @@ public class MockSingleIntIndexOutput ex
 
   public MockSingleIntIndexOutput(Directory dir, String fileName) throws IOException {
     out = dir.createOutput(fileName);
-    CodecUtil.writeHeader(out, CODEC, VERSION_CURRENT);
+    boolean success = false;
+    try {
+      CodecUtil.writeHeader(out, CODEC, VERSION_CURRENT);
+      success = true;
+    } finally {
+      if (!success) {
+        IOUtils.closeSafely(true, out);
+      }
+    }
   }
 
   /** Write an int to the primary file */

Modified: lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/PreFlexFieldsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/PreFlexFieldsWriter.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/PreFlexFieldsWriter.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/PreFlexFieldsWriter.java Mon May 30 14:51:25 2011
@@ -17,22 +17,23 @@ package org.apache.lucene.index.codecs.p
  * limitations under the License.
  */
 
-import org.apache.lucene.util.BytesRef;
+import java.io.IOException;
+import java.util.Comparator;
+
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.SegmentWriteState;
 import org.apache.lucene.index.codecs.FieldsConsumer;
-import org.apache.lucene.index.codecs.TermsConsumer;
 import org.apache.lucene.index.codecs.PostingsConsumer;
 import org.apache.lucene.index.codecs.TermStats;
-import org.apache.lucene.index.codecs.standard.DefaultSkipListWriter;
+import org.apache.lucene.index.codecs.TermsConsumer;
 import org.apache.lucene.index.codecs.preflex.PreFlexCodec;
-import org.apache.lucene.index.CorruptIndexException;
-import org.apache.lucene.index.IndexFileNames;
-import org.apache.lucene.index.SegmentWriteState;
-import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.codecs.preflex.TermInfo;
+import org.apache.lucene.index.codecs.standard.DefaultSkipListWriter;
 import org.apache.lucene.store.IndexOutput;
-
-import java.io.IOException;
-import java.util.Comparator;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 
 class PreFlexFieldsWriter extends FieldsConsumer {
 
@@ -76,11 +77,7 @@ class PreFlexFieldsWriter extends Fields
 
   @Override
   public void close() throws IOException {
-    termsOut.close();
-    freqOut.close();
-    if (proxOut != null) {
-      proxOut.close();
-    }
+    IOUtils.closeSafely(false, termsOut, freqOut, proxOut);
   }
 
   private class PreFlexTermsWriter extends TermsConsumer {

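Replacing the sequential close() calls with IOUtils.closeSafely(false, termsOut, freqOut, proxOut) means a failure while closing termsOut no longer prevents freqOut and proxOut from being closed, and a null proxOut is simply skipped. A rough standalone equivalent of that behaviour, under the assumption that the first failure should be rethrown after all arguments have been attempted (closeAll below is a hypothetical helper, not the real IOUtils API):

    import java.io.Closeable;
    import java.io.IOException;

    final class CloseAllSketch {

      /** Closes every non-null argument, then rethrows the first failure, if any. */
      static void closeAll(Closeable... resources) throws IOException {
        IOException first = null;
        for (Closeable c : resources) {
          if (c == null) {
            continue;                 // e.g. proxOut may legitimately be null
          }
          try {
            c.close();
          } catch (IOException e) {
            if (first == null) {
              first = e;              // remember the first failure, keep closing the rest
            }
          }
        }
        if (first != null) {
          throw first;
        }
      }

      public static void main(String[] args) throws IOException {
        Closeable ok = new Closeable() {
          public void close() {
            System.out.println("closed");
          }
        };
        Closeable bad = new Closeable() {
          public void close() throws IOException {
            throw new IOException("boom");
          }
        };
        try {
          closeAll(bad, ok, null);    // "closed" still prints before the IOException surfaces
        } catch (IOException expected) {
          System.out.println("caught: " + expected.getMessage());
        }
      }
    }
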
Modified: lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/TermInfosWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/TermInfosWriter.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/TermInfosWriter.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/TermInfosWriter.java Mon May 30 14:51:25 2011
@@ -18,19 +18,23 @@ package org.apache.lucene.index.codecs.p
  */
 
 
+import java.io.Closeable;
 import java.io.IOException;
-import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.UnicodeUtil;
+
 import org.apache.lucene.index.FieldInfos;
-import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.index.codecs.preflex.TermInfo;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.CharsRef;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.UnicodeUtil;
 
 
 /** This stores a monotonically increasing set of <Term, TermInfo> pairs in a
   Directory.  A TermInfos can be written once, in order.  */
 
-final class TermInfosWriter {
+final class TermInfosWriter implements Closeable {
   /** The file format version, a negative number. */
   public static final int FORMAT = -3;
 
@@ -83,8 +87,26 @@ final class TermInfosWriter {
                   int interval)
        throws IOException {
     initialize(directory, segment, fis, interval, false);
+    boolean success = false;
+    try {
     other = new TermInfosWriter(directory, segment, fis, interval, true);
     other.other = this;
+      success = true;
+    } finally {
+      if (!success) {
+        try {
+          IOUtils.closeSafely(true, output);
+        } catch (IOException e) {
+          // cannot happen since we suppress exceptions
+          throw new RuntimeException(e);
+        }
+
+        try {
+          directory.deleteFile(segment + (isIndex ? ".tii" : ".tis"));
+        } catch (IOException ignored) {
+        }
+      }
+    }
   }
 
   private TermInfosWriter(Directory directory, String segment, FieldInfos fis,
@@ -98,23 +120,41 @@ final class TermInfosWriter {
     fieldInfos = fis;
     isIndex = isi;
     output = directory.createOutput(segment + (isIndex ? ".tii" : ".tis"));
+    boolean success = false;
+    try {
     output.writeInt(FORMAT_CURRENT);              // write format
     output.writeLong(0);                          // leave space for size
     output.writeInt(indexInterval);               // write indexInterval
     output.writeInt(skipInterval);                // write skipInterval
     output.writeInt(maxSkipLevels);               // write maxSkipLevels
     assert initUTF16Results();
+      success = true;
+    } finally {
+      if (!success) {
+        try {
+          IOUtils.closeSafely(true, output);
+        } catch (IOException e) {
+          // cannot happen since we suppress exceptions
+          throw new RuntimeException(e);
+        }
+
+        try {
+          directory.deleteFile(segment + (isIndex ? ".tii" : ".tis"));
+        } catch (IOException ignored) {
+        }
+      }
+    }
   }
 
   // Currently used only by assert statements
-  UnicodeUtil.UTF16Result utf16Result1;
-  UnicodeUtil.UTF16Result utf16Result2;
+  CharsRef utf16Result1;
+  CharsRef utf16Result2;
   private final BytesRef scratchBytes = new BytesRef();
 
   // Currently used only by assert statements
   private boolean initUTF16Results() {
-    utf16Result1 = new UnicodeUtil.UTF16Result();
-    utf16Result2 = new UnicodeUtil.UTF16Result();
+    utf16Result1 = new CharsRef(10);
+    utf16Result2 = new CharsRef(10);
     return true;
   }
 
@@ -145,8 +185,8 @@ final class TermInfosWriter {
       len = utf16Result2.length;
 
     for(int i=0;i<len;i++) {
-      final char ch1 = utf16Result1.result[i];
-      final char ch2 = utf16Result2.result[i];
+      final char ch1 = utf16Result1.chars[i];
+      final char ch2 = utf16Result2.chars[i];
       if (ch1 != ch2)
         return ch1-ch2;
     }
@@ -215,13 +255,18 @@ final class TermInfosWriter {
   }
 
   /** Called to complete TermInfos creation. */
-  void close() throws IOException {
-    output.seek(4);          // write size after format
-    output.writeLong(size);
-    output.close();
-
-    if (!isIndex)
-      other.close();
+  public void close() throws IOException {
+    try {
+      output.seek(4);          // write size after format
+      output.writeLong(size);
+    } finally {
+      try {
+        output.close();
+      } finally {
+        if (!isIndex) {
+          other.close();
+        }
+      }
+    }
   }
-
 }

Modified: lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/search/QueryUtils.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/search/QueryUtils.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/search/QueryUtils.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/search/QueryUtils.java Mon May 30 14:51:25 2011
@@ -1,8 +1,24 @@
 package org.apache.lucene.search;
 
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 import java.io.IOException;
 import java.util.Random;
-import java.lang.reflect.Method;
 
 import junit.framework.Assert;
 
@@ -23,21 +39,6 @@ import org.apache.lucene.util._TestUtil;
 
 import static org.apache.lucene.util.LuceneTestCase.TEST_VERSION_CURRENT;
 
-/**
- * Copyright 2005 Apache Software Foundation
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
 
 
 

Modified: lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/store/MockDirectoryWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/store/MockDirectoryWrapper.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/store/MockDirectoryWrapper.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/store/MockDirectoryWrapper.java Mon May 30 14:51:25 2011
@@ -68,24 +68,25 @@ public class MockDirectoryWrapper extend
   boolean trackDiskUsage = false;
   private Set<String> unSyncedFiles;
   private Set<String> createdFiles;
-  Set<String> openFilesForWrite = new HashSet<String>();
+  private Set<String> openFilesForWrite = new HashSet<String>();
+  Set<String> openLocks = Collections.synchronizedSet(new HashSet<String>());
   volatile boolean crashed;
   private ThrottledIndexOutput throttledOutput;
   private Throttling throttling = Throttling.SOMETIMES;
 
   // use this for tracking files for crash.
   // additionally: provides debugging information in case you leave one open
-  Map<Closeable,Exception> openFileHandles = Collections.synchronizedMap(new IdentityHashMap<Closeable,Exception>());
+  private Map<Closeable,Exception> openFileHandles = Collections.synchronizedMap(new IdentityHashMap<Closeable,Exception>());
 
   // NOTE: we cannot initialize the Map here due to the
   // order in which our constructor actually does this
   // member initialization vs when it calls super.  It seems
   // like super is called, then our members are initialized:
-  Map<String,Integer> openFiles;
+  private Map<String,Integer> openFiles;
 
   // Only tracked if noDeleteOpenFile is true: if an attempt
   // is made to delete an open file, we enroll it here.
-  Set<String> openFilesDeleted;
+  private Set<String> openFilesDeleted;
 
   private synchronized void init() {
     if (openFiles == null) {
@@ -107,6 +108,12 @@ public class MockDirectoryWrapper extend
     this.randomState = new Random(random.nextInt());
     this.throttledOutput = new ThrottledIndexOutput(ThrottledIndexOutput
         .mBitsToBytes(40 + randomState.nextInt(10)), 5 + randomState.nextInt(5), null);
+    // force wrapping of lockfactory
+    try {
+      setLockFactory(new MockLockFactoryWrapper(this, delegate.getLockFactory()));
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
     init();
   }
 
@@ -127,7 +134,7 @@ public class MockDirectoryWrapper extend
     SOMETIMES,
     /** never throttle output */
     NEVER
-  };
+  }
   
   public void setThrottling(Throttling throttling) {
     this.throttling = throttling;
@@ -208,6 +215,7 @@ public class MockDirectoryWrapper extend
 
   public synchronized void clearCrash() throws IOException {
     crashed = false;
+    openLocks.clear();
   }
 
   public void setMaxSizeInBytes(long maxSize) {
@@ -362,9 +370,10 @@ public class MockDirectoryWrapper extend
         ramdir.fileMap.put(name, file);
       }
     }
+    
     //System.out.println(Thread.currentThread().getName() + ": MDW: create " + name);
     IndexOutput io = new MockIndexOutputWrapper(this, delegate.createOutput(name), name);
-    openFileHandles.put(io, new RuntimeException("unclosed IndexOutput"));
+    addFileHandle(io, name, false);
     openFilesForWrite.add(name);
     
     // throttling REALLY slows down tests, so don't do it very often for SOMETIMES.
@@ -379,6 +388,18 @@ public class MockDirectoryWrapper extend
     }
   }
 
+  private void addFileHandle(Closeable c, String name, boolean input) {
+    Integer v = openFiles.get(name);
+    if (v != null) {
+      v = Integer.valueOf(v.intValue()+1);
+      openFiles.put(name, v);
+    } else {
+      openFiles.put(name, Integer.valueOf(1));
+    }
+    
+    openFileHandles.put(c, new RuntimeException("unclosed Index" + (input ? "Input" : "Output") + ": " + name));
+  }
+  
   @Override
   public synchronized IndexInput openInput(String name) throws IOException {
     maybeYield();
@@ -391,16 +412,8 @@ public class MockDirectoryWrapper extend
       throw fillOpenTrace(new IOException("MockDirectoryWrapper: file \"" + name + "\" is still open for writing"), name, false);
     }
 
-    if (openFiles.containsKey(name)) {
-      Integer v =  openFiles.get(name);
-      v = Integer.valueOf(v.intValue()+1);
-      openFiles.put(name, v);
-    } else {
-      openFiles.put(name, Integer.valueOf(1));
-    }
-
     IndexInput ii = new MockIndexInputWrapper(this, name, delegate.openInput(name));
-    openFileHandles.put(ii, new RuntimeException("unclosed IndexInput"));
+    addFileHandle(ii, name, true);
     return ii;
   }
 
@@ -447,6 +460,9 @@ public class MockDirectoryWrapper extend
       // super() does not throw IOException currently:
       throw new RuntimeException("MockDirectoryWrapper: cannot close: there are still open files: " + openFiles, cause);
     }
+    if (noDeleteOpenFile && openLocks.size() > 0) {
+      throw new RuntimeException("MockDirectoryWrapper: cannot close: there are still open locks: " + openLocks);
+    }
     open = false;
     if (checkIndexOnClose) {
       if (LuceneTestCase.VERBOSE) {
@@ -465,6 +481,31 @@ public class MockDirectoryWrapper extend
     delegate.close();
   }
 
+  private synchronized void removeOpenFile(Closeable c, String name) {
+    Integer v = openFiles.get(name);
+    // Could be null when crash() was called
+    if (v != null) {
+      if (v.intValue() == 1) {
+        openFiles.remove(name);
+        openFilesDeleted.remove(name);
+      } else {
+        v = Integer.valueOf(v.intValue()-1);
+        openFiles.put(name, v);
+      }
+    }
+
+    openFileHandles.remove(c);
+  }
+  
+  public synchronized void removeIndexOutput(IndexOutput out, String name) {
+    openFilesForWrite.remove(name);
+    removeOpenFile(out, name);
+  }
+  
+  public synchronized void removeIndexInput(IndexInput in, String name) {
+    removeOpenFile(in, name);
+  }
+  
   private CodecProvider codecProvider;
 
   // We pass this CodecProvider to checkIndex when dir is closed...

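The new addFileHandle()/removeOpenFile() pair centralizes the per-file open count that was previously updated inline in openInput() and in the two wrapper close() methods, so inputs and outputs are tracked the same way. A minimal sketch of that reference-counting scheme in isolation (OpenFileTracker is an illustrative name, not a class in this commit):

    import java.util.HashMap;
    import java.util.Map;

    final class OpenFileTracker {

      private final Map<String, Integer> openFiles = new HashMap<String, Integer>();

      /** Called when a file is opened for reading or writing. */
      synchronized void opened(String name) {
        Integer v = openFiles.get(name);
        openFiles.put(name, v == null ? 1 : v + 1);
      }

      /** Called when one handle on the file is closed. */
      synchronized void closed(String name) {
        Integer v = openFiles.get(name);
        if (v == null) {
          return;                       // e.g. the directory was "crashed" in between
        }
        if (v == 1) {
          openFiles.remove(name);
        } else {
          openFiles.put(name, v - 1);
        }
      }

      synchronized boolean anythingStillOpen() {
        return !openFiles.isEmpty();
      }

      public static void main(String[] args) {
        OpenFileTracker tracker = new OpenFileTracker();
        tracker.opened("_0.tis");
        tracker.opened("_0.tis");       // a second handle on the same file
        tracker.closed("_0.tis");
        System.out.println(tracker.anythingStillOpen());   // true: one handle left
        tracker.closed("_0.tis");
        System.out.println(tracker.anythingStillOpen());   // false
      }
    }
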
Modified: lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/store/MockIndexInputWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/store/MockIndexInputWrapper.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/store/MockIndexInputWrapper.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/store/MockIndexInputWrapper.java Mon May 30 14:51:25 2011
@@ -31,8 +31,7 @@ public class MockIndexInputWrapper exten
   private IndexInput delegate;
   private boolean isClone;
 
-  /** Construct an empty output buffer. 
-   * @throws IOException */
+  /** Wraps the given IndexInput so MockDirectoryWrapper can track when it is closed. */
   public MockIndexInputWrapper(MockDirectoryWrapper dir, String name, IndexInput delegate) {
     this.name = name;
     this.dir = dir;
@@ -41,24 +40,17 @@ public class MockIndexInputWrapper exten
 
   @Override
   public void close() throws IOException {
-    delegate.close();
-    // Pending resolution on LUCENE-686 we may want to
-    // remove the conditional check so we also track that
-    // all clones get closed:
-    if (!isClone) {
-      synchronized(dir) {
-        Integer v = dir.openFiles.get(name);
-        // Could be null when MockRAMDirectory.crash() was called
-        if (v != null) {
-          if (v.intValue() == 1) {
-            dir.openFiles.remove(name);
-            dir.openFilesDeleted.remove(name);
-          } else {
-            v = Integer.valueOf(v.intValue()-1);
-            dir.openFiles.put(name, v);
-          }
-        }
-        dir.openFileHandles.remove(this);
+    try {
+      // turn on the following to look for leaks closing inputs,
+      // after fixing TestTransactions
+      // dir.maybeThrowDeterministicException();
+    } finally {
+      delegate.close();
+      // Pending resolution on LUCENE-686 we may want to
+      // remove the conditional check so we also track that
+      // all clones get closed:
+      if (!isClone) {
+        dir.removeIndexInput(this, name);
       }
     }
   }

Modified: lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/store/MockIndexOutputWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/store/MockIndexOutputWrapper.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/store/MockIndexOutputWrapper.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/store/MockIndexOutputWrapper.java Mon May 30 14:51:25 2011
@@ -45,19 +45,19 @@ public class MockIndexOutputWrapper exte
 
   @Override
   public void close() throws IOException {
-    dir.maybeThrowDeterministicException();
-    delegate.close();
-    if (dir.trackDiskUsage) {
-      // Now compute actual disk usage & track the maxUsedSize
-      // in the MockDirectoryWrapper:
-      long size = dir.getRecomputedActualSizeInBytes();
-      if (size > dir.maxUsedSize) {
-        dir.maxUsedSize = size;
+    try {
+      dir.maybeThrowDeterministicException();
+    } finally {
+      delegate.close();
+      if (dir.trackDiskUsage) {
+        // Now compute actual disk usage & track the maxUsedSize
+        // in the MockDirectoryWrapper:
+        long size = dir.getRecomputedActualSizeInBytes();
+        if (size > dir.maxUsedSize) {
+          dir.maxUsedSize = size;
+        }
       }
-    }
-    synchronized(dir) {
-      dir.openFileHandles.remove(this);
-      dir.openFilesForWrite.remove(name);
+      dir.removeIndexOutput(this, name);
     }
   }
 

Modified: lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/util/LuceneTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/util/LuceneTestCase.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/util/LuceneTestCase.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/util/LuceneTestCase.java Mon May 30 14:51:25 2011
@@ -28,6 +28,7 @@ import java.lang.reflect.Constructor;
 import java.lang.reflect.Method;
 import java.lang.reflect.Modifier;
 import java.util.*;
+import java.util.Map.Entry;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
@@ -132,7 +133,7 @@ public abstract class LuceneTestCase ext
   }
   
   /** set of directories we created, in afterclass we try to clean these up */
-  static final Set<String> tempDirs = Collections.synchronizedSet(new HashSet<String>());
+  private static final Map<File, StackTraceElement[]> tempDirs = Collections.synchronizedMap(new HashMap<File, StackTraceElement[]>());
 
   // by default we randomly pick a different codec for
   // each test case (non-J4 tests) and each test class (J4
@@ -180,7 +181,7 @@ public abstract class LuceneTestCase ext
     SETUP,   // test has called setUp()
     RANTEST, // test is running
     TEARDOWN // test has called tearDown()
-  };
+  }
   
   /**
    * Some tests expect the directory to contain a single segment, and want to do tests on that segment's reader.
@@ -454,11 +455,21 @@ public abstract class LuceneTestCase ext
     }
     // clear out any temp directories if we can
     if (!testsFailed) {
-      for (String path : tempDirs) {
+      for (Entry<File, StackTraceElement[]> entry : tempDirs.entrySet()) {
         try {
-          _TestUtil.rmDir(new File(path));
+          _TestUtil.rmDir(entry.getKey());
         } catch (IOException e) {
           e.printStackTrace();
+          System.err.println("path " + entry.getKey() + " allocated from");
+          // first two STE's are Java's
+          StackTraceElement[] elements = entry.getValue();
+          for (int i = 2; i < elements.length; i++) {
+            StackTraceElement ste = elements[i];            
+            // print only our code's stack information
+            if (ste.getClassName().indexOf("org.apache.lucene") == -1) break; 
+            System.err.println("\t" + ste);
+          }
+          fail("could not remove temp dir: " + entry.getKey());
         }
       }
     }
@@ -961,7 +972,10 @@ public abstract class LuceneTestCase ext
 
         clazz = Class.forName(fsdirClass).asSubclass(FSDirectory.class);
       }
-      MockDirectoryWrapper dir = new MockDirectoryWrapper(random, newFSDirectoryImpl(clazz, f, lf));
+      MockDirectoryWrapper dir = new MockDirectoryWrapper(random, newFSDirectoryImpl(clazz, f));
+      if (lf != null) {
+        dir.setLockFactory(lf);
+      }
       stores.put(dir, Thread.currentThread().getStackTrace());
       return dir;
     } catch (Exception e) {
@@ -1095,7 +1109,7 @@ public abstract class LuceneTestCase ext
   }
 
   private static Directory newFSDirectoryImpl(
-      Class<? extends FSDirectory> clazz, File file, LockFactory lockFactory)
+      Class<? extends FSDirectory> clazz, File file)
       throws IOException {
     FSDirectory d = null;
     try {
@@ -1106,12 +1120,14 @@ public abstract class LuceneTestCase ext
     } catch (Exception e) {
       d = FSDirectory.open(file);
     }
-    if (lockFactory != null) {
-      d.setLockFactory(lockFactory);
-    }
     return d;
   }
 
+  /** Registers a temp file that will be deleted when tests are done. */
+  public static void registerTempFile(File tmpFile) {
+    tempDirs.put(tmpFile.getAbsoluteFile(), Thread.currentThread().getStackTrace());
+  }
+  
   static Directory newDirectoryImpl(Random random, String clazzName) {
     if (clazzName.equals("random"))
       clazzName = randomDirectory(random);
@@ -1121,11 +1137,11 @@ public abstract class LuceneTestCase ext
       final Class<? extends Directory> clazz = Class.forName(clazzName).asSubclass(Directory.class);
       // If it is a FSDirectory type, try its ctor(File)
       if (FSDirectory.class.isAssignableFrom(clazz)) {
-        final File tmpFile = File.createTempFile("test", "tmp", TEMP_DIR);
+        final File tmpFile = _TestUtil.createTempFile("test", "tmp", TEMP_DIR);
         tmpFile.delete();
         tmpFile.mkdir();
-        tempDirs.add(tmpFile.getAbsolutePath());
-        return newFSDirectoryImpl(clazz.asSubclass(FSDirectory.class), tmpFile, null);
+        registerTempFile(tmpFile);
+        return newFSDirectoryImpl(clazz.asSubclass(FSDirectory.class), tmpFile);
       }
 
       // try empty ctor
@@ -1383,6 +1399,11 @@ public abstract class LuceneTestCase ext
     }
 
     @Override
+    public synchronized boolean hasFieldCodec(String name) {
+      return true; // we have a codec for every field
+    }
+
+    @Override
     public synchronized String toString() {
       return "RandomCodecProvider: " + previousMappings.toString();
     }

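tempDirs is now a Map keyed by File whose values are the stack trace captured at registration time, so when afterClass cannot delete a temp directory it can print where that directory was allocated before failing the test. A small sketch of that record-the-allocation-site technique (ResourceRegistry is an illustrative name, not part of this commit):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    final class ResourceRegistry {

      private static final Map<String, StackTraceElement[]> REGISTERED =
          Collections.synchronizedMap(new HashMap<String, StackTraceElement[]>());

      /** Records the current stack so cleanup failures can name their allocation site. */
      static void register(String resourceName) {
        REGISTERED.put(resourceName, Thread.currentThread().getStackTrace());
      }

      static void reportLeak(String resourceName) {
        StackTraceElement[] elements = REGISTERED.get(resourceName);
        if (elements == null) {
          return;
        }
        System.err.println(resourceName + " allocated from");
        // the first two frames are getStackTrace() and register() themselves
        for (int i = 2; i < elements.length; i++) {
          System.err.println("\t" + elements[i]);
        }
      }

      public static void main(String[] args) {
        register("tempdir-42");
        reportLeak("tempdir-42");
      }
    }
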
Modified: lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/util/ThrottledIndexOutput.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/util/ThrottledIndexOutput.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/util/ThrottledIndexOutput.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/util/ThrottledIndexOutput.java Mon May 30 14:51:25 2011
@@ -73,9 +73,11 @@ public class ThrottledIndexOutput extend
 
   @Override
   public void close() throws IOException {
-    sleep(closeDelayMillis + getDelay(true));
-    delegate.close();
-
+    try {
+      sleep(closeDelayMillis + getDelay(true));
+    } finally {
+      delegate.close();
+    }
   }
 
   @Override

Modified: lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/util/_TestUtil.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/util/_TestUtil.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/util/_TestUtil.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test-framework/org/apache/lucene/util/_TestUtil.java Mon May 30 14:51:25 2011
@@ -54,8 +54,8 @@ public class _TestUtil {
   /** Returns temp dir, containing String arg in its name;
    *  does not create the directory. */
   public static File getTempDir(String desc) {
-    File f = new File(LuceneTestCase.TEMP_DIR, desc + "." + new Random().nextLong());
-    LuceneTestCase.tempDirs.add(f.getAbsolutePath());
+    File f = new File(LuceneTestCase.TEMP_DIR, desc + "." + LuceneTestCase.random.nextLong());
+    LuceneTestCase.registerTempFile(f);
     return f;
   }
 
@@ -91,7 +91,7 @@ public class _TestUtil {
     rmDir(destDir);
     
     destDir.mkdir();
-    LuceneTestCase.tempDirs.add(destDir.getAbsolutePath());
+    LuceneTestCase.registerTempFile(destDir);
     
     while (entries.hasMoreElements()) {
       ZipEntry entry = entries.nextElement();
@@ -373,4 +373,51 @@ public class _TestUtil {
               field.isStoreOffsetWithTermVector(), field.getOmitNorms(), false, field.getOmitTermFreqAndPositions());
     }
   }
+  
+  /** 
+   * insecure, fast version of File.createTempFile
+   * uses Random instead of SecureRandom.
+   */
+  public static File createTempFile(String prefix, String suffix, File directory)
+      throws IOException {
+    // Force a prefix null check first
+    if (prefix.length() < 3) {
+      throw new IllegalArgumentException("prefix must be at least 3 characters");
+    }
+    String newSuffix = suffix == null ? ".tmp" : suffix;
+    File result;
+    do {
+      result = genTempFile(prefix, newSuffix, directory);
+    } while (!result.createNewFile());
+    return result;
+  }
+
+  /* Temp file counter */
+  private static int counter = 0;
+
+  /* identifier for different VM processes */
+  private static int counterBase = 0;
+
+  private static class TempFileLocker {};
+  private static TempFileLocker tempFileLocker = new TempFileLocker();
+
+  private static File genTempFile(String prefix, String suffix, File directory) {
+    int identify = 0;
+
+    synchronized (tempFileLocker) {
+      if (counter == 0) {
+        int newInt = new Random().nextInt();
+        counter = ((newInt / 65535) & 0xFFFF) + 0x2710;
+        counterBase = counter;
+      }
+      identify = counter++;
+    }
+
+    StringBuilder newName = new StringBuilder();
+    newName.append(prefix);
+    newName.append(counterBase);
+    newName.append(identify);
+    newName.append(suffix);
+    return new File(directory, newName.toString());
+  }
 }

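_TestUtil.createTempFile above trades File.createTempFile's SecureRandom for a counter seeded once from java.util.Random, and loops on File.createNewFile() so that the atomic create, not the generated name, guarantees uniqueness. A compact sketch of the same retry loop (fastTempFile is a hypothetical helper and uses java.io.tmpdir rather than LuceneTestCase.TEMP_DIR):

    import java.io.File;
    import java.io.IOException;
    import java.util.Random;

    final class FastTempFileSketch {

      private static final Random NAME_RANDOM = new Random();

      /** Keeps proposing names until createNewFile() atomically claims an unused one. */
      static File fastTempFile(String prefix, String suffix, File directory) throws IOException {
        if (prefix.length() < 3) {
          throw new IllegalArgumentException("prefix must be at least 3 characters");
        }
        File candidate;
        do {
          // cheap, non-secure name; uniqueness comes from createNewFile(), not the name
          long id = NAME_RANDOM.nextLong() & Long.MAX_VALUE;
          candidate = new File(directory, prefix + id + suffix);
        } while (!candidate.createNewFile());
        return candidate;
      }

      public static void main(String[] args) throws IOException {
        File tmpDir = new File(System.getProperty("java.io.tmpdir"));
        File f = fastTempFile("test", ".tmp", tmpDir);
        System.out.println("created " + f);
        f.delete();
      }
    }
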
Modified: lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java Mon May 30 14:51:25 2011
@@ -30,6 +30,7 @@ import org.apache.lucene.document.Field.
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.index.codecs.CodecProvider;
 import org.apache.lucene.index.codecs.mocksep.MockSepCodec;
+import org.apache.lucene.index.codecs.pulsing.PulsingCodec;
 import org.apache.lucene.index.codecs.simpletext.SimpleTextCodec;
 import org.apache.lucene.index.codecs.standard.StandardCodec;
 import org.apache.lucene.search.DocIdSetIterator;
@@ -74,7 +75,7 @@ public class TestAddIndexes extends Luce
     writer.close();
 
     writer = newWriter(aux2, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
-    // add 40 documents in compound files
+    // add 50 documents in compound files
     addDocs2(writer, 50);
     assertEquals(50, writer.maxDoc());
     writer.close();
@@ -1084,4 +1085,126 @@ public class TestAddIndexes extends Luce
     assertEquals("Only one compound segment should exist", 4, dir.listAll().length);
   }
   
+  // LUCENE-3126: tests that if a non-CFS segment is copied, it is converted to
+  // a CFS, given MP preferences
+  public void testCopyIntoCFS() throws Exception {
+    // create an index, no CFS (so we can assert that existing segments are not affected)
+    Directory target = newDirectory();
+    LogMergePolicy lmp = newLogMergePolicy(false);
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null).setMergePolicy(lmp);
+    IndexWriter w = new IndexWriter(target, conf);
+    w.addDocument(new Document());
+    w.commit();
+    assertFalse(w.segmentInfos.info(0).getUseCompoundFile());
+
+    // prepare second index, no-CFS too + .del file + separate norms file
+    Directory src = newDirectory();
+    LogMergePolicy lmp2 = newLogMergePolicy(false);
+    IndexWriterConfig conf2 = newIndexWriterConfig(TEST_VERSION_CURRENT,
+        new MockAnalyzer(random)).setMergePolicy(lmp2);
+    IndexWriter w2 = new IndexWriter(src, conf2);
+    Document doc = new Document();
+    doc.add(new Field("c", "some text", Store.YES, Index.ANALYZED));
+    w2.addDocument(doc);
+    doc = new Document();
+    doc.add(new Field("d", "delete", Store.NO, Index.NOT_ANALYZED_NO_NORMS));
+    w2.addDocument(doc);
+    w2.commit();
+    w2.deleteDocuments(new Term("d", "delete"));
+    w2.commit();
+    w2.close();
+
+    // create separate norms file
+    IndexReader r = IndexReader.open(src, false);
+    r.setNorm(0, "c", (byte) 1);
+    r.close();
+    assertTrue(".del file not found", src.fileExists("_0_1.del"));
+    assertTrue("separate norms file not found", src.fileExists("_0_1.s0"));
+    
+    // Case 1: force 'CFS' on target
+    lmp.setUseCompoundFile(true);
+    lmp.setNoCFSRatio(1.0);
+    w.addIndexes(src);
+    w.commit();
+    assertFalse("existing segments should not be modified by addIndexes", w.segmentInfos.info(0).getUseCompoundFile());
+    assertTrue("segment should have been converted to a CFS by addIndexes", w.segmentInfos.info(1).getUseCompoundFile());
+    assertTrue(".del file not found", target.fileExists("_1_1.del"));
+    assertTrue("separate norms file not found", target.fileExists("_1_1.s0"));
+
+    // Case 2: LMP disallows CFS
+    lmp.setUseCompoundFile(false);
+    w.addIndexes(src);
+    w.commit();
+    assertFalse("segment should not have been converted to a CFS by addIndexes if MP disallows", w.segmentInfos.info(2).getUseCompoundFile());
+
+    w.close();
+
+    // cleanup
+    src.close();
+    target.close();
+  }
+  
+  /*
+   * simple test that ensures we get the expected exceptions 
+   */
+  public void testAddIndexMissingCodec() throws IOException {
+    Directory toAdd = newDirectory();
+    {
+      IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
+          new MockAnalyzer(random));
+      CodecProvider provider = new CodecProvider();
+      provider.register(new StandardCodec());
+      conf.setCodecProvider(provider);
+      IndexWriter w = new IndexWriter(toAdd, conf);
+      Document doc = new Document();
+      doc.add(newField("foo", "bar", Index.NOT_ANALYZED));
+      w.addDocument(doc);
+      w.close();
+    }
+    {
+      Directory dir = newDirectory();
+      IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
+          new MockAnalyzer(random));
+      CodecProvider provider = new CodecProvider();
+      provider.register(new PulsingCodec(1 + random.nextInt(10)));
+      conf.setCodecProvider(provider);
+      IndexWriter w = new IndexWriter(dir, conf);
+      try {
+        w.addIndexes(toAdd);
+        fail("no such codec");
+      } catch (IllegalArgumentException ex) {
+        // expected
+      }
+      w.close();
+      IndexReader open = IndexReader.open(dir);
+      assertEquals(0, open.numDocs());
+      open.close();
+      dir.close();
+    }
+
+    {
+      Directory dir = newDirectory();
+      IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
+          new MockAnalyzer(random));
+      CodecProvider provider = new CodecProvider();
+      provider.register(new PulsingCodec(1 + random.nextInt(10)));
+      conf.setCodecProvider(provider);
+      IndexWriter w = new IndexWriter(dir, conf);
+      IndexReader indexReader = IndexReader.open(toAdd);
+      try {
+        w.addIndexes(indexReader);
+        fail("no such codec");
+      } catch (IllegalArgumentException ex) {
+        // expected
+      }
+      indexReader.close();
+      w.close();
+      IndexReader open = IndexReader.open(dir);
+      assertEquals(0, open.numDocs());
+      open.close();
+      dir.close();
+    }
+    toAdd.close();
+  }
+
 }

Modified: lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestCompoundFile.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestCompoundFile.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestCompoundFile.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestCompoundFile.java Mon May 30 14:51:25 2011
@@ -648,4 +648,25 @@ public class TestCompoundFile extends Lu
         }
 
     }
+    
+   public void testAddExternalFile() throws IOException {
+       createSequenceFile(dir, "d1", (byte) 0, 15);
+
+       Directory newDir = newDirectory();
+       CompoundFileWriter csw = new CompoundFileWriter(newDir, "d.csf");
+       csw.addFile("d1", dir);
+       csw.close();
+
+       CompoundFileReader csr = new CompoundFileReader(newDir, "d.csf");
+       IndexInput expected = dir.openInput("d1");
+       IndexInput actual = csr.openInput("d1");
+       assertSameStreams("d1", expected, actual);
+       assertSameSeekBehavior("d1", expected, actual);
+       expected.close();
+       actual.close();
+       csr.close();
+       
+       newDir.close();
+   }
+
 }

Modified: lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestCrash.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestCrash.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestCrash.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestCrash.java Mon May 30 14:51:25 2011
@@ -176,6 +176,7 @@ public class TestCrash extends LuceneTes
     reader = IndexReader.open(dir, false);
     assertEquals(157, reader.numDocs());
     reader.close();
+    dir.clearCrash();
     dir.close();
   }
 

Modified: lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexReader.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexReader.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexReader.java Mon May 30 14:51:25 2011
@@ -1688,6 +1688,7 @@ public class TestIndexReader extends Luc
     r2.close();
     assertTrue(ints == ints2);
 
+    writer.close();
     dir.close();
   }
 
@@ -1735,6 +1736,7 @@ public class TestIndexReader extends Luc
     assertTrue(((SegmentReader) subs[1]).readOnly);
     assertTrue(ints == ints2);
 
+    writer.close();
     dir.close();
   }
 

Modified: lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java Mon May 30 14:51:25 2011
@@ -68,6 +68,7 @@ import org.apache.lucene.store.NoLockFac
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.SingleInstanceLockFactory;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.CharsRef;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.ThreadInterruptedException;
 import org.apache.lucene.util.UnicodeUtil;
@@ -1631,7 +1632,7 @@ public class TestIndexWriter extends Luc
   public void testAllUnicodeChars() throws Throwable {
 
     BytesRef utf8 = new BytesRef(10);
-    UnicodeUtil.UTF16Result utf16 = new UnicodeUtil.UTF16Result();
+    CharsRef utf16 = new CharsRef(10);
     char[] chars = new char[2];
     for(int ch=0;ch<0x0010FFFF;ch++) {
 
@@ -1654,7 +1655,7 @@ public class TestIndexWriter extends Luc
       assertEquals("codepoint " + ch, s1, s2);
 
       UnicodeUtil.UTF8toUTF16(utf8.bytes, 0, utf8.length, utf16);
-      assertEquals("codepoint " + ch, s1, new String(utf16.result, 0, utf16.length));
+      assertEquals("codepoint " + ch, s1, new String(utf16.chars, 0, utf16.length));
 
       byte[] b = s1.getBytes("UTF-8");
       assertEquals(utf8.length, b.length);
@@ -1721,7 +1722,7 @@ public class TestIndexWriter extends Luc
     char[] expected = new char[20];
 
     BytesRef utf8 = new BytesRef(20);
-    UnicodeUtil.UTF16Result utf16 = new UnicodeUtil.UTF16Result();
+    CharsRef utf16 = new CharsRef(20);
 
     int num = 100000 * RANDOM_MULTIPLIER;
     for (int iter = 0; iter < num; iter++) {
@@ -1738,62 +1739,7 @@ public class TestIndexWriter extends Luc
       UnicodeUtil.UTF8toUTF16(utf8.bytes, 0, utf8.length, utf16);
       assertEquals(utf16.length, 20);
       for(int i=0;i<20;i++)
-        assertEquals(expected[i], utf16.result[i]);
-    }
-  }
-
-  // LUCENE-510
-  public void testIncrementalUnicodeStrings() throws Throwable {
-    char[] buffer = new char[20];
-    char[] expected = new char[20];
-
-    BytesRef utf8 = new BytesRef(new byte[20]);
-    UnicodeUtil.UTF16Result utf16 = new UnicodeUtil.UTF16Result();
-    UnicodeUtil.UTF16Result utf16a = new UnicodeUtil.UTF16Result();
-
-    boolean hasIllegal = false;
-    byte[] last = new byte[60];
-
-    int num = 100000 * RANDOM_MULTIPLIER;
-    for (int iter = 0; iter < num; iter++) {
-
-      final int prefix;
-
-      if (iter == 0 || hasIllegal)
-        prefix = 0;
-      else
-        prefix = nextInt(20);
-
-      hasIllegal = fillUnicode(buffer, expected, prefix, 20-prefix);
-
-      UnicodeUtil.UTF16toUTF8(buffer, 0, 20, utf8);
-      if (!hasIllegal) {
-        byte[] b = new String(buffer, 0, 20).getBytes("UTF-8");
-        assertEquals(b.length, utf8.length);
-        for(int i=0;i<b.length;i++)
-          assertEquals(b[i], utf8.bytes[i]);
-      }
-
-      int bytePrefix = 20;
-      if (iter == 0 || hasIllegal)
-        bytePrefix = 0;
-      else
-        for(int i=0;i<20;i++)
-          if (last[i] != utf8.bytes[i]) {
-            bytePrefix = i;
-            break;
-          }
-      System.arraycopy(utf8.bytes, 0, last, 0, utf8.length);
-
-      UnicodeUtil.UTF8toUTF16(utf8.bytes, bytePrefix, utf8.length-bytePrefix, utf16);
-      assertEquals(20, utf16.length);
-      for(int i=0;i<20;i++)
-        assertEquals(expected[i], utf16.result[i]);
-
-      UnicodeUtil.UTF8toUTF16(utf8.bytes, 0, utf8.length, utf16a);
-      assertEquals(20, utf16a.length);
-      for(int i=0;i<20;i++)
-        assertEquals(expected[i], utf16a.result[i]);
+        assertEquals(expected[i], utf16.chars[i]);
     }
   }
 
@@ -2177,6 +2123,10 @@ public class TestIndexWriter extends Luc
             allowInterrupt = true;
           }
         } catch (ThreadInterruptedException re) {
+          if (VERBOSE) {
+            System.out.println("TEST: got interrupt");
+            re.printStackTrace(System.out);
+          }
           Throwable e = re.getCause();
           assertTrue(e instanceof InterruptedException);
           if (finish) {
@@ -2774,7 +2724,7 @@ public class TestIndexWriter extends Luc
     // or, at most the write.lock file
     final int extraFileCount;
     if (files.length == 1) {
-      assertEquals("write.lock", files[0]);
+      assertTrue(files[0].endsWith("write.lock"));
       extraFileCount = 1;
     } else {
       assertEquals(0, files.length);

Modified: lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java Mon May 30 14:51:25 2011
@@ -833,6 +833,10 @@ public class TestIndexWriterDelete exten
       try {
         modifier.addDocument(doc);
       } catch (IOException io) {
+        if (VERBOSE) {
+          System.out.println("TEST: got expected exc:");
+          io.printStackTrace(System.out);
+        }
         break;
       }
     }

Modified: lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java Mon May 30 14:51:25 2011
@@ -17,24 +17,16 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.io.Reader;
+import java.io.StringReader;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Random;
 
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util._TestUtil;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.IndexInput;
-import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenizer;
@@ -43,9 +35,54 @@ import org.apache.lucene.analysis.TokenS
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.PhraseQuery;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.MockDirectoryWrapper;
+import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
 
 public class TestIndexWriterExceptions extends LuceneTestCase {
 
+  private static class DocCopyIterator implements Iterable<Document> {
+    private final Document doc;
+    private final int count;
+
+    public DocCopyIterator(Document doc, int count) {
+      this.count = count;
+      this.doc = doc;
+    }
+
+    // @Override -- not until Java 1.6
+    public Iterator<Document> iterator() {
+      return new Iterator<Document>() {
+        int upto;
+
+        // @Override -- not until Java 1.6
+        public boolean hasNext() {
+          return upto < count;
+        }
+
+        // @Override -- not until Java 1.6
+        public Document next() {
+          upto++;
+          return doc;
+        }
+
+        // @Override -- not until Java 1.6
+        public void remove() {
+          throw new UnsupportedOperationException();
+        }
+      };
+    }
+  }
+
   private class IndexerThread extends Thread {
 
     IndexWriter writer;
@@ -87,7 +124,11 @@ public class TestIndexWriterExceptions e
         idField.setValue(id);
         Term idTerm = new Term("id", id);
         try {
-          writer.updateDocument(idTerm, doc);
+          if (r.nextBoolean()) {
+            writer.updateDocuments(idTerm, new DocCopyIterator(doc, _TestUtil.nextInt(r, 1, 20)));
+          } else {
+            writer.updateDocument(idTerm, doc);
+          }
         } catch (RuntimeException re) {
           if (VERBOSE) {
             System.out.println(Thread.currentThread().getName() + ": EXC: ");
@@ -136,7 +177,7 @@ public class TestIndexWriterExceptions e
 
     @Override
     boolean testPoint(String name) {
-      if (doFail.get() != null && !name.equals("startDoFlush") && r.nextInt(20) == 17) {
+      if (doFail.get() != null && !name.equals("startDoFlush") && r.nextInt(40) == 17) {
         if (VERBOSE) {
           System.out.println(Thread.currentThread().getName() + ": NOW FAIL: " + name);
           new Throwable().printStackTrace(System.out);
@@ -267,6 +308,8 @@ public class TestIndexWriterExceptions e
     }
   }
 
+  private static String CRASH_FAIL_MESSAGE = "I'm experiencing problems";
+
   private class CrashingFilter extends TokenFilter {
     String fieldName;
     int count;
@@ -279,7 +322,7 @@ public class TestIndexWriterExceptions e
     @Override
     public boolean incrementToken() throws IOException {
       if (this.fieldName.equals("crash") && count++ >= 4)
-        throw new IOException("I'm experiencing problems");
+        throw new IOException(CRASH_FAIL_MESSAGE);
       return input.incrementToken();
     }
 
@@ -876,7 +919,7 @@ public class TestIndexWriterExceptions e
       assertTrue(failure.failOnCommit && failure.failOnDeleteFile);
       w.rollback();
       assertFalse(dir.fileExists("1.fnx"));
-      // FIXME: on windows, this often fails! assertEquals(0, dir.listAll().length);
+      assertEquals(0, dir.listAll().length);
       dir.close();
     }
   }
@@ -1278,4 +1321,141 @@ public class TestIndexWriterExceptions e
       }
     }
   }
+
+  public void testAddDocsNonAbortingException() throws Exception {
+    final Directory dir = newDirectory();
+    final RandomIndexWriter w = new RandomIndexWriter(random, dir);
+    final int numDocs1 = random.nextInt(25);
+    for(int docCount=0;docCount<numDocs1;docCount++) {
+      Document doc = new Document();
+      doc.add(newField("content", "good content", Field.Index.ANALYZED));
+      w.addDocument(doc);
+    }
+    
+    final List<Document> docs = new ArrayList<Document>();
+    for(int docCount=0;docCount<7;docCount++) {
+      Document doc = new Document();
+      docs.add(doc);
+      doc.add(newField("id", docCount+"", Field.Index.NOT_ANALYZED));
+      doc.add(newField("content", "silly content " + docCount, Field.Index.ANALYZED));
+      if (docCount == 4) {
+        Field f = newField("crash", "", Field.Index.ANALYZED);
+        doc.add(f);
+        MockTokenizer tokenizer = new MockTokenizer(new StringReader("crash me on the 4th token"), MockTokenizer.WHITESPACE, false);
+        tokenizer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
+        f.setTokenStream(new CrashingFilter("crash", tokenizer));
+      }
+    }
+    try {
+      w.addDocuments(docs);
+      // BUG: CrashingFilter didn't throw the expected exception
+      fail("did not hit expected exception");
+    } catch (IOException ioe) {
+      // expected
+      assertEquals(CRASH_FAIL_MESSAGE, ioe.getMessage());
+    }
+
+    final int numDocs2 = random.nextInt(25);
+    for(int docCount=0;docCount<numDocs2;docCount++) {
+      Document doc = new Document();
+      doc.add(newField("content", "good content", Field.Index.ANALYZED));
+      w.addDocument(doc);
+    }
+
+    final IndexReader r = w.getReader();
+    w.close();
+
+    final IndexSearcher s = new IndexSearcher(r);
+    PhraseQuery pq = new PhraseQuery();
+    pq.add(new Term("content", "silly"));
+    pq.add(new Term("content", "content"));
+    assertEquals(0, s.search(pq, 1).totalHits);
+
+    pq = new PhraseQuery();
+    pq.add(new Term("content", "good"));
+    pq.add(new Term("content", "content"));
+    assertEquals(numDocs1+numDocs2, s.search(pq, 1).totalHits);
+    r.close();
+    dir.close();
+  }
+
+
+  public void testUpdateDocsNonAbortingException() throws Exception {
+    final Directory dir = newDirectory();
+    final RandomIndexWriter w = new RandomIndexWriter(random, dir);
+    final int numDocs1 = random.nextInt(25);
+    for(int docCount=0;docCount<numDocs1;docCount++) {
+      Document doc = new Document();
+      doc.add(newField("content", "good content", Field.Index.ANALYZED));
+      w.addDocument(doc);
+    }
+
+    // Use addDocuments (no exception expected here) to get a block of docs into the index:
+    final List<Document> docs = new ArrayList<Document>();
+    final int numDocs2 = random.nextInt(25);
+    for(int docCount=0;docCount<numDocs2;docCount++) {
+      Document doc = new Document();
+      docs.add(doc);
+      doc.add(newField("subid", "subs", Field.Index.NOT_ANALYZED));
+      doc.add(newField("id", docCount+"", Field.Index.NOT_ANALYZED));
+      doc.add(newField("content", "silly content " + docCount, Field.Index.ANALYZED));
+    }
+    w.addDocuments(docs);
+
+    final int numDocs3 = random.nextInt(25);
+    for(int docCount=0;docCount<numDocs3;docCount++) {
+      Document doc = new Document();
+      doc.add(newField("content", "good content", Field.Index.ANALYZED));
+      w.addDocument(doc);
+    }
+
+    docs.clear();
+    final int limit = _TestUtil.nextInt(random, 2, 25);
+    final int crashAt = random.nextInt(limit);
+    for(int docCount=0;docCount<limit;docCount++) {
+      Document doc = new Document();
+      docs.add(doc);
+      doc.add(newField("id", docCount+"", Field.Index.NOT_ANALYZED));
+      doc.add(newField("content", "silly content " + docCount, Field.Index.ANALYZED));
+      if (docCount == crashAt) {
+        Field f = newField("crash", "", Field.Index.ANALYZED);
+        doc.add(f);
+        MockTokenizer tokenizer = new MockTokenizer(new StringReader("crash me on the 4th token"), MockTokenizer.WHITESPACE, false);
+        tokenizer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
+        f.setTokenStream(new CrashingFilter("crash", tokenizer));
+      }
+    }
+
+    try {
+      w.updateDocuments(new Term("subid", "subs"), docs);
+      // BUG: CrashingFilter didn't throw the expected exception
+      fail("did not hit expected exception");
+    } catch (IOException ioe) {
+      // expected
+      assertEquals(CRASH_FAIL_MESSAGE, ioe.getMessage());
+    }
+
+    final int numDocs4 = random.nextInt(25);
+    for(int docCount=0;docCount<numDocs4;docCount++) {
+      Document doc = new Document();
+      doc.add(newField("content", "good content", Field.Index.ANALYZED));
+      w.addDocument(doc);
+    }
+
+    final IndexReader r = w.getReader();
+    w.close();
+
+    final IndexSearcher s = new IndexSearcher(r);
+    PhraseQuery pq = new PhraseQuery();
+    pq.add(new Term("content", "silly"));
+    pq.add(new Term("content", "content"));
+    assertEquals(numDocs2, s.search(pq, 1).totalHits);
+
+    pq = new PhraseQuery();
+    pq.add(new Term("content", "good"));
+    pq.add(new Term("content", "content"));
+    assertEquals(numDocs1+numDocs3+numDocs4, s.search(pq, 1).totalHits);
+    r.close();
+    dir.close();
+  }
 }
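
A rough usage sketch of the block addDocuments/updateDocuments API these two
tests exercise (a sketch, not part of the patch; it assumes an already-open
IndexWriter named "writer" and the usual org.apache.lucene.document /
org.apache.lucene.index imports):

    List<Document> pack = new ArrayList<Document>();
    for (int i = 0; i < 3; i++) {
      Document d = new Document();
      d.add(new Field("packID", "42", Field.Store.YES, Field.Index.NOT_ANALYZED));
      d.add(new Field("content", "silly content " + i, Field.Store.NO, Field.Index.ANALYZED));
      pack.add(d);
    }
    // Added as one block: the documents get adjacent docIDs and become
    // visible to readers together.
    writer.addDocuments(pack);
    // Replace the whole block atomically: delete everything matching the
    // term, then add the new documents, with no point at which a reader
    // can see only part of the pack.
    writer.updateDocuments(new Term("packID", "42"), pack);

The zero-hit and numDocs2-hit assertions above check exactly this atomicity in
the failure case: when one document of the block throws a non-aborting
exception (the CrashingFilter), none of the block's documents may be indexed,
and for updateDocuments the previously indexed documents must be left intact.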

Modified: lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java Mon May 30 14:51:25 2011
@@ -65,6 +65,10 @@ public class TestIndexWriterWithThreads 
           writer.updateDocument(new Term("id", ""+(idUpto++)), doc);
           addCount++;
         } catch (IOException ioe) {
+          if (VERBOSE) {
+            System.out.println("TEST: expected exc:");
+            ioe.printStackTrace(System.out);
+          }
           //System.out.println(Thread.currentThread().getName() + ": hit exc");
           //ioe.printStackTrace(System.out);
           if (ioe.getMessage().startsWith("fake disk full at") ||
@@ -218,6 +222,9 @@ public class TestIndexWriterWithThreads 
     int NUM_THREADS = 3;
 
     for(int iter=0;iter<2;iter++) {
+      if (VERBOSE) {
+        System.out.println("TEST: iter=" + iter);
+      }
       MockDirectoryWrapper dir = newDirectory();
 
       IndexWriter writer = new IndexWriter(
@@ -228,6 +235,7 @@ public class TestIndexWriterWithThreads 
               setMergePolicy(newLogMergePolicy(4))
       );
       ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
+      writer.setInfoStream(VERBOSE ? System.out : null);
 
       IndexerThread[] threads = new IndexerThread[NUM_THREADS];
 

Modified: lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestNRTThreads.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestNRTThreads.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestNRTThreads.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestNRTThreads.java Mon May 30 14:51:25 2011
@@ -21,33 +21,35 @@ import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-import java.util.HashSet;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.Executors;
 import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.index.codecs.CodecProvider;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.PhraseQuery;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.document.Field;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.util.NamedThreadFactory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LineFileDocs;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.NamedThreadFactory;
 import org.apache.lucene.util._TestUtil;
 import org.junit.Test;
 
@@ -57,6 +59,39 @@ import org.junit.Test;
 
 public class TestNRTThreads extends LuceneTestCase {
 
+  private static class SubDocs {
+    public final String packID;
+    public final List<String> subIDs;
+    public boolean deleted;
+
+    public SubDocs(String packID, List<String> subIDs) {
+      this.packID = packID;
+      this.subIDs = subIDs;
+    }
+  }
+
+  // TODO: is there a pre-existing way to do this!!!
+  private Document cloneDoc(Document doc1) {
+    final Document doc2 = new Document();
+    for(Fieldable f : doc1.getFields()) {
+      Field field1 = (Field) f;
+      
+      Field field2 = new Field(field1.name(),
+                               field1.stringValue(),
+                               field1.isStored() ? Field.Store.YES : Field.Store.NO,
+                               field1.isIndexed() ? (field1.isTokenized() ? Field.Index.ANALYZED : Field.Index.NOT_ANALYZED) : Field.Index.NO);
+      if (field1.getOmitNorms()) {
+        field2.setOmitNorms(true);
+      }
+      if (field1.getOmitTermFreqAndPositions()) {
+        field2.setOmitTermFreqAndPositions(true);
+      }
+      doc2.add(field2);
+    }
+
+    return doc2;
+  }
+
   @Test
   public void testNRTThreads() throws Exception {
 
@@ -121,13 +156,16 @@ public class TestNRTThreads extends Luce
 
     final int NUM_INDEX_THREADS = 2;
     final int NUM_SEARCH_THREADS = 3;
+
     final int RUN_TIME_SEC = LuceneTestCase.TEST_NIGHTLY ? 300 : 5;
 
     final AtomicBoolean failed = new AtomicBoolean();
     final AtomicInteger addCount = new AtomicInteger();
     final AtomicInteger delCount = new AtomicInteger();
+    final AtomicInteger packCount = new AtomicInteger();
 
     final Set<String> delIDs = Collections.synchronizedSet(new HashSet<String>());
+    final List<SubDocs> allSubDocs = Collections.synchronizedList(new ArrayList<SubDocs>());
 
     final long stopTime = System.currentTimeMillis() + RUN_TIME_SEC*1000;
     Thread[] threads = new Thread[NUM_INDEX_THREADS];
@@ -135,7 +173,9 @@ public class TestNRTThreads extends Luce
       threads[thread] = new Thread() {
           @Override
           public void run() {
+            // TODO: would be better if this were cross-thread, so that we make sure one thread deleting another's added docs works:
             final List<String> toDeleteIDs = new ArrayList<String>();
+            final List<SubDocs> toDeleteSubDocs = new ArrayList<SubDocs>();
             while(System.currentTimeMillis() < stopTime && !failed.get()) {
               try {
                 Document doc = docs.nextDoc();
@@ -153,7 +193,92 @@ public class TestNRTThreads extends Luce
                   if (VERBOSE) {
                     //System.out.println(Thread.currentThread().getName() + ": add doc id:" + doc.get("docid"));
                   }
-                  writer.addDocument(doc);
+
+                  if (random.nextBoolean()) {
+                    // Add a pack of adjacent sub-docs
+                    final String packID;
+                    final SubDocs delSubDocs;
+                    if (toDeleteSubDocs.size() > 0 && random.nextBoolean()) {
+                      delSubDocs = toDeleteSubDocs.get(random.nextInt(toDeleteSubDocs.size()));
+                      assert !delSubDocs.deleted;
+                      toDeleteSubDocs.remove(delSubDocs);
+                      // reuse prior packID
+                      packID = delSubDocs.packID;
+                    } else {
+                      delSubDocs = null;
+                      // make new packID
+                      packID = packCount.getAndIncrement() + "";
+                    }
+
+                    final Field packIDField = newField("packID", packID, Field.Store.YES, Field.Index.NOT_ANALYZED);
+                    final List<String> docIDs = new ArrayList<String>();
+                    final SubDocs subDocs = new SubDocs(packID, docIDs);
+                    final List<Document> docsList = new ArrayList<Document>();
+
+                    allSubDocs.add(subDocs);
+                    doc.add(packIDField);
+                    docsList.add(cloneDoc(doc));
+                    docIDs.add(doc.get("docid"));
+
+                    final int maxDocCount = _TestUtil.nextInt(random, 1, 10);
+                    while(docsList.size() < maxDocCount) {
+                      doc = docs.nextDoc();
+                      if (doc == null) {
+                        break;
+                      }
+                      docsList.add(cloneDoc(doc));
+                      docIDs.add(doc.get("docid"));
+                    }
+                    addCount.addAndGet(docsList.size());
+
+                    if (delSubDocs != null) {
+                      delSubDocs.deleted = true;
+                      delIDs.addAll(delSubDocs.subIDs);
+                      delCount.addAndGet(delSubDocs.subIDs.size());
+                      if (VERBOSE) {
+                        System.out.println("TEST: update pack packID=" + delSubDocs.packID + " count=" + docsList.size() + " docs=" + docIDs);
+                      }
+                      writer.updateDocuments(new Term("packID", delSubDocs.packID), docsList);
+                      /*
+                      // non-atomic:
+                      writer.deleteDocuments(new Term("packID", delSubDocs.packID));
+                      for(Document subDoc : docsList) {
+                        writer.addDocument(subDoc);
+                      }
+                      */
+                    } else {
+                      if (VERBOSE) {
+                        System.out.println("TEST: add pack packID=" + packID + " count=" + docsList.size() + " docs=" + docIDs);
+                      }
+                      writer.addDocuments(docsList);
+                      
+                      /*
+                      // non-atomic:
+                      for(Document subDoc : docsList) {
+                        writer.addDocument(subDoc);
+                      }
+                      */
+                    }
+                    doc.removeField("packID");
+
+                    if (random.nextInt(5) == 2) {
+                      if (VERBOSE) {
+                        //System.out.println(Thread.currentThread().getName() + ": buffer del id:" + packID);
+                      }
+                      toDeleteSubDocs.add(subDocs);
+                    }
+
+                  } else {
+                    writer.addDocument(doc);
+                    addCount.getAndIncrement();
+
+                    if (random.nextInt(5) == 3) {
+                      if (VERBOSE) {
+                        //System.out.println(Thread.currentThread().getName() + ": buffer del id:" + doc.get("docid"));
+                      }
+                      toDeleteIDs.add(doc.get("docid"));
+                    }
+                  }
                 } else {
                   // we use update but it never replaces a
                   // prior doc
@@ -161,14 +286,17 @@ public class TestNRTThreads extends Luce
                     //System.out.println(Thread.currentThread().getName() + ": update doc id:" + doc.get("docid"));
                   }
                   writer.updateDocument(new Term("docid", doc.get("docid")), doc);
-                }
-                if (random.nextInt(5) == 3) {
-                  if (VERBOSE) {
-                    //System.out.println(Thread.currentThread().getName() + ": buffer del id:" + doc.get("docid"));
+                  addCount.getAndIncrement();
+
+                  if (random.nextInt(5) == 3) {
+                    if (VERBOSE) {
+                      //System.out.println(Thread.currentThread().getName() + ": buffer del id:" + doc.get("docid"));
+                    }
+                    toDeleteIDs.add(doc.get("docid"));
                   }
-                  toDeleteIDs.add(doc.get("docid"));
                 }
-                if (random.nextInt(50) == 17) {
+
+                if (random.nextInt(30) == 17) {
                   if (VERBOSE) {
                     //System.out.println(Thread.currentThread().getName() + ": apply " + toDeleteIDs.size() + " deletes");
                   }
@@ -184,8 +312,19 @@ public class TestNRTThreads extends Luce
                   }
                   delIDs.addAll(toDeleteIDs);
                   toDeleteIDs.clear();
+
+                  for(SubDocs subDocs : toDeleteSubDocs) {
+                    assert !subDocs.deleted;
+                    writer.deleteDocuments(new Term("packID", subDocs.packID));
+                    subDocs.deleted = true;
+                    if (VERBOSE) {
+                      System.out.println("  del subs: " + subDocs.subIDs + " packID=" + subDocs.packID);
+                    }
+                    delIDs.addAll(subDocs.subIDs);
+                    delCount.addAndGet(subDocs.subIDs.size());
+                  }
+                  toDeleteSubDocs.clear();
                 }
-                addCount.getAndIncrement();
                 if (addedField != null) {
                   doc.removeField(addedField);
                 }
@@ -356,7 +495,7 @@ public class TestNRTThreads extends Luce
     if (VERBOSE) {
       System.out.println("TEST: done join [" + (System.currentTimeMillis()-t0) + " ms]; addCount=" + addCount + " delCount=" + delCount);
     }
-    
+
     final IndexReader r2 = writer.getReader();
     final IndexSearcher s = newSearcher(r2);
     boolean doFail = false;
@@ -367,6 +506,43 @@ public class TestNRTThreads extends Luce
         doFail = true;
       }
     }
+
+    // Make sure each group of sub-docs are still in docID order:
+    for(SubDocs subDocs : allSubDocs) {
+      if (!subDocs.deleted) {
+        // We sort by relevance, but the scores should be identical, so the sort falls back to docID order:
+        TopDocs hits = s.search(new TermQuery(new Term("packID", subDocs.packID)), 20);
+        assertEquals(subDocs.subIDs.size(), hits.totalHits);
+        int lastDocID = -1;
+        int startDocID = -1;
+        for(ScoreDoc scoreDoc : hits.scoreDocs) {
+          final int docID = scoreDoc.doc;
+          if (lastDocID != -1) {
+            assertEquals(1+lastDocID, docID);
+          } else {
+            startDocID = docID;
+          }
+          lastDocID = docID;
+          final Document doc = s.doc(docID);
+          assertEquals(subDocs.packID, doc.get("packID"));
+        }
+
+        lastDocID = startDocID - 1;
+        for(String subID : subDocs.subIDs) {
+          hits = s.search(new TermQuery(new Term("docid", subID)), 1);
+          assertEquals(1, hits.totalHits);
+          final int docID = hits.scoreDocs[0].doc;
+          if (lastDocID != -1) {
+            assertEquals(1+lastDocID, docID);
+          }
+          lastDocID = docID;
+        }          
+      } else {
+        for(String subID : subDocs.subIDs) {
+          assertEquals(0, s.search(new TermQuery(new Term("docid", subID)), 1).totalHits);
+        }
+      }
+    }
     
     final int endID = Integer.parseInt(docs.nextDoc().get("docid"));
     for(int id=0;id<endID;id++) {

Modified: lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestPersistentSnapshotDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestPersistentSnapshotDeletionPolicy.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestPersistentSnapshotDeletionPolicy.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestPersistentSnapshotDeletionPolicy.java Mon May 30 14:51:25 2011
@@ -30,9 +30,13 @@ import org.junit.Before;
 import org.junit.Test;
 
 public class TestPersistentSnapshotDeletionPolicy extends TestSnapshotDeletionPolicy {
+
   // Keep it a class member so that getDeletionPolicy can use it
   private Directory snapshotDir;
   
+  // so we can close it if called by SDP tests
+  private PersistentSnapshotDeletionPolicy psdp;
+  
   @Before
   @Override
   public void setUp() throws Exception {
@@ -43,15 +47,17 @@ public class TestPersistentSnapshotDelet
   @After
   @Override
   public void tearDown() throws Exception {
+    if (psdp != null) psdp.close();
     snapshotDir.close();
     super.tearDown();
   }
   
   @Override
   protected SnapshotDeletionPolicy getDeletionPolicy() throws IOException {
+    if (psdp != null) psdp.close();
     snapshotDir.close();
     snapshotDir = newDirectory();
-    return new PersistentSnapshotDeletionPolicy(
+    return psdp = new PersistentSnapshotDeletionPolicy(
         new KeepOnlyLastCommitDeletionPolicy(), snapshotDir, OpenMode.CREATE,
         TEST_VERSION_CURRENT);
   }
@@ -173,6 +179,8 @@ public class TestPersistentSnapshotDelet
      fail("should not have reached here - the snapshots directory should be locked!");
     } catch (LockObtainFailedException e) {
       // expected
+    } finally {
+      psdp.close();
     }
     
     // Reading the snapshots info should succeed though
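
For reference, a rough sketch (not from the patch) of the open/close
discipline this change enforces; the lock test above expects the snapshots
directory to stay locked until the policy is closed:

    PersistentSnapshotDeletionPolicy psdp = new PersistentSnapshotDeletionPolicy(
        new KeepOnlyLastCommitDeletionPolicy(), snapshotDir, OpenMode.CREATE,
        TEST_VERSION_CURRENT);
    try {
      // ... hand psdp to IndexWriterConfig.setIndexDeletionPolicy(...) and index ...
    } finally {
      psdp.close();        // lets the snapshots directory be reopened or closed
      snapshotDir.close();
    }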

Modified: lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java Mon May 30 14:51:25 2011
@@ -20,8 +20,12 @@ package org.apache.lucene.index;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.store.BufferedIndexInput;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.index.codecs.CodecProvider;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Field.Index;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.util.BytesRef;
 
 import java.io.IOException;
@@ -127,4 +131,50 @@ public class TestSegmentMerger extends L
     TestSegmentReader.checkNorms(mergedReader);
     mergedReader.close();
   }
+  
+  // LUCENE-3143
+  public void testInvalidFilesToCreateCompound() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
+    IndexWriter w = new IndexWriter(dir, iwc);
+    
+    // Create an index w/ .del file
+    w.addDocument(new Document());
+    Document doc = new Document();
+    doc.add(new Field("c", "test", Store.NO, Index.ANALYZED));
+    w.addDocument(doc);
+    w.commit();
+    w.deleteDocuments(new Term("c", "test"));
+    w.close();
+    
+    // Assert that SM fails if .del exists
+    SegmentMerger sm = new SegmentMerger(dir, 1, "a", null, null, null);
+    try {
+      sm.createCompoundFile("b1", w.segmentInfos.info(0));
+      fail("should not have been able to create a .cfs with .del and .s* files");
+    } catch (AssertionError e) {
+      // expected
+    }
+    
+    // Create an index w/ .s*
+    w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
+    doc = new Document();
+    doc.add(new Field("c", "test", Store.NO, Index.ANALYZED));
+    w.addDocument(doc);
+    w.close();
+    IndexReader r = IndexReader.open(dir, false);
+    r.setNorm(0, "c", (byte) 1);
+    r.close();
+    
+    // Assert that SM fails if .s* exists
+    try {
+      sm.createCompoundFile("b2", w.segmentInfos.info(0));
+      fail("should not have been able to create a .cfs with .del and .s* files");
+    } catch (AssertionError e) {
+      // expected
+    }
+
+    dir.close();
+  }
+
 }

Modified: lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/search/TestFieldCache.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/search/TestFieldCache.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/search/TestFieldCache.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/search/TestFieldCache.java Mon May 30 14:51:25 2011
@@ -217,6 +217,7 @@ public class TestFieldCache extends Luce
     IndexReader r = IndexReader.open(writer, true);
     FieldCache.DocTerms terms = FieldCache.DEFAULT.getTerms(r, "foobar");
     FieldCache.DocTermsIndex termsIndex = FieldCache.DEFAULT.getTermsIndex(r, "foobar");
+    writer.close();
     r.close();
     dir.close();
   }

Modified: lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/search/TestRegexpRandom2.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/search/TestRegexpRandom2.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/search/TestRegexpRandom2.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/search/TestRegexpRandom2.java Mon May 30 14:51:25 2011
@@ -35,6 +35,7 @@ import org.apache.lucene.index.RandomInd
 import org.apache.lucene.index.codecs.CodecProvider;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.CharsRef;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.UnicodeUtil;
 import org.apache.lucene.util._TestUtil;
@@ -114,7 +115,7 @@ public class TestRegexpRandom2 extends L
 
     private class SimpleAutomatonTermsEnum extends FilteredTermsEnum {
       CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton);
-      UnicodeUtil.UTF16Result utf16 = new UnicodeUtil.UTF16Result();
+      CharsRef utf16 = new CharsRef(10);
 
       private SimpleAutomatonTermsEnum(TermsEnum tenum) throws IOException {
         super(tenum);
@@ -124,7 +125,7 @@ public class TestRegexpRandom2 extends L
       @Override
       protected AcceptStatus accept(BytesRef term) throws IOException {
         UnicodeUtil.UTF8toUTF16(term.bytes, term.offset, term.length, utf16);
-        return runAutomaton.run(utf16.result, 0, utf16.length) ? 
+        return runAutomaton.run(utf16.chars, 0, utf16.length) ? 
             AcceptStatus.YES : AcceptStatus.NO;
       }
     }
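
For reference, a small sketch of the CharsRef-based conversion this change
switches to (assuming a BytesRef named "term" holding valid UTF-8, as in the
accept() method above):

    CharsRef utf16 = new CharsRef(10);  // small initial size; the conversion grows the buffer as needed
    UnicodeUtil.UTF8toUTF16(term.bytes, term.offset, term.length, utf16);
    // utf16.chars[0..utf16.length) now holds the UTF-16 form of the term:
    String text = new String(utf16.chars, 0, utf16.length);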

Modified: lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/store/TestBufferedIndexInput.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/store/TestBufferedIndexInput.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/store/TestBufferedIndexInput.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/store/TestBufferedIndexInput.java Mon May 30 14:51:25 2011
@@ -87,7 +87,7 @@ public class TestBufferedIndexInput exte
     // NOTE: this does only test the chunked reads and NOT if the Bug is triggered.
     //final int tmpFileSize = 1024 * 1024 * 5;
     final int inputBufferSize = 128;
-    File tmpInputFile = File.createTempFile("IndexInput", "tmpFile");
+    File tmpInputFile = _TestUtil.createTempFile("IndexInput", "tmpFile", TEMP_DIR);
     tmpInputFile.deleteOnExit();
     writeBytes(tmpInputFile, TEST_FILE_LENGTH);
 

Modified: lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/store/TestLockFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/store/TestLockFactory.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/store/TestLockFactory.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/store/TestLockFactory.java Mon May 30 14:51:25 2011
@@ -255,15 +255,21 @@ public class TestLockFactory extends Luc
     // write.lock is stored in index):
     public void testDefaultFSLockFactoryPrefix() throws IOException {
 
-      // Make sure we get null prefix:
+      // Make sure we get a null prefix, which won't happen if setLockFactory is ever called.
       File dirName = _TestUtil.getTempDir("TestLockFactory.10");
-      Directory dir = newFSDirectory(dirName);
-
-      String prefix = dir.getLockFactory().getLockPrefix();
-
-      assertTrue("Default lock prefix should be null", null == prefix);
 
+      Directory dir = new SimpleFSDirectory(dirName);
+      assertNull("Default lock prefix should be null", dir.getLockFactory().getLockPrefix());
+      dir.close();
+      
+      dir = new MMapDirectory(dirName);
+      assertNull("Default lock prefix should be null", dir.getLockFactory().getLockPrefix());
+      dir.close();
+      
+      dir = new NIOFSDirectory(dirName);
+      assertNull("Default lock prefix should be null", dir.getLockFactory().getLockPrefix());
       dir.close();
+ 
       _TestUtil.rmDir(dirName);
     }
 

Modified: lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/store/TestMultiMMap.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/store/TestMultiMMap.java?rev=1129205&r1=1129204&r2=1129205&view=diff
==============================================================================
--- lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/store/TestMultiMMap.java (original)
+++ lucene/dev/branches/solr2452/lucene/src/test/org/apache/lucene/store/TestMultiMMap.java Mon May 30 14:51:25 2011
@@ -51,7 +51,7 @@ public class TestMultiMMap extends Lucen
   }
   
   private void assertChunking(Random random, int chunkSize) throws Exception {
-    File path = File.createTempFile("mmap" + chunkSize, "tmp", workDir);
+    File path = _TestUtil.createTempFile("mmap" + chunkSize, "tmp", workDir);
     path.delete();
     path.mkdirs();
     MMapDirectory dir = new MMapDirectory(path);