Posted to commits@lucene.apache.org by mi...@apache.org on 2015/01/04 15:53:21 UTC

svn commit: r1649347 [11/31] - in /lucene/dev/branches/lucene6005: ./ dev-tools/ dev-tools/idea/solr/contrib/dataimporthandler-extras/ dev-tools/idea/solr/contrib/extraction/ dev-tools/idea/solr/contrib/map-reduce/ dev-tools/idea/solr/contrib/velocity/...

Modified: lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterPath.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterPath.java?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterPath.java (original)
+++ lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/mockfile/FilterPath.java Sun Jan  4 14:53:12 2015
@@ -82,7 +82,7 @@ public class FilterPath implements Path
     if (root == null) {
       return null;
     }
-    return new FilterPath(root, fileSystem);
+    return wrap(root);
   }
 
   @Override
@@ -91,7 +91,7 @@ public class FilterPath implements Path
     if (fileName == null) {
       return null;
     }
-    return new FilterPath(fileName, fileSystem);
+    return wrap(fileName);
   }
 
   @Override
@@ -100,7 +100,7 @@ public class FilterPath implements Path
     if (parent == null) {
       return null;
     }
-    return new FilterPath(parent, fileSystem);
+    return wrap(parent);
   }
 
   @Override
@@ -110,12 +110,12 @@ public class FilterPath implements Path
 
   @Override
   public Path getName(int index) {
-    return new FilterPath(delegate.getName(index), fileSystem);
+    return wrap(delegate.getName(index));
   }
 
   @Override
   public Path subpath(int beginIndex, int endIndex) {
-    return new FilterPath(delegate.subpath(beginIndex, endIndex), fileSystem);
+    return wrap(delegate.subpath(beginIndex, endIndex));
   }
 
   @Override
@@ -148,7 +148,7 @@ public class FilterPath implements Path
 
   @Override
   public Path normalize() {
-    return new FilterPath(delegate.normalize(), fileSystem);
+    return wrap(delegate.normalize());
   }
 
   @Override
@@ -156,12 +156,12 @@ public class FilterPath implements Path
     if (other instanceof FilterPath) {
       other = ((FilterPath)other).delegate;
     }
-    return new FilterPath(delegate.resolve(other), fileSystem);
+    return wrap(delegate.resolve(other));
   }
 
   @Override
   public Path resolve(String other) {
-    return new FilterPath(delegate.resolve(other), fileSystem);
+    return wrap(delegate.resolve(other));
   }
 
   @Override
@@ -169,12 +169,12 @@ public class FilterPath implements Path
     if (other instanceof FilterPath) {
       other = ((FilterPath)other).delegate;
     }
-    return new FilterPath(delegate.resolveSibling(other), fileSystem);
+    return wrap(delegate.resolveSibling(other));
   }
 
   @Override
   public Path resolveSibling(String other) {
-    return new FilterPath(delegate.resolveSibling(other), fileSystem);
+    return wrap(delegate.resolveSibling(other));
   }
 
   @Override
@@ -182,7 +182,7 @@ public class FilterPath implements Path
     if (other instanceof FilterPath) {
       other = ((FilterPath)other).delegate;
     }
-    return new FilterPath(delegate.relativize(other), fileSystem);
+    return wrap(delegate.relativize(other));
   }
 
   // TODO: should these methods not expose delegate result directly?
@@ -200,12 +200,12 @@ public class FilterPath implements Path
 
   @Override
   public Path toAbsolutePath() {
-    return new FilterPath(delegate.toAbsolutePath(), fileSystem);
+    return wrap(delegate.toAbsolutePath());
   }
 
   @Override
   public Path toRealPath(LinkOption... options) throws IOException {
-    return new FilterPath(delegate.toRealPath(options), fileSystem);
+    return wrap(delegate.toRealPath(options));
   }
 
   @Override
@@ -235,7 +235,7 @@ public class FilterPath implements Path
 
       @Override
       public Path next() {
-        return new FilterPath(iterator.next(), fileSystem);
+        return wrap(iterator.next());
       }
 
       @Override
@@ -267,4 +267,10 @@ public class FilterPath implements Path
     }
     return path;
   }
+  
+  /** Override this to customize the wrapped path
+   *  returned from various operations. */
+  protected Path wrap(Path other) {
+    return new FilterPath(other, fileSystem);
+  }
 }

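FilterPath now funnels every derived path through the new wrap() hook, so subclasses can keep the results of getParent(), resolve(), normalize() and friends wrapped in their own type. A minimal sketch of such a subclass (the class name is hypothetical, and it assumes the (Path, FileSystem) constructor and a subclass-visible fileSystem field suggested by the diff above):

    import java.nio.file.FileSystem;
    import java.nio.file.Path;

    /** Hypothetical subclass: because FilterPath routes derived paths through
     *  wrap(), every Path this class hands out stays a LoggingPath rather than
     *  degrading to a plain FilterPath. */
    class LoggingPath extends FilterPath {
      LoggingPath(Path delegate, FileSystem fileSystem) {
        super(delegate, fileSystem);
      }

      @Override
      protected Path wrap(Path other) {
        System.out.println("wrapping: " + other);  // example instrumentation
        return new LoggingPath(other, fileSystem);
      }
    }
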
Modified: lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/mockfile/HandleTrackingFS.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/mockfile/HandleTrackingFS.java?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/mockfile/HandleTrackingFS.java (original)
+++ lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/mockfile/HandleTrackingFS.java Sun Jan  4 14:53:12 2015
@@ -93,9 +93,15 @@ public abstract class HandleTrackingFS e
   @Override
   public InputStream newInputStream(Path path, OpenOption... options) throws IOException {
     InputStream stream = new FilterInputStream2(super.newInputStream(path, options)) {
+      
+      boolean closed;
+      
       @Override
       public void close() throws IOException {
-        onClose(path, this);
+        if (!closed) {
+          closed = true;
+          onClose(path, this);
+        }
         super.close();
       }
 
@@ -121,9 +127,15 @@ public abstract class HandleTrackingFS e
   @Override
   public OutputStream newOutputStream(final Path path, OpenOption... options) throws IOException {
     OutputStream stream = new FilterOutputStream2(super.newOutputStream(path, options)) {
+      
+      boolean closed;
+
       @Override
       public void close() throws IOException {
-        onClose(path, this);
+        if (!closed) {
+          closed = true;
+          onClose(path, this);
+        }
         super.close();
       }
       
@@ -149,9 +161,15 @@ public abstract class HandleTrackingFS e
   @Override
   public FileChannel newFileChannel(Path path, Set<? extends OpenOption> options, FileAttribute<?>... attrs) throws IOException {
     FileChannel channel = new FilterFileChannel(super.newFileChannel(path, options, attrs)) {
+      
+      boolean closed;
+      
       @Override
       protected void implCloseChannel() throws IOException {
-        onClose(path, this);
+        if (!closed) {
+          closed = true;
+          onClose(path, this);
+        }
         super.implCloseChannel();
       }
 
@@ -177,9 +195,15 @@ public abstract class HandleTrackingFS e
   @Override
   public AsynchronousFileChannel newAsynchronousFileChannel(Path path, Set<? extends OpenOption> options, ExecutorService executor, FileAttribute<?>... attrs) throws IOException {
     AsynchronousFileChannel channel = new FilterAsynchronousFileChannel(super.newAsynchronousFileChannel(path, options, executor, attrs)) {
+      
+      boolean closed;
+      
       @Override
       public void close() throws IOException {
-        onClose(path, this);
+        if (!closed) {
+          closed = true;
+          onClose(path, this);
+        }
         super.close();
       }
 
@@ -205,9 +229,15 @@ public abstract class HandleTrackingFS e
   @Override
   public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> options, FileAttribute<?>... attrs) throws IOException {
     SeekableByteChannel channel = new FilterSeekableByteChannel(super.newByteChannel(path, options, attrs)) {
+      
+      boolean closed;
+      
       @Override
       public void close() throws IOException {
-        onClose(path, this);
+        if (!closed) {
+          closed = true;
+          onClose(path, this);
+        }
         super.close();
       }
 
@@ -237,9 +267,15 @@ public abstract class HandleTrackingFS e
       stream = new TrackingSecureDirectoryStream((SecureDirectoryStream<Path>)stream, dir);
     } else {
       stream = new FilterDirectoryStream<Path>(stream) {
+        
+        boolean closed;
+        
         @Override
         public void close() throws IOException {
-          onClose(dir, this);
+          if (!closed) {
+            closed = true;
+            onClose(dir, this);
+          }
           super.close();
         }
         
@@ -271,10 +307,15 @@ public abstract class HandleTrackingFS e
       super(delegate);
       this.dir = dir;
     }
+    
+    boolean closed;
 
     @Override
     public void close() throws IOException {
-      onClose(dir, this);
+      if (!closed) {
+        closed = true;
+        onClose(dir, this);
+      }
       super.close();
     }
     
@@ -303,9 +344,15 @@ public abstract class HandleTrackingFS e
     @Override
     public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> options, FileAttribute<?>... attrs) throws IOException {
       SeekableByteChannel channel = new FilterSeekableByteChannel(super.newByteChannel(path, options, attrs)) {
+        
+        boolean closed;
+        
         @Override
         public void close() throws IOException {
-          onClose(path, this);
+          if (!closed) {
+            closed = true;
+            onClose(path, this);
+          }
           super.close();
         }
 

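Each handle type above (input/output streams, file channels, byte channels, directory streams) gets the same guard: a closed flag so the onClose() tracking callback fires exactly once, while close() is still forwarded to the delegate so genuine double-close bugs are not masked. A standalone sketch of that pattern, with illustrative names not taken from this commit:

    import java.io.FilterInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    // Run a tracking callback only on the first close(), but always forward
    // close() to the wrapped stream, even when called repeatedly.
    class CloseOnceInputStream extends FilterInputStream {
      private final Runnable onFirstClose;
      private boolean closed;

      CloseOnceInputStream(InputStream in, Runnable onFirstClose) {
        super(in);
        this.onFirstClose = onFirstClose;
      }

      @Override
      public void close() throws IOException {
        if (!closed) {
          closed = true;
          onFirstClose.run();  // e.g. drop this handle from an open-handles map
        }
        super.close();         // the delegate still sees every close() call
      }
    }
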
Modified: lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/mockfile/LeakFS.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/mockfile/LeakFS.java?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/mockfile/LeakFS.java (original)
+++ lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/mockfile/LeakFS.java Sun Jan  4 14:53:12 2015
@@ -55,7 +55,7 @@ public class LeakFS extends HandleTracki
   @Override
   public synchronized void onClose() {
     if (!openHandles.isEmpty()) {
-      // print the first one as its very verbose otherwise
+      // print the first one as it's very verbose otherwise
       Exception cause = null;
       Iterator<Exception> stacktraces = openHandles.values().iterator();
       if (stacktraces.hasNext()) {

Modified: lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/mockfile/WindowsFS.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/mockfile/WindowsFS.java?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/mockfile/WindowsFS.java (original)
+++ lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/mockfile/WindowsFS.java Sun Jan  4 14:53:12 2015
@@ -89,7 +89,7 @@ public class WindowsFS extends HandleTra
   }
   
   /** 
-   * Checks that its ok to delete {@code Path}. If the file
+   * Checks that it's ok to delete {@code Path}. If the file
    * is still open, it throws IOException("access denied").
    */
   private void checkDeleteAccess(Path path) throws IOException {

Modified: lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/search/CheckHits.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/search/CheckHits.java?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/search/CheckHits.java (original)
+++ lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/search/CheckHits.java Sun Jan  4 14:53:12 2015
@@ -342,13 +342,13 @@ public class CheckHits {
     if (!deep) return;
 
     Explanation detail[] = expl.getDetails();
-    // TODO: can we improve this entire method? its really geared to work only with TF/IDF
+    // TODO: can we improve this entire method? it's really geared to work only with TF/IDF
     if (expl.getDescription().endsWith("computed from:")) {
       return; // something more complicated.
     }
     if (detail!=null) {
       if (detail.length==1) {
-        // simple containment, unless its a freq of: (which lets a query explain how the freq is calculated), 
+        // simple containment, unless it's a freq of: (which lets a query explain how the freq is calculated), 
         // just verify contained expl has same score
         if (!expl.getDescription().endsWith("with freq of:"))
           verifyExplanation(q,doc,score,deep,detail[0]);

Modified: lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/BaseDirectoryTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/BaseDirectoryTestCase.java?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/BaseDirectoryTestCase.java (original)
+++ lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/BaseDirectoryTestCase.java Sun Jan  4 14:53:12 2015
@@ -46,7 +46,7 @@ public abstract class BaseDirectoryTestC
   
   // first some basic tests for the directory api
   
-  public void testCopy() throws Exception {
+  public void testCopyFrom() throws Exception {
     Directory source = getDirectory(createTempDir("testCopy"));
     Directory dest = newDirectory();
     
@@ -57,7 +57,7 @@ public abstract class BaseDirectoryTestC
     output.writeBytes(bytes, bytes.length);
     output.close();
     
-    source.copy(dest, "foobar", "foobaz", newIOContext(random()));
+    dest.copyFrom(source, "foobar", "foobaz", newIOContext(random()));
     assertTrue(slowFileExists(dest, "foobaz"));
     
     IndexInput input = dest.openInput("foobaz", newIOContext(random()));
@@ -71,7 +71,7 @@ public abstract class BaseDirectoryTestC
     IOUtils.close(source, dest);
   }
   
-  public void testCopyDestination() throws Exception {
+  public void testCopyFromDestination() throws Exception {
     Directory source = newDirectory();
     Directory dest = getDirectory(createTempDir("testCopyDestination"));
     
@@ -82,7 +82,7 @@ public abstract class BaseDirectoryTestC
     output.writeBytes(bytes, bytes.length);
     output.close();
     
-    source.copy(dest, "foobar", "foobaz", newIOContext(random()));
+    dest.copyFrom(source, "foobar", "foobaz", newIOContext(random()));
     assertTrue(slowFileExists(dest, "foobaz"));
     
     IndexInput input = dest.openInput("foobaz", newIOContext(random()));
@@ -141,7 +141,7 @@ public abstract class BaseDirectoryTestC
     output2.writeString("bogus!");
     output2.close();
     
-    source.copy(dest, "foobar", "foobaz", newIOContext(random()));
+    dest.copyFrom(source, "foobar", "foobaz", newIOContext(random()));
     assertTrue(slowFileExists(dest, "foobaz"));
     
     IndexInput input = dest.openInput("foobaz", newIOContext(random()));
@@ -735,7 +735,7 @@ public abstract class BaseDirectoryTestC
     
     // this test backdoors the directory via the filesystem. so it must be an FSDir (for now)
     // TODO: figure a way to test this better/clean it up. E.g. we should be testing for FileSwitchDir,
-    // if its using two FSdirs and so on
+    // if it's using two FSdirs and so on
     if (fsdir instanceof FSDirectory == false) {
       fsdir.close();
       assumeTrue("test only works for FSDirectory subclasses", false);
@@ -1040,5 +1040,35 @@ public abstract class BaseDirectoryTestC
     out.close();
     dir.close();
   }
+  
+  public void testDoubleCloseDirectory() throws Throwable {
+    Directory dir = getDirectory(createTempDir());
+    IndexOutput out = dir.createOutput("foobar", newIOContext(random()));
+    out.writeString("testing");
+    out.close();
+    dir.close();
+    dir.close(); // close again
+  }
+  
+  public void testDoubleCloseOutput() throws Throwable {
+    Directory dir = getDirectory(createTempDir());
+    IndexOutput out = dir.createOutput("foobar", newIOContext(random()));
+    out.writeString("testing");
+    out.close();
+    out.close(); // close again
+    dir.close();
+  }
+  
+  public void testDoubleCloseInput() throws Throwable {
+    Directory dir = getDirectory(createTempDir());
+    IndexOutput out = dir.createOutput("foobar", newIOContext(random()));
+    out.writeString("testing");
+    out.close();
+    IndexInput in = dir.openInput("foobar", newIOContext(random()));
+    assertEquals("testing", in.readString());
+    in.close();
+    in.close(); // close again
+    dir.close();
+  }
 }
 

Modified: lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/BaseDirectoryWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/BaseDirectoryWrapper.java?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/BaseDirectoryWrapper.java (original)
+++ lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/BaseDirectoryWrapper.java Sun Jan  4 14:53:12 2015
@@ -40,9 +40,11 @@ public class BaseDirectoryWrapper extend
 
   @Override
   public void close() throws IOException {
-    isOpen = false;
-    if (checkIndexOnClose && DirectoryReader.indexExists(this)) {
-      TestUtil.checkIndex(this, crossCheckTermVectorsOnClose);
+    if (isOpen) {
+      isOpen = false;
+      if (checkIndexOnClose && DirectoryReader.indexExists(this)) {
+        TestUtil.checkIndex(this, crossCheckTermVectorsOnClose);
+      }
     }
     super.close();
   }
@@ -71,8 +73,9 @@ public class BaseDirectoryWrapper extend
     return crossCheckTermVectorsOnClose;
   }
 
+  // why does this class override this method?
   @Override
-  public void copy(Directory to, String src, String dest, IOContext context) throws IOException {
-    in.copy(to, src, dest, context);
+  public void copyFrom(Directory from, String src, String dest, IOContext context) throws IOException {
+    in.copyFrom(from, src, dest, context);
   }
 }

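The copy API is now expressed from the destination's side: dest.copyFrom(source, src, dest, context) replaces source.copy(dest, src, dest, context), as the test renames above reflect. A hedged usage sketch, assuming the Path-based FSDirectory.open available at this revision (paths and file names are illustrative):

    import java.nio.file.Paths;

    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;
    import org.apache.lucene.store.IOContext;

    public class CopyFromExample {
      public static void main(String[] args) throws Exception {
        // Copy "foobar" from source into dest under the name "foobaz";
        // the call now lives on the destination Directory.
        try (Directory source = FSDirectory.open(Paths.get("/tmp/source"));
             Directory dest = FSDirectory.open(Paths.get("/tmp/dest"))) {
          dest.copyFrom(source, "foobar", "foobaz", IOContext.DEFAULT);
        }
      }
    }
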
Modified: lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java (original)
+++ lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java Sun Jan  4 14:53:12 2015
@@ -134,6 +134,16 @@ public class MockDirectoryWrapper extend
   public int getInputCloneCount() {
     return inputCloneCount.get();
   }
+  
+  boolean verboseClone;
+  
+  /** 
+   * If set to true, we print a fake exception
+   * with filename and stacktrace on every indexinput clone()
+   */
+  public void setVerboseClone(boolean v) {
+    verboseClone = v;
+  }
 
   public void setTrackDiskUsage(boolean v) {
     trackDiskUsage = v;
@@ -259,7 +269,7 @@ public class MockDirectoryWrapper extend
       success = true;
     } finally {
       if (success) {
-        // we don't do this stuff with lucene's commit, but its just for completeness
+        // we don't do this stuff with lucene's commit, but it's just for completeness
         if (unSyncedFiles.contains(source)) {
           unSyncedFiles.remove(source);
           unSyncedFiles.add(dest);
@@ -720,158 +730,173 @@ public class MockDirectoryWrapper extend
 
   @Override
   public synchronized void close() throws IOException {
-    // files that we tried to delete, but couldn't because readers were open.
-    // all that matters is that we tried! (they will eventually go away)
-    //   still open when we tried to delete
-    Set<String> pendingDeletions = new HashSet<>(openFilesDeleted);
-    //   virus scanner when we tried to delete
-    pendingDeletions.addAll(triedToDelete);
-    maybeYield();
-    if (openFiles == null) {
-      openFiles = new HashMap<>();
-      openFilesDeleted = new HashSet<>();
-    }
-    if (openFiles.size() > 0) {
-      // print the first one as its very verbose otherwise
-      Exception cause = null;
-      Iterator<Exception> stacktraces = openFileHandles.values().iterator();
-      if (stacktraces.hasNext()) {
-        cause = stacktraces.next();
-      }
-      // RuntimeException instead of IOException because
-      // super() does not throw IOException currently:
-      throw new RuntimeException("MockDirectoryWrapper: cannot close: there are still open files: " + openFiles, cause);
-    }
-    if (openLocks.size() > 0) {
-      Exception cause = null;
-      Iterator<Exception> stacktraces = openLocks.values().iterator();
-      if (stacktraces.hasNext()) {
-        cause = stacktraces.next();
-      }
-      throw new RuntimeException("MockDirectoryWrapper: cannot close: there are still open locks: " + openLocks, cause);
-    }
-
-    isOpen = false;
-    if (getCheckIndexOnClose()) {
-      randomIOExceptionRate = 0.0;
-      randomIOExceptionRateOnOpen = 0.0;
-      if (DirectoryReader.indexExists(this)) {
-        if (LuceneTestCase.VERBOSE) {
-          System.out.println("\nNOTE: MockDirectoryWrapper: now crush");
-        }
-        crash(); // corrupt any unsynced-files
-        if (LuceneTestCase.VERBOSE) {
-          System.out.println("\nNOTE: MockDirectoryWrapper: now run CheckIndex");
-        } 
-        TestUtil.checkIndex(this, getCrossCheckTermVectorsOnClose(), true);
-
-        // TODO: factor this out / share w/ TestIW.assertNoUnreferencedFiles
-        if (assertNoUnreferencedFilesOnClose) {
-          // now look for unreferenced files: discount ones that we tried to delete but could not
-          Set<String> allFiles = new HashSet<>(Arrays.asList(listAll()));
-          allFiles.removeAll(pendingDeletions);
-          String[] startFiles = allFiles.toArray(new String[0]);
-          IndexWriterConfig iwc = new IndexWriterConfig(null);
-          iwc.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE);
-          new IndexWriter(in, iwc).rollback();
-          String[] endFiles = in.listAll();
+    if (isOpen) {
+      isOpen = false;
+    } else {
+      in.close(); // but call it again on our wrapped dir
+      return;
+    }
 
-          Set<String> startSet = new TreeSet<>(Arrays.asList(startFiles));
-          Set<String> endSet = new TreeSet<>(Arrays.asList(endFiles));
-          
-          if (pendingDeletions.contains("segments.gen") && endSet.contains("segments.gen")) {
-            // this is possible if we hit an exception while writing segments.gen, we try to delete it
-            // and it ends out in pendingDeletions (but IFD wont remove this).
-            startSet.add("segments.gen");
-            if (LuceneTestCase.VERBOSE) {
-              System.out.println("MDW: Unreferenced check: Ignoring segments.gen that we could not delete.");
-            }
+    boolean success = false;
+    try {
+      // files that we tried to delete, but couldn't because readers were open.
+      // all that matters is that we tried! (they will eventually go away)
+      //   still open when we tried to delete
+      Set<String> pendingDeletions = new HashSet<>(openFilesDeleted);
+      //   virus scanner when we tried to delete
+      pendingDeletions.addAll(triedToDelete);
+      maybeYield();
+      if (openFiles == null) {
+        openFiles = new HashMap<>();
+        openFilesDeleted = new HashSet<>();
+      }
+      if (openFiles.size() > 0) {
+        // print the first one as it's very verbose otherwise
+        Exception cause = null;
+        Iterator<Exception> stacktraces = openFileHandles.values().iterator();
+        if (stacktraces.hasNext()) {
+          cause = stacktraces.next();
+        }
+        // RuntimeException instead of IOException because
+        // super() does not throw IOException currently:
+        throw new RuntimeException("MockDirectoryWrapper: cannot close: there are still open files: " + openFiles, cause);
+      }
+      if (openLocks.size() > 0) {
+        Exception cause = null;
+        Iterator<Exception> stacktraces = openLocks.values().iterator();
+        if (stacktraces.hasNext()) {
+          cause = stacktraces.next();
+        }
+        throw new RuntimeException("MockDirectoryWrapper: cannot close: there are still open locks: " + openLocks, cause);
+      }
+      
+      if (getCheckIndexOnClose()) {
+        randomIOExceptionRate = 0.0;
+        randomIOExceptionRateOnOpen = 0.0;
+        if (DirectoryReader.indexExists(this)) {
+          if (LuceneTestCase.VERBOSE) {
+            System.out.println("\nNOTE: MockDirectoryWrapper: now crush");
           }
+          crash(); // corrupt any unsynced-files
+          if (LuceneTestCase.VERBOSE) {
+            System.out.println("\nNOTE: MockDirectoryWrapper: now run CheckIndex");
+          } 
+          TestUtil.checkIndex(this, getCrossCheckTermVectorsOnClose(), true);
           
-          // its possible we cannot delete the segments_N on windows if someone has it open and
-          // maybe other files too, depending on timing. normally someone on windows wouldnt have
-          // an issue (IFD would nuke this stuff eventually), but we pass NoDeletionPolicy...
-          for (String file : pendingDeletions) {
-            if (file.startsWith("segments") && !file.equals("segments.gen") && endSet.contains(file)) {
-              startSet.add(file);
+          // TODO: factor this out / share w/ TestIW.assertNoUnreferencedFiles
+          if (assertNoUnreferencedFilesOnClose) {
+            // now look for unreferenced files: discount ones that we tried to delete but could not
+            Set<String> allFiles = new HashSet<>(Arrays.asList(listAll()));
+            allFiles.removeAll(pendingDeletions);
+            String[] startFiles = allFiles.toArray(new String[0]);
+            IndexWriterConfig iwc = new IndexWriterConfig(null);
+            iwc.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE);
+            new IndexWriter(in, iwc).rollback();
+            String[] endFiles = in.listAll();
+            
+            Set<String> startSet = new TreeSet<>(Arrays.asList(startFiles));
+            Set<String> endSet = new TreeSet<>(Arrays.asList(endFiles));
+            
+            if (pendingDeletions.contains("segments.gen") && endSet.contains("segments.gen")) {
+              // this is possible if we hit an exception while writing segments.gen, we try to delete it
+              // and it ends out in pendingDeletions (but IFD wont remove this).
+              startSet.add("segments.gen");
               if (LuceneTestCase.VERBOSE) {
-                System.out.println("MDW: Unreferenced check: Ignoring segments file: " + file + " that we could not delete.");
+                System.out.println("MDW: Unreferenced check: Ignoring segments.gen that we could not delete.");
               }
-              SegmentInfos sis;
-              try {
-                sis = SegmentInfos.readCommit(in, file);
-              } catch (IOException ioe) {
-                // OK: likely some of the .si files were deleted
-                sis = new SegmentInfos();
-              }
-
-              try {
-                Set<String> ghosts = new HashSet<>(sis.files(in, false));
-                for (String s : ghosts) {
-                  if (endSet.contains(s) && !startSet.contains(s)) {
-                    assert pendingDeletions.contains(s);
-                    if (LuceneTestCase.VERBOSE) {
-                      System.out.println("MDW: Unreferenced check: Ignoring referenced file: " + s + " " +
-                                         "from " + file + " that we could not delete.");
+            }
+            
+            // it's possible we cannot delete the segments_N on windows if someone has it open and
+            // maybe other files too, depending on timing. normally someone on windows wouldnt have
+            // an issue (IFD would nuke this stuff eventually), but we pass NoDeletionPolicy...
+            for (String file : pendingDeletions) {
+              if (file.startsWith("segments") && !file.equals("segments.gen") && endSet.contains(file)) {
+                startSet.add(file);
+                if (LuceneTestCase.VERBOSE) {
+                  System.out.println("MDW: Unreferenced check: Ignoring segments file: " + file + " that we could not delete.");
+                }
+                SegmentInfos sis;
+                try {
+                  sis = SegmentInfos.readCommit(in, file);
+                } catch (IOException ioe) {
+                  // OK: likely some of the .si files were deleted
+                  sis = new SegmentInfos();
+                }
+                
+                try {
+                  Set<String> ghosts = new HashSet<>(sis.files(in, false));
+                  for (String s : ghosts) {
+                    if (endSet.contains(s) && !startSet.contains(s)) {
+                      assert pendingDeletions.contains(s);
+                      if (LuceneTestCase.VERBOSE) {
+                        System.out.println("MDW: Unreferenced check: Ignoring referenced file: " + s + " " +
+                            "from " + file + " that we could not delete.");
+                      }
+                      startSet.add(s);
                     }
-                    startSet.add(s);
                   }
+                } catch (Throwable t) {
+                  System.err.println("ERROR processing leftover segments file " + file + ":");
+                  t.printStackTrace();
                 }
-              } catch (Throwable t) {
-                System.err.println("ERROR processing leftover segments file " + file + ":");
-                t.printStackTrace();
               }
             }
-          }
-
-          startFiles = startSet.toArray(new String[0]);
-          endFiles = endSet.toArray(new String[0]);
-
-          if (!Arrays.equals(startFiles, endFiles)) {
-            List<String> removed = new ArrayList<>();
-            for(String fileName : startFiles) {
-              if (!endSet.contains(fileName)) {
-                removed.add(fileName);
+            
+            startFiles = startSet.toArray(new String[0]);
+            endFiles = endSet.toArray(new String[0]);
+            
+            if (!Arrays.equals(startFiles, endFiles)) {
+              List<String> removed = new ArrayList<>();
+              for(String fileName : startFiles) {
+                if (!endSet.contains(fileName)) {
+                  removed.add(fileName);
+                }
               }
-            }
-
-            List<String> added = new ArrayList<>();
-            for(String fileName : endFiles) {
-              if (!startSet.contains(fileName)) {
-                added.add(fileName);
+              
+              List<String> added = new ArrayList<>();
+              for(String fileName : endFiles) {
+                if (!startSet.contains(fileName)) {
+                  added.add(fileName);
+                }
               }
+              
+              String extras;
+              if (removed.size() != 0) {
+                extras = "\n\nThese files were removed: " + removed;
+              } else {
+                extras = "";
+              }
+              
+              if (added.size() != 0) {
+                extras += "\n\nThese files were added (waaaaaaaaaat!): " + added;
+              }
+              
+              if (pendingDeletions.size() != 0) {
+                extras += "\n\nThese files we had previously tried to delete, but couldn't: " + pendingDeletions;
+              }
+              
+              throw new RuntimeException("unreferenced files: before delete:\n    " + Arrays.toString(startFiles) + "\n  after delete:\n    " + Arrays.toString(endFiles) + extras);
             }
-
-            String extras;
-            if (removed.size() != 0) {
-              extras = "\n\nThese files were removed: " + removed;
-            } else {
-              extras = "";
-            }
-
-            if (added.size() != 0) {
-              extras += "\n\nThese files were added (waaaaaaaaaat!): " + added;
-            }
-
-            if (pendingDeletions.size() != 0) {
-              extras += "\n\nThese files we had previously tried to delete, but couldn't: " + pendingDeletions;
-            }
-             
-            throw new RuntimeException("unreferenced files: before delete:\n    " + Arrays.toString(startFiles) + "\n  after delete:\n    " + Arrays.toString(endFiles) + extras);
+            
+            DirectoryReader ir1 = DirectoryReader.open(this);
+            int numDocs1 = ir1.numDocs();
+            ir1.close();
+            new IndexWriter(this, new IndexWriterConfig(null)).close();
+            DirectoryReader ir2 = DirectoryReader.open(this);
+            int numDocs2 = ir2.numDocs();
+            ir2.close();
+            assert numDocs1 == numDocs2 : "numDocs changed after opening/closing IW: before=" + numDocs1 + " after=" + numDocs2;
           }
-
-          DirectoryReader ir1 = DirectoryReader.open(this);
-          int numDocs1 = ir1.numDocs();
-          ir1.close();
-          new IndexWriter(this, new IndexWriterConfig(null)).close();
-          DirectoryReader ir2 = DirectoryReader.open(this);
-          int numDocs2 = ir2.numDocs();
-          ir2.close();
-          assert numDocs1 == numDocs2 : "numDocs changed after opening/closing IW: before=" + numDocs1 + " after=" + numDocs2;
         }
       }
+      success = true;
+    } finally {
+      if (success) {
+        IOUtils.close(in);
+      } else {
+        IOUtils.closeWhileHandlingException(in);
+      }
     }
-    in.close();
   }
 
   synchronized void removeOpenFile(Closeable c, String name) {
@@ -1023,11 +1048,13 @@ public class MockDirectoryWrapper extend
     }
   }
 
+  // TODO: why does this class override this method?
+  // we should use the default implementation so all of our checks work?
   @Override
-  public synchronized void copy(Directory to, String src, String dest, IOContext context) throws IOException {
+  public synchronized void copyFrom(Directory from, String src, String dest, IOContext context) throws IOException {
     maybeYield();
     // randomize the IOContext here?
-    in.copy(to, src, dest, context);
+    in.copyFrom(from, src, dest, context);
   }
   
   /** Use this when throwing fake {@code IOException},

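The new setVerboseClone flag pairs with the MockIndexInputWrapper changes below: when enabled, every clone() or slice() of an input opened from the wrapped directory prints a fake exception with the handle name and a stack trace, which helps trace leaked clones. A rough usage sketch inside a LuceneTestCase subclass (file name and contents are illustrative):

    public void testVerboseClone() throws Exception {
      MockDirectoryWrapper dir = newMockDirectory();   // LuceneTestCase helper
      dir.setVerboseClone(true);                       // flag added in this commit
      IndexOutput out = dir.createOutput("test.bin", IOContext.DEFAULT);
      out.writeString("hello");
      out.close();
      IndexInput in = dir.openInput("test.bin", IOContext.DEFAULT);
      IndexInput clone = in.clone();                   // prints "clone: ..." plus a stack trace
      assertEquals("hello", clone.readString());
      in.close();                                      // clones themselves need not be closed
      dir.close();
    }
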
Modified: lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/MockIndexInputWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/MockIndexInputWrapper.java?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/MockIndexInputWrapper.java (original)
+++ lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/MockIndexInputWrapper.java Sun Jan  4 14:53:12 2015
@@ -1,7 +1,9 @@
 package org.apache.lucene.store;
 
+import java.io.Closeable;
 import java.io.IOException;
 import java.util.Map;
+import java.util.Set;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -42,16 +44,20 @@ public class MockIndexInputWrapper exten
 
   @Override
   public void close() throws IOException {
-    try {
-      // turn on the following to look for leaks closing inputs,
-      // after fixing TestTransactions
-      // dir.maybeThrowDeterministicException();
-    } finally {
-      closed = true;
-      delegate.close();
+    // TODO turn on the following to look for leaks closing inputs,
+    // after fixing TestTransactions
+    // dir.maybeThrowDeterministicException();
+    if (closed) {
+      delegate.close(); // don't mask double-close bugs
+      return;
+    }
+    closed = true;
+    
+    try (Closeable delegate = this.delegate) {
       // Pending resolution on LUCENE-686 we may want to
       // remove the conditional check so we also track that
       // all clones get closed:
+      assert delegate != null;
       if (!isClone) {
         dir.removeIndexInput(this, name);
       }
@@ -67,6 +73,9 @@ public class MockIndexInputWrapper exten
   @Override
   public MockIndexInputWrapper clone() {
     ensureOpen();
+    if (dir.verboseClone) {
+      new Exception("clone: " + this).printStackTrace(System.out);
+    }
     dir.inputCloneCount.incrementAndGet();
     IndexInput iiclone = delegate.clone();
     MockIndexInputWrapper clone = new MockIndexInputWrapper(dir, name, iiclone);
@@ -91,6 +100,9 @@ public class MockIndexInputWrapper exten
   @Override
   public IndexInput slice(String sliceDescription, long offset, long length) throws IOException {
     ensureOpen();
+    if (dir.verboseClone) {
+      new Exception("slice: " + this).printStackTrace(System.out);
+    }
     dir.inputCloneCount.incrementAndGet();
     IndexInput slice = delegate.slice(sliceDescription, offset, length);
     MockIndexInputWrapper clone = new MockIndexInputWrapper(dir, sliceDescription, slice);
@@ -178,6 +190,30 @@ public class MockIndexInputWrapper exten
   }
 
   @Override
+  public int readZInt() throws IOException {
+    ensureOpen();
+    return delegate.readZInt();
+  }
+
+  @Override
+  public long readZLong() throws IOException {
+    ensureOpen();
+    return delegate.readZLong();
+  }
+
+  @Override
+  public Set<String> readStringSet() throws IOException {
+    ensureOpen();
+    return delegate.readStringSet();
+  }
+
+  @Override
+  public void skipBytes(long numBytes) throws IOException {
+    ensureOpen();
+    super.skipBytes(numBytes);
+  }
+
+  @Override
   public String toString() {
     return "MockIndexInputWrapper(" + delegate + ")";
   }

Modified: lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/MockIndexOutputWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/MockIndexOutputWrapper.java?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/MockIndexOutputWrapper.java (original)
+++ lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/store/MockIndexOutputWrapper.java Sun Jan  4 14:53:12 2015
@@ -17,6 +17,7 @@ package org.apache.lucene.store;
  * limitations under the License.
  */
 
+import java.io.Closeable;
 import java.io.IOException;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -88,12 +89,21 @@ public class MockIndexOutputWrapper exte
     }
   }
   
+  private boolean closed;
+  
   @Override
   public void close() throws IOException {
-    try {
+    if (closed) {
+      delegate.close(); // don't mask double-close bugs
+      return;
+    }
+    closed = true;
+    
+    try (Closeable delegate = this.delegate) {
+      assert delegate != null;
       dir.maybeThrowDeterministicException();
     } finally {
-      delegate.close();
+      dir.removeIndexOutput(this, name);
       if (dir.trackDiskUsage) {
         // Now compute actual disk usage & track the maxUsedSize
         // in the MockDirectoryWrapper:
@@ -102,7 +112,12 @@ public class MockIndexOutputWrapper exte
           dir.maxUsedSize = size;
         }
       }
-      dir.removeIndexOutput(this, name);
+    }
+  }
+  
+  private void ensureOpen() {
+    if (closed) {
+      throw new AlreadyClosedException("Already closed: " + this);
     }
   }
 
@@ -114,6 +129,7 @@ public class MockIndexOutputWrapper exte
   
   @Override
   public void writeBytes(byte[] b, int offset, int len) throws IOException {
+    ensureOpen();
     checkCrashed();
     checkDiskFull(b, offset, null, len);
     
@@ -143,6 +159,7 @@ public class MockIndexOutputWrapper exte
 
   @Override
   public void copyBytes(DataInput input, long numBytes) throws IOException {
+    ensureOpen();
     checkCrashed();
     checkDiskFull(null, 0, input, numBytes);
     

Modified: lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/BaseBitSetTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/BaseBitSetTestCase.java?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/BaseBitSetTestCase.java (original)
+++ lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/BaseBitSetTestCase.java Sun Jan  4 14:53:12 2015
@@ -18,6 +18,7 @@ package org.apache.lucene.util;
  */
 
 import java.io.IOException;
+import java.util.Collection;
 import java.util.Collections;
 
 import org.apache.lucene.search.DocIdSet;
@@ -171,45 +172,103 @@ public abstract class BaseBitSetTestCase
     }
   }
 
-  /** Test the {@link BitSet#and}, {@link BitSet#or} and {@link BitSet#andNot} methods. */
-  public void testBulkOperations() throws IOException {
+  private void testOr(float load) throws IOException {
     final int numBits = 1 + random().nextInt(100000);
-    BitSet set1 = new JavaUtilBitSet(randomSet(numBits, 0), numBits);
+    BitSet set1 = new JavaUtilBitSet(randomSet(numBits, 0), numBits); // empty
     T set2 = copyOf(set1, numBits);
-    final int iters = TEST_NIGHTLY ? 50 + random().nextInt(50) : 10 + random().nextInt(10);
-    for (int i = 0; i < iters; ++i) {
-      // make extreme percents more likely
-      float percentSet2 = rarely() ? 0 : (float) Math.pow(random().nextDouble(), 2);
-      if (random().nextBoolean()) {
-        percentSet2 = 1 - percentSet2;
-      }
-      BitSet bulkSet = new JavaUtilBitSet(randomSet(numBits, percentSet2), numBits);
-      // operations are sometimes specialized based on the impl, so randomize the impl
-      final DocIdSet bulkSetCopy = randomCopy(bulkSet, numBits);
-      // now randomize the operation
-      if (bulkSetCopy.iterator() == null) {
-        continue;
-      }
-      DocIdSetIterator it1 = bulkSetCopy.iterator();
-      DocIdSetIterator it2 = bulkSetCopy.iterator();
-      switch (random().nextInt(3)) {
-        case 0:
-          set1.or(it1);
-          set2.or(it2);
-          break;
-        case 1:
-          set1.and(it1);
-          set2.and(it2);
-          break;
-        default:
-          set1.andNot(it1);
-          set2.andNot(it2);
-          break;
+    
+    final int iterations = atLeast(10);
+    for (int iter = 0; iter < iterations; ++iter) {
+      DocIdSet otherSet = randomCopy(new JavaUtilBitSet(randomSet(numBits, load), numBits), numBits);
+      DocIdSetIterator otherIterator = otherSet.iterator();
+      if (otherIterator != null) {
+        set1.or(otherIterator);
+        set2.or(otherSet.iterator());
+        assertEquals(set1, set2, numBits);
+      }
+    }
+  }
+
+  /** Test {@link BitSet#or(DocIdSetIterator)} on sparse sets. */
+  public void testOrSparse() throws IOException {
+    testOr(0.001f);
+  }
+
+  /** Test {@link BitSet#or(DocIdSetIterator)} on dense sets. */
+  public void testOrDense() throws IOException {
+    testOr(0.5f);
+  }
+
+  /** Test {@link BitSet#or(DocIdSetIterator)} on a random density. */
+  public void testOrRandom() throws IOException {
+    testOr(random().nextFloat());
+  }
+
+  private void testAnd(float load) throws IOException {
+    final int numBits = 1 + random().nextInt(100000);
+    BitSet set1 = new JavaUtilBitSet(randomSet(numBits, numBits), numBits); // full
+    T set2 = copyOf(set1, numBits);
+    
+    final int iterations = atLeast(10);
+    for (int iter = 0; iter < iterations; ++iter) {
+      // BitSets have specializations to merge with certain impls, so we randomize the impl...
+      DocIdSet otherSet = randomCopy(new JavaUtilBitSet(randomSet(numBits, load), numBits), numBits);
+      DocIdSetIterator otherIterator = otherSet.iterator();
+      if (otherIterator != null) {
+        set1.and(otherIterator);
+        set2.and(otherSet.iterator());
+        assertEquals(set1, set2, numBits);
       }
-      assertEquals(set1, set2, numBits);
     }
   }
 
+  /** Test {@link BitSet#and(DocIdSetIterator)} on sparse sets. */
+  public void testAndSparse() throws IOException {
+    testAnd(0.1f);
+  }
+
+  /** Test {@link BitSet#and(DocIdSetIterator)} on dense sets. */
+  public void testAndDense() throws IOException {
+    testAnd(0.99f);
+  }
+
+  /** Test {@link BitSet#and(DocIdSetIterator)} on a random density. */
+  public void testAndRandom() throws IOException {
+    testAnd(random().nextFloat());
+  }
+
+  private void testAndNot(float load) throws IOException {
+    final int numBits = 1 + random().nextInt(100000);
+    BitSet set1 = new JavaUtilBitSet(randomSet(numBits, numBits), numBits); // full
+    T set2 = copyOf(set1, numBits);
+    
+    final int iterations = atLeast(10);
+    for (int iter = 0; iter < iterations; ++iter) {
+      DocIdSet otherSet = randomCopy(new JavaUtilBitSet(randomSet(numBits, load), numBits), numBits);
+      DocIdSetIterator otherIterator = otherSet.iterator();
+      if (otherIterator != null) {
+        set1.andNot(otherIterator);
+        set2.andNot(otherSet.iterator());
+        assertEquals(set1, set2, numBits);
+      }
+    }
+  }
+
+  /** Test {@link BitSet#andNot(DocIdSetIterator)} on sparse sets. */
+  public void testAndNotSparse() throws IOException {
+    testAndNot(0.01f);
+  }
+  
+  /** Test {@link BitSet#andNot(DocIdSetIterator)} on dense sets. */
+  public void testAndNotDense() throws IOException {
+    testAndNot(0.9f);
+  }
+
+  /** Test {@link BitSet#andNot(DocIdSetIterator)} on a random density. */
+  public void testAndNotRandom() throws IOException {
+    testAndNot(random().nextFloat());
+  }
+
   private static class JavaUtilBitSet extends BitSet {
 
     private final java.util.BitSet bitSet;
@@ -241,7 +300,7 @@ public abstract class BaseBitSetTestCase
     }
 
     @Override
-    public Iterable<? extends Accountable> getChildResources() {
+    public Collection<Accountable> getChildResources() {
       return Collections.emptyList();
     }
 

Modified: lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/LineFileDocs.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/LineFileDocs.java?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/LineFileDocs.java (original)
+++ lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/LineFileDocs.java Sun Jan  4 14:53:12 2015
@@ -91,7 +91,7 @@ public class LineFileDocs implements Clo
     boolean needSkip = true;
     long size = 0L, seekTo = 0L;
     if (is == null) {
-      // if its not in classpath, we load it as absolute filesystem path (e.g. Hudson's home dir)
+      // if it's not in classpath, we load it as absolute filesystem path (e.g. Hudson's home dir)
       Path file = Paths.get(path);
       size = Files.size(file);
       if (path.endsWith(".gz")) {

Modified: lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java (original)
+++ lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java Sun Jan  4 14:53:12 2015
@@ -72,8 +72,8 @@ import org.apache.lucene.index.FieldFilt
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
-import org.apache.lucene.index.IndexReader.ReaderClosedListener;
 import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexReader.ReaderClosedListener;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.IndexableField;
@@ -85,6 +85,8 @@ import org.apache.lucene.index.LogDocMer
 import org.apache.lucene.index.LogMergePolicy;
 import org.apache.lucene.index.MergePolicy;
 import org.apache.lucene.index.MergeScheduler;
+import org.apache.lucene.index.MismatchedDirectoryReader;
+import org.apache.lucene.index.MismatchedLeafReader;
 import org.apache.lucene.index.MockRandomMergePolicy;
 import org.apache.lucene.index.MultiDocValues;
 import org.apache.lucene.index.MultiFields;
@@ -100,8 +102,8 @@ import org.apache.lucene.index.SortedDoc
 import org.apache.lucene.index.SortedNumericDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum.SeekStatus;
 import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.index.TermsEnum.SeekStatus;
 import org.apache.lucene.index.TieredMergePolicy;
 import org.apache.lucene.search.AssertingIndexSearcher;
 import org.apache.lucene.search.DocIdSet;
@@ -117,12 +119,12 @@ import org.apache.lucene.store.Directory
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.FSLockFactory;
 import org.apache.lucene.store.FlushInfo;
-import org.apache.lucene.store.IOContext.Context;
 import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IOContext.Context;
 import org.apache.lucene.store.LockFactory;
 import org.apache.lucene.store.MergeInfo;
-import org.apache.lucene.store.MockDirectoryWrapper.Throttling;
 import org.apache.lucene.store.MockDirectoryWrapper;
+import org.apache.lucene.store.MockDirectoryWrapper.Throttling;
 import org.apache.lucene.store.NRTCachingDirectory;
 import org.apache.lucene.store.RateLimitedDirectoryWrapper;
 import org.apache.lucene.util.automaton.AutomatonTestUtil;
@@ -149,16 +151,16 @@ import com.carrotsearch.randomizedtestin
 import com.carrotsearch.randomizedtesting.annotations.SeedDecorators;
 import com.carrotsearch.randomizedtesting.annotations.TestGroup;
 import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakGroup.Group;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakGroup;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakGroup.Group;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
 import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import com.carrotsearch.randomizedtesting.rules.NoClassHooksShadowingRule;
@@ -470,7 +472,7 @@ public abstract class LuceneTestCase ext
   public static final FilterCachingPolicy MAYBE_CACHE_POLICY = new FilterCachingPolicy() {
 
     @Override
-    public void onCache(Filter filter) {}
+    public void onUse(Filter filter) {}
 
     @Override
     public boolean shouldCache(Filter filter, LeafReaderContext context, DocIdSet set) throws IOException {
@@ -931,8 +933,6 @@ public abstract class LuceneTestCase ext
     if (r.nextBoolean()) {
       c.setMergeScheduler(new SerialMergeScheduler());
     } else if (rarely(r)) {
-      int maxThreadCount = TestUtil.nextInt(r, 1, 4);
-      int maxMergeCount = TestUtil.nextInt(r, maxThreadCount, maxThreadCount + 4);
       ConcurrentMergeScheduler cms;
       if (r.nextBoolean()) {
         cms = new ConcurrentMergeScheduler();
@@ -943,9 +943,22 @@ public abstract class LuceneTestCase ext
             }
           };
       }
+      int maxThreadCount = TestUtil.nextInt(r, 1, 4);
+      int maxMergeCount = TestUtil.nextInt(r, maxThreadCount, maxThreadCount + 4);
       cms.setMaxMergesAndThreads(maxMergeCount, maxThreadCount);
       c.setMergeScheduler(cms);
+    } else {
+      // Always use consistent settings, else CMS's dynamic (SSD or not)
+      // defaults can change, hurting reproducibility:
+      ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+
+      // Only 1 thread can run at once (should maybe help reproducibility),
+      // with up to 3 pending merges before segment-producing threads are
+      // stalled:
+      cms.setMaxMergesAndThreads(3, 1);
+      c.setMergeScheduler(cms);
     }
+
     if (r.nextBoolean()) {
       if (rarely(r)) {
         // crazy value
@@ -1349,7 +1362,7 @@ public abstract class LuceneTestCase ext
   public static BaseDirectoryWrapper newDirectory(Random r, Directory d) throws IOException {
     Directory impl = newDirectoryImpl(r, TEST_DIRECTORY);
     for (String file : d.listAll()) {
-     d.copy(impl, file, file, newIOContext(r));
+     impl.copyFrom(d, file, file, newIOContext(r));
     }
     return wrapDirectory(r, impl, rarely(r));
   }
@@ -1498,7 +1511,7 @@ public abstract class LuceneTestCase ext
       // TODO: remove this, and fix those tests to wrap before putting slow around:
       final boolean wasOriginallyAtomic = r instanceof LeafReader;
       for (int i = 0, c = random.nextInt(6)+1; i < c; i++) {
-        switch(random.nextInt(5)) {
+        switch(random.nextInt(6)) {
           case 0:
             r = SlowCompositeReaderWrapper.wrap(r);
             break;
@@ -1539,6 +1552,13 @@ public abstract class LuceneTestCase ext
               r = new AssertingDirectoryReader((DirectoryReader)r);
             }
             break;
+          case 5:
+            if (r instanceof LeafReader) {
+              r = new MismatchedLeafReader((LeafReader)r, random);
+            } else if (r instanceof DirectoryReader) {
+              r = new MismatchedDirectoryReader((DirectoryReader)r, random);
+            }
+            break;
           default:
             fail("should not get here");
         }

Modified: lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/RamUsageTester.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/RamUsageTester.java?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/RamUsageTester.java (original)
+++ lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/RamUsageTester.java Sun Jan  4 14:53:12 2015
@@ -22,15 +22,13 @@ import java.lang.reflect.Field;
 import java.lang.reflect.Modifier;
 import java.util.AbstractList;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.IdentityHashMap;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.NoSuchElementException;
+import java.util.Set;
 
 /** Crawls object graph to collect RAM usage for testing */
 public final class RamUsageTester {
@@ -91,7 +89,7 @@ public final class RamUsageTester {
    */
   private static long measureObjectSize(Object root, Accumulator accumulator) {
     // Objects seen so far.
-    final IdentityHashSet<Object> seen = new IdentityHashSet<>();
+    final Set<Object> seen = Collections.newSetFromMap(new IdentityHashMap<Object, Boolean>());
     // Class cache with reference Field and precalculated shallow size. 
     final IdentityHashMap<Class<?>, ClassCache> classCache = new IdentityHashMap<>();
     // Stack of objects pending traversal. Recursion caused stack overflows. 
@@ -213,243 +211,5 @@ public final class RamUsageTester {
         referenceFields.toArray(new Field[referenceFields.size()]));
     return cachedInfo;
   }
-  
-  /**
-   * An identity hash set implemented using open addressing. No null keys are allowed.
-   * 
-   * TODO: If this is useful outside this class, make it public - needs some work
-   */
-  static final class IdentityHashSet<KType> implements Iterable<KType> {
-    /**
-     * Default load factor.
-     */
-    public final static float DEFAULT_LOAD_FACTOR = 0.75f;
-
-    /**
-     * Minimum capacity for the set.
-     */
-    public final static int MIN_CAPACITY = 4;
-
-    /**
-     * All of set entries. Always of power of two length.
-     */
-    public Object[] keys;
-    
-    /**
-     * Cached number of assigned slots.
-     */
-    public int assigned;
-    
-    /**
-     * The load factor for this set (fraction of allocated or deleted slots before
-     * the buffers must be rehashed or reallocated).
-     */
-    public final float loadFactor;
-    
-    /**
-     * Cached capacity threshold at which we must resize the buffers.
-     */
-    private int resizeThreshold;
-    
-    /**
-     * Creates a hash set with the default capacity of 16.
-     * load factor of {@value #DEFAULT_LOAD_FACTOR}. `
-     */
-    public IdentityHashSet() {
-      this(16, DEFAULT_LOAD_FACTOR);
-    }
-    
-    /**
-     * Creates a hash set with the given capacity, load factor of
-     * {@value #DEFAULT_LOAD_FACTOR}.
-     */
-    public IdentityHashSet(int initialCapacity) {
-      this(initialCapacity, DEFAULT_LOAD_FACTOR);
-    }
-    
-    /**
-     * Creates a hash set with the given capacity and load factor.
-     */
-    public IdentityHashSet(int initialCapacity, float loadFactor) {
-      initialCapacity = Math.max(MIN_CAPACITY, initialCapacity);
-      
-      assert initialCapacity > 0 : "Initial capacity must be between (0, "
-          + Integer.MAX_VALUE + "].";
-      assert loadFactor > 0 && loadFactor < 1 : "Load factor must be between (0, 1).";
-      this.loadFactor = loadFactor;
-      allocateBuffers(roundCapacity(initialCapacity));
-    }
-    
-    /**
-     * Adds a reference to the set. Null keys are not allowed.
-     */
-    public boolean add(KType e) {
-      assert e != null : "Null keys not allowed.";
-      
-      if (assigned >= resizeThreshold) expandAndRehash();
-      
-      final int mask = keys.length - 1;
-      int slot = rehash(e) & mask;
-      Object existing;
-      while ((existing = keys[slot]) != null) {
-        if (e == existing) {
-          return false; // already found.
-        }
-        slot = (slot + 1) & mask;
-      }
-      assigned++;
-      keys[slot] = e;
-      return true;
-    }
-
-    /**
-     * Checks if the set contains a given ref.
-     */
-    public boolean contains(KType e) {
-      final int mask = keys.length - 1;
-      int slot = rehash(e) & mask;
-      Object existing;
-      while ((existing = keys[slot]) != null) {
-        if (e == existing) {
-          return true;
-        }
-        slot = (slot + 1) & mask;
-      }
-      return false;
-    }
 
-    /** Rehash via MurmurHash.
-     * 
-     * <p>The implementation is based on the
-     * finalization step from Austin Appleby's
-     * <code>MurmurHash3</code>.
-     * 
-     * @see <a href="http://sites.google.com/site/murmurhash/">http://sites.google.com/site/murmurhash/</a>
-     */
-    private static int rehash(Object o) {
-      int k = System.identityHashCode(o);
-      k ^= k >>> 16;
-      k *= 0x85ebca6b;
-      k ^= k >>> 13;
-      k *= 0xc2b2ae35;
-      k ^= k >>> 16;
-      return k;
-    }
-    
-    /**
-     * Expand the internal storage buffers (capacity) or rehash current keys and
-     * values if there are a lot of deleted slots.
-     */
-    private void expandAndRehash() {
-      final Object[] oldKeys = this.keys;
-      
-      assert assigned >= resizeThreshold;
-      allocateBuffers(nextCapacity(keys.length));
-      
-      /*
-       * Rehash all assigned slots from the old hash table.
-       */
-      final int mask = keys.length - 1;
-      for (int i = 0; i < oldKeys.length; i++) {
-        final Object key = oldKeys[i];
-        if (key != null) {
-          int slot = rehash(key) & mask;
-          while (keys[slot] != null) {
-            slot = (slot + 1) & mask;
-          }
-          keys[slot] = key;
-        }
-      }
-      Arrays.fill(oldKeys, null);
-    }
-    
-    /**
-     * Allocate internal buffers for a given capacity.
-     * 
-     * @param capacity
-     *          New capacity (must be a power of two).
-     */
-    private void allocateBuffers(int capacity) {
-      this.keys = new Object[capacity];
-      this.resizeThreshold = (int) (capacity * DEFAULT_LOAD_FACTOR);
-    }
-    
-    /**
-     * Return the next possible capacity, counting from the current buffers' size.
-     */
-    protected int nextCapacity(int current) {
-      assert current > 0 && Long.bitCount(current) == 1 : "Capacity must be a power of two.";
-      assert ((current << 1) > 0) : "Maximum capacity exceeded ("
-          + (0x80000000 >>> 1) + ").";
-      
-      if (current < MIN_CAPACITY / 2) current = MIN_CAPACITY / 2;
-      return current << 1;
-    }
-    
-    /**
-     * Round the capacity to the next allowed value.
-     */
-    protected int roundCapacity(int requestedCapacity) {
-      // Maximum positive integer that is a power of two.
-      if (requestedCapacity > (0x80000000 >>> 1)) return (0x80000000 >>> 1);
-      
-      int capacity = MIN_CAPACITY;
-      while (capacity < requestedCapacity) {
-        capacity <<= 1;
-      }
-
-      return capacity;
-    }
-    
-    public void clear() {
-      assigned = 0;
-      Arrays.fill(keys, null);
-    }
-    
-    public int size() {
-      return assigned;
-    }
-    
-    public boolean isEmpty() {
-      return size() == 0;
-    }
-
-    @Override
-    public Iterator<KType> iterator() {
-      return new Iterator<KType>() {
-        int pos = -1;
-        Object nextElement = fetchNext();
-
-        @Override
-        public boolean hasNext() {
-          return nextElement != null;
-        }
-
-        @SuppressWarnings("unchecked")
-        @Override
-        public KType next() {
-          Object r = this.nextElement;
-          if (r == null) {
-            throw new NoSuchElementException();
-          }
-          this.nextElement = fetchNext();
-          return (KType) r;
-        }
-
-        private Object fetchNext() {
-          pos++;
-          while (pos < keys.length && keys[pos] == null) {
-            pos++;
-          }
-
-          return (pos >= keys.length ? null : keys[pos]);
-        }
-
-        @Override
-        public void remove() {
-          throw new UnsupportedOperationException();
-        }
-      };
-    }
-  }
 }
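
The hunk above drops RamUsageTester's hand-rolled open-addressing IdentityHashSet in favor of the equivalent JDK idiom. A small self-contained sketch of that idiom and its reference-equality semantics; the class and variable names are illustrative only.

    import java.util.Collections;
    import java.util.IdentityHashMap;
    import java.util.Set;

    public class IdentitySetSketch {
      public static void main(String[] args) {
        // Membership is decided by reference identity (==), not equals():
        Set<Object> seen = Collections.newSetFromMap(new IdentityHashMap<Object, Boolean>());
        Object key = new String("lucene");
        seen.add(key);
        System.out.println(seen.contains(key));                  // true: same reference
        System.out.println(seen.contains(new String("lucene"))); // false: equal but a distinct object
      }
    }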

Modified: lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java (original)
+++ lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java Sun Jan  4 14:53:12 2015
@@ -113,7 +113,7 @@ final class TestRuleSetupAndRestoreClass
 
   @Override
   protected void before() throws Exception {
-    // enable this by default, for IDE consistency with ant tests (as its the default from ant)
+    // enable this by default, for IDE consistency with ant tests (as it's the default from ant)
     // TODO: really should be in solr base classes, but some extend LTC directly.
     // we do this in beforeClass, because some tests currently disable it
     restoreProperties.put("solr.directoryFactory", System.getProperty("solr.directoryFactory"));

Modified: lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java (original)
+++ lucene/dev/branches/lucene6005/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java Sun Jan  4 14:53:12 2015
@@ -30,6 +30,8 @@ import java.nio.file.FileSystem;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
@@ -188,7 +190,48 @@ public final class TestUtil {
       // ok
     }
   }
-  
+
+  /**
+   * Checks that the provided collection is read-only.
+   * @see #checkIterator(Iterator)
+   */
+  public static <T> void checkReadOnly(Collection<T> coll) {
+    int size = 0;
+    for (Iterator<?> it = coll.iterator(); it.hasNext(); ) {
+      it.next();
+      size += 1;
+    }
+    if (size != coll.size()) {
+      throw new AssertionError("broken collection, reported size is "
+          + coll.size() + " but iterator has " + size + " elements: " + coll);
+    }
+
+    if (coll.isEmpty() == false) {
+      try {
+        coll.remove(coll.iterator().next());
+        throw new AssertionError("broken collection (supports remove): " + coll);
+      } catch (UnsupportedOperationException e) {
+        // ok
+      }
+    }
+
+    try {
+      coll.add(null);
+      throw new AssertionError("broken collection (supports add): " + coll);
+    } catch (UnsupportedOperationException e) {
+      // ok
+    }
+
+    try {
+      coll.addAll(Collections.singleton(null));
+      throw new AssertionError("broken collection (supports addAll): " + coll);
+    } catch (UnsupportedOperationException e) {
+      // ok
+    }
+
+    checkIterator(coll.iterator());
+  }
+
   public static void syncConcurrentMerges(IndexWriter writer) {
     syncConcurrentMerges(writer.getConfig().getMergeScheduler());
   }
@@ -845,7 +888,7 @@ public final class TestUtil {
     }
     MergeScheduler ms = w.getConfig().getMergeScheduler();
     if (ms instanceof ConcurrentMergeScheduler) {
-      // wtf... shouldnt it be even lower since its 1 by default?!?!
+      // wtf... shouldnt it be even lower since it's 1 by default?!?!
       ((ConcurrentMergeScheduler) ms).setMaxMergesAndThreads(3, 2);
     }
   }
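
The new TestUtil.checkReadOnly helper above verifies that a collection's reported size matches what its iterator produces and that add/addAll/remove all throw UnsupportedOperationException. A sketch of how a test might call it; the test method name and the concrete collections are illustrative, not part of the commit.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;
    import org.apache.lucene.util.TestUtil;

    // Sketch of a test asserting a read-only view (inside a LuceneTestCase subclass):
    public void testViewIsReadOnly() throws Exception {
      List<String> readOnly = Collections.unmodifiableList(Arrays.asList("a", "b"));
      TestUtil.checkReadOnly(readOnly);   // passes: size matches iteration, mutators throw UOE

      // A mutable list would trip the remove() probe:
      // TestUtil.checkReadOnly(new ArrayList<>(Arrays.asList("a")));  // AssertionError
    }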

Modified: lucene/dev/branches/lucene6005/lucene/tools/build.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/tools/build.xml?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/tools/build.xml (original)
+++ lucene/dev/branches/lucene6005/lucene/tools/build.xml Sun Jan  4 14:53:12 2015
@@ -33,7 +33,7 @@
 
   <path id="test.classpath"/>
 
-  <!-- redefine the test compilation, so its just a no-op -->
+  <!-- redefine the test compilation, so it's just a no-op -->
   <target name="compile-test"/>
   
   <!-- redefine the forbidden apis to be no-ops -->

Modified: lucene/dev/branches/lucene6005/lucene/tools/custom-tasks.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/tools/custom-tasks.xml?rev=1649347&r1=1649346&r2=1649347&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/tools/custom-tasks.xml (original)
+++ lucene/dev/branches/lucene6005/lucene/tools/custom-tasks.xml Sun Jan  4 14:53:12 2015
@@ -47,7 +47,7 @@
     
     <replaceregex pattern="[-]tests$" replace="-tests" flags="gi" />
 
-    <!-- git hashcode pattern: its always 40 chars right? -->
+    <!-- git hashcode pattern: it's always 40 chars right? -->
     <replaceregex pattern="\-[a-z0-9]{40,40}$" replace="" flags="gi" />
   </filtermapper>