Posted to commits@lucene.apache.org by rm...@apache.org on 2011/02/09 02:04:13 UTC

svn commit: r1068718 [8/21] - in /lucene/dev/branches/bulkpostings: ./ dev-tools/eclipse/ dev-tools/idea/.idea/ dev-tools/idea/lucene/ dev-tools/maven/ dev-tools/maven/lucene/ dev-tools/maven/lucene/contrib/ant/ dev-tools/maven/lucene/contrib/db/bdb-je...

Modified: lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/State.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/State.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/State.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/State.java Wed Feb  9 01:03:49 2011
@@ -31,7 +31,6 @@ package org.apache.lucene.util.automaton
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.RamUsageEstimator;
 
-import java.io.Serializable;
 import java.util.Collection;
 import java.util.Comparator;
 import java.util.Iterator;
@@ -41,7 +40,7 @@ import java.util.Iterator;
  * 
  * @lucene.experimental
  */
-public class State implements Serializable, Comparable<State> {
+public class State implements Comparable<State> {
   
   boolean accept;
   public Transition[] transitionsArray;

Modified: lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/Transition.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/Transition.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/Transition.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/Transition.java Wed Feb  9 01:03:49 2011
@@ -29,7 +29,6 @@
 
 package org.apache.lucene.util.automaton;
 
-import java.io.Serializable;
 import java.util.Comparator;
 
 /**
@@ -40,7 +39,7 @@ import java.util.Comparator;
  * 
  * @lucene.experimental
  */
-public class Transition implements Serializable, Cloneable {
+public class Transition implements Cloneable {
   
   /*
    * CLASS INVARIANT: min<=max

Modified: lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/UTF32ToUTF8.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/UTF32ToUTF8.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/UTF32ToUTF8.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/UTF32ToUTF8.java Wed Feb  9 01:03:49 2011
@@ -26,7 +26,8 @@ import java.util.ArrayList;
 // TODO
 //   - do we really need the .bits...?  if not we can make util in UnicodeUtil to convert 1 char into a BytesRef
 
-final class UTF32ToUTF8 {
+/** @lucene.internal */
+public final class UTF32ToUTF8 {
 
   // Unicode boundaries for UTF8 bytes 1,2,3,4
   private static final int[] startCodes = new int[] {0, 128, 2048, 65536};
@@ -105,6 +106,7 @@ final class UTF32ToUTF8 {
       }
     }
 
+    @Override
     public String toString() {
       StringBuilder b = new StringBuilder();
       for(int i=0;i<len;i++) {

Modified: lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/fst/FST.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/fst/FST.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/fst/FST.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/fst/FST.java Wed Feb  9 01:03:49 2011
@@ -490,7 +490,7 @@ public class FST<T> {
     }
   }
 
-  // Not private beacaus NodeHash needs access:
+  // Not private because NodeHash needs access:
   Arc<T> readFirstRealArc(int address, Arc<T> arc) throws IOException {
 
     final BytesReader in = getBytesReader(address);

Modified: lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/fst/PairOutputs.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/fst/PairOutputs.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/fst/PairOutputs.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/automaton/fst/PairOutputs.java Wed Feb  9 01:03:49 2011
@@ -55,6 +55,7 @@ public class PairOutputs<A,B> extends Ou
       }
     }
 
+    @Override
     public int hashCode() {
       return output1.hashCode() + output2.hashCode();
     }

Modified: lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/Packed32.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/Packed32.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/Packed32.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/Packed32.java Wed Feb  9 01:03:49 2011
@@ -214,6 +214,7 @@ class Packed32 extends PackedInts.Reader
     Arrays.fill(blocks, 0);
   }
 
+  @Override
   public String toString() {
     return "Packed32(bitsPerValue=" + bitsPerValue + ", maxPos=" + maxPos
             + ", elements.length=" + blocks.length + ")";

Modified: lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/Packed64.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/Packed64.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/Packed64.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/Packed64.java Wed Feb  9 01:03:49 2011
@@ -199,6 +199,7 @@ class Packed64 extends PackedInts.Reader
                            | ((value << shifts[base + 2]) & writeMasks[base+2]);
   }
 
+  @Override
   public String toString() {
     return "Packed64(bitsPerValue=" + bitsPerValue + ", size="
             + size() + ", maxPos=" + maxPos

Modified: lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/PackedInts.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/PackedInts.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/PackedInts.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/PackedInts.java Wed Feb  9 01:03:49 2011
@@ -251,7 +251,7 @@ public class PackedInts {
 
   /** Returns how many bits are required to hold values up
    *  to and including maxValue
-   * @param maxValue the maximum value tha should be representable.
+   * @param maxValue the maximum value that should be representable.
    * @return the amount of bits needed to represent values from 0 to maxValue.
    * @lucene.internal
    */
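
[Editor's note] A minimal usage sketch (illustrative, not part of this commit) of the method whose javadoc is corrected above; the static signature PackedInts.bitsRequired(long) is assumed here:

    import org.apache.lucene.util.packed.PackedInts;

    public class BitsRequiredDemo {
      public static void main(String[] args) {
        // Smallest number of bits that can hold every value in [0, maxValue]:
        System.out.println(PackedInts.bitsRequired(1));     // 1
        System.out.println(PackedInts.bitsRequired(255));   // 8
        System.out.println(PackedInts.bitsRequired(256));   // 9
        System.out.println(PackedInts.bitsRequired(65535)); // 16
      }
    }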

Modified: lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/PackedWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/PackedWriter.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/PackedWriter.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/PackedWriter.java Wed Feb  9 01:03:49 2011
@@ -106,6 +106,7 @@ class PackedWriter extends PackedInts.Wr
     }
   }
 
+  @Override
   public String toString() {
     return "PackedWriter(written " + written + "/" + valueCount + " with "
             + bitsPerValue + " bits/value)";

Modified: lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/package.html
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/package.html?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/package.html (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/java/org/apache/lucene/util/packed/package.html Wed Feb  9 01:03:49 2011
@@ -1,4 +1,20 @@
 <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
 <html>
 <head></head>
 <body bgcolor="white">

Modified: lucene/dev/branches/bulkpostings/lucene/src/java/overview.html
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/java/overview.html?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/java/overview.html (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/java/overview.html Wed Feb  9 01:03:49 2011
@@ -45,48 +45,36 @@ to check if the results are what we expe
 <!-- = Java Sourcecode to HTML automatically converted code = -->
 <!-- =   Java2Html Converter 5.0 [2006-03-04] by Markus Gebhard  markus@jave.de   = -->
 <!-- =     Further information: http://www.java2html.de     = -->
-<div align="left" class="java">
-<table border="0" cellpadding="3" cellspacing="0" bgcolor="#ffffff">
-   <tr>
-  <!-- start source code -->
-   <td nowrap="nowrap" valign="top" align="left">
-    <code>
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">Analyzer&nbsp;analyzer&nbsp;=&nbsp;</font><font color="#7f0055"><b>new&nbsp;</b></font><font color="#000000">StandardAnalyzer</font><font color="#000000">(</font><font color="#000000">Version.LUCENE_CURRENT</font><font color="#000000">)</font><font color="#000000">;</font><br />
-<font color="#ffffff"></font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#3f7f5f">//&nbsp;Store&nbsp;the&nbsp;index&nbsp;in&nbsp;memory:</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">Directory&nbsp;directory&nbsp;=&nbsp;</font><font color="#7f0055"><b>new&nbsp;</b></font><font color="#000000">RAMDirectory</font><font color="#000000">()</font><font color="#000000">;</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#3f7f5f">//&nbsp;To&nbsp;store&nbsp;an&nbsp;index&nbsp;on&nbsp;disk,&nbsp;use&nbsp;this&nbsp;instead:</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#3f7f5f">//Directory&nbsp;directory&nbsp;=&nbsp;FSDirectory.open(&#34;/tmp/testindex&#34;);</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">IndexWriter&nbsp;iwriter&nbsp;=&nbsp;</font><font color="#7f0055"><b>new&nbsp;</b></font><font color="#000000">IndexWriter</font><font color="#000000">(</font><font color="#000000">directory,&nbsp;analyzer,&nbsp;true,</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#7f0055"><b>new&nbsp;</b></font><font color="#000000">IndexWriter.MaxFieldLength</font><font color="#000000">(</font><font color="#990000">25000</font><font color="#000000">))</font><font color="#000000">;</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">Document&nbsp;doc&nbsp;=&nbsp;</font><font color="#7f0055"><b>new&nbsp;</b></font><font color="#000000">Document</font><font color="#000000">()</font><font color="#000000">;</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">String&nbsp;text&nbsp;=&nbsp;</font><font color="#2a00ff">&#34;This&nbsp;is&nbsp;the&nbsp;text&nbsp;to&nbsp;be&nbsp;indexed.&#34;</font><font color="#000000">;</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">doc.add</font><font color="#000000">(</font><font color="#7f0055"><b>new&nbsp;</b></font><font color="#000000">Field</font><font color="#000000">(</font><font color="#2a00ff">&#34;fieldname&#34;</font><font color="#000000">,&nbsp;text,&nbsp;Field.Store.YES,</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">Field.Index.ANALYZED</font><font color="#000000">))</font><font color="#000000">;</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">iwriter.addDocument</font><font color="#000000">(</font><font color="#000000">doc</font><font color="#000000">)</font><font color="#000000">;</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">iwriter.close</font><font color="#000000">()</font><font color="#000000">;</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#3f7f5f">//&nbsp;Now&nbsp;search&nbsp;the&nbsp;index:</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">IndexSearcher&nbsp;isearcher&nbsp;=&nbsp;</font><font color="#7f0055"><b>new&nbsp;</b></font><font color="#000000">IndexSearcher</font><font color="#000000">(</font><font color="#000000">directory,&nbsp;</font><font color="#7f0055"><b>true</b></font><font color="#000000">)</font><font color="#000000">;&nbsp;</font><font color="#3f7f5f">//&nbsp;read-only=true</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#3f7f5f">//&nbsp;Parse&nbsp;a&nbsp;simple&nbsp;query&nbsp;that&nbsp;searches&nbsp;for&nbsp;&#34;text&#34;:</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">QueryParser&nbsp;parser&nbsp;=&nbsp;</font><font color="#7f0055"><b>new&nbsp;</b></font><font color="#000000">QueryParser</font><font color="#000000">(</font><font color="#2a00ff">&#34;fieldname&#34;</font><font color="#000000">,&nbsp;analyzer</font><font color="#000000">)</font><font color="#000000">;</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">Query&nbsp;query&nbsp;=&nbsp;parser.parse</font><font color="#000000">(</font><font color="#2a00ff">&#34;text&#34;</font><font color="#000000">)</font><font color="#000000">;</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">ScoreDoc</font><font color="#000000">[]&nbsp;</font><font color="#000000">hits&nbsp;=&nbsp;isearcher.search</font><font color="#000000">(</font><font color="#000000">query,&nbsp;null,&nbsp;</font><font color="#990000">1000</font><font color="#000000">)</font><font color="#000000">.scoreDocs;</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">assertEquals</font><font color="#000000">(</font><font color="#990000">1</font><font color="#000000">,&nbsp;hits.length</font><font color="#000000">)</font><font color="#000000">;</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#3f7f5f">//&nbsp;Iterate&nbsp;through&nbsp;the&nbsp;results:</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#7f0055"><b>for&nbsp;</b></font><font color="#000000">(</font><font color="#7f0055"><b>int&nbsp;</b></font><font color="#000000">i&nbsp;=&nbsp;</font><font color="#990000">0</font><font color="#000000">;&nbsp;i&nbsp;&lt;&nbsp;hits.length;&nbsp;i++</font><font color="#000000">)&nbsp;{</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">Document&nbsp;hitDoc&nbsp;=&nbsp;isearcher.doc</font><font color="#000000">(</font><font color="#000000">hits</font><font color="#000000">[</font><font color="#000000">i</font><font color="#000000">]</font><font color="#000000">.doc</font><font color="#000000">)</font><font color="#000000">;</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">assertEquals</font><font color="#000000">(</font><font color="#2a00ff">&#34;This&nbsp;is&nbsp;the&nbsp;text&nbsp;to&nbsp;be&nbsp;indexed.&#34;</font><font color="#000000">,&nbsp;hitDoc.get</font><font color="#000000">(</font><font color="#2a00ff">&#34;fieldname&#34;</font><font color="#000000">))</font><font color="#000000">;</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">}</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">isearcher.close</font><font color="#000000">()</font><font color="#000000">;</font><br />
-<font color="#ffffff">&nbsp;&nbsp;&nbsp;&nbsp;</font><font color="#000000">directory.close</font><font color="#000000">()</font><font color="#000000">;</font></code>
-    
-   </td>
-  <!-- end source code -->
-   </tr>
+<pre class="prettyprint">
+    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
 
-</table>
-</div>
+    // Store the index in memory:
+    Directory directory = new RAMDirectory();
+    // To store an index on disk, use this instead:
+    //Directory directory = FSDirectory.open("/tmp/testindex");
+    IndexWriter iwriter = new IndexWriter(directory, analyzer, true,
+                                          new IndexWriter.MaxFieldLength(25000));
+    Document doc = new Document();
+    String text = "This is the text to be indexed.";
+    doc.add(new Field("fieldname", text, Field.Store.YES,
+        Field.Index.ANALYZED));
+    iwriter.addDocument(doc);
+    iwriter.close();
+    
+    // Now search the index:
+    IndexSearcher isearcher = new IndexSearcher(directory, true); // read-only=true
+    // Parse a simple query that searches for "text":
+    QueryParser parser = new QueryParser("fieldname", analyzer);
+    Query query = parser.parse("text");
+    ScoreDoc[] hits = isearcher.search(query, null, 1000).scoreDocs;
+    assertEquals(1, hits.length);
+    // Iterate through the results:
+    for (int i = 0; i < hits.length; i++) {
+      Document hitDoc = isearcher.doc(hits[i].doc);
+      assertEquals("This is the text to be indexed.", hitDoc.get("fieldname"));
+    }
+    isearcher.close();
+    directory.close();</pre>
 <!-- =       END of automatically generated HTML code       = -->
 <!-- ======================================================== -->
 

Copied: lucene/dev/branches/bulkpostings/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexInput.java (from r1068688, lucene/dev/trunk/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexInput.java)
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexInput.java?p2=lucene/dev/branches/bulkpostings/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexInput.java&p1=lucene/dev/trunk/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexInput.java&r1=1068688&r2=1068718&rev=1068718&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexInput.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexInput.java Wed Feb  9 01:03:49 2011
@@ -24,6 +24,7 @@ import org.apache.lucene.store.DataInput
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.util.CodecUtil;
+import org.apache.lucene.index.BulkPostingsEnum;
 
 /** Reads IndexInputs written with {@link
  *  SingleIntIndexOutput}.  NOTE: this class is just for
@@ -53,19 +54,41 @@ public class MockSingleIntIndexInput ext
     in.close();
   }
 
-  public static class Reader extends IntIndexInput.Reader {
+  public static class Reader extends BulkPostingsEnum.BlockReader {
     // clone:
     private final IndexInput in;
+    private int offset;
+    private final int[] buffer = new int[1];
 
     public Reader(IndexInput in) {
       this.in = in;
     }
 
-    /** Reads next single int */
     @Override
-    public int next() throws IOException {
-      //System.out.println("msii.next() fp=" + in.getFilePointer() + " vs " + in.length());
-      return in.readVInt();
+    public int[] getBuffer() {
+      return buffer;
+    }
+
+    @Override
+    public int offset() {
+      return offset;
+    }
+
+    @Override
+    public void setOffset(int offset) {
+      this.offset = offset;
+    }
+
+    @Override
+    public int end() {
+      return 1;
+    }
+
+    @Override
+    public int fill() throws IOException {
+      buffer[0] = in.readVInt();
+      offset = 0;
+      return 1;
     }
   }
   
@@ -83,12 +106,12 @@ public class MockSingleIntIndexInput ext
     }
 
     @Override
-    public void read(IntIndexInput.Reader indexIn, boolean absolute)
+    public void read(BulkPostingsEnum.BlockReader indexIn, boolean absolute)
       throws IOException {
       if (absolute) {
-        fp = indexIn.readVLong();
+        fp = readVLong(indexIn);
       } else {
-        fp += indexIn.readVLong();
+        fp += readVLong(indexIn);
       }
     }
 
@@ -98,8 +121,9 @@ public class MockSingleIntIndexInput ext
     }
 
     @Override
-    public void seek(IntIndexInput.Reader other) throws IOException {
+    public void seek(BulkPostingsEnum.BlockReader other) throws IOException {
       ((Reader) other).in.seek(fp);
+      other.fill();
     }
 
     @Override
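
[Editor's note] A hedged sketch (not from this commit) of how a caller might consume ints through the BulkPostingsEnum.BlockReader contract that the Reader above now implements; only the getBuffer()/offset()/setOffset()/end()/fill() methods visible in the hunks are assumed, and the draining loop itself is illustrative:

    import java.io.IOException;
    import org.apache.lucene.index.BulkPostingsEnum;

    class BlockReaderDrainSketch {
      /** Sums the next `count` ints exposed through the reader's block buffer. */
      static long sum(BulkPostingsEnum.BlockReader reader, int count) throws IOException {
        long total = 0;
        int[] buffer = reader.getBuffer();       // reused block buffer
        int offset = reader.offset();
        int end = reader.end();
        for (int i = 0; i < count; i++) {
          if (offset >= end) {                   // buffer exhausted: refill it
            end = reader.fill();
            buffer = reader.getBuffer();         // buffer may be reused or replaced
            offset = reader.offset();
          }
          total += buffer[offset++];
        }
        reader.setOffset(offset);                // record how far we consumed
        return total;
      }
    }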

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestAssertions.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestAssertions.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestAssertions.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestAssertions.java Wed Feb  9 01:03:49 2011
@@ -35,34 +35,45 @@ public class TestAssertions extends Luce
   }
   
   static class TestAnalyzer1 extends Analyzer {
+    @Override
     public final TokenStream tokenStream(String s, Reader r) { return null; }
+    @Override
     public final TokenStream reusableTokenStream(String s, Reader r) { return null; }
   }
 
   static final class TestAnalyzer2 extends Analyzer {
+    @Override
     public TokenStream tokenStream(String s, Reader r) { return null; }
+    @Override
     public TokenStream reusableTokenStream(String s, Reader r) { return null; }
   }
 
   static class TestAnalyzer3 extends Analyzer {
+    @Override
     public TokenStream tokenStream(String s, Reader r) { return null; }
+    @Override
     public TokenStream reusableTokenStream(String s, Reader r) { return null; }
   }
 
   static class TestAnalyzer4 extends Analyzer {
+    @Override
     public final TokenStream tokenStream(String s, Reader r) { return null; }
+    @Override
     public TokenStream reusableTokenStream(String s, Reader r) { return null; }
   }
 
   static class TestTokenStream1 extends TokenStream {
+    @Override
     public final boolean incrementToken() { return false; }
   }
 
   static final class TestTokenStream2 extends TokenStream {
+    @Override
     public boolean incrementToken() { return false; }
   }
 
   static class TestTokenStream3 extends TokenStream {
+    @Override
     public boolean incrementToken() { return false; }
   }
 

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestExternalCodecs.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestExternalCodecs.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestExternalCodecs.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestExternalCodecs.java Wed Feb  9 01:03:49 2011
@@ -63,6 +63,7 @@ public class TestExternalCodecs extends 
         return t2.length-t1.length;
       }
 
+      @Override
       public boolean equals(Object other) {
         return this == other;
       }
@@ -344,6 +345,7 @@ public class TestExternalCodecs extends 
         return ramField.termToDocs.get(current).totalTermFreq;
       }
 
+      @Override
       public DocsEnum docs(Bits skipDocs, DocsEnum reuse) {
         return new RAMDocsEnum(ramField.termToDocs.get(current), skipDocs);
       }
@@ -737,7 +739,8 @@ public class TestExternalCodecs extends 
     
     
     final int NUM_DOCS = 173;
-    Directory dir = newDirectory();
+    MockDirectoryWrapper dir = newDirectory();
+    dir.setCheckIndexOnClose(false); // we use a custom codec provider
     IndexWriter w = new IndexWriter(
         dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, true, true)).
@@ -766,7 +769,7 @@ public class TestExternalCodecs extends 
     }
     w.deleteDocuments(new Term("id", "77"));
 
-    IndexReader r = IndexReader.open(w);
+    IndexReader r = IndexReader.open(w, true);
     IndexReader[] subs = r.getSequentialSubReaders();
     // test each segment
     for(int i=0;i<subs.length;i++) {
@@ -776,7 +779,7 @@ public class TestExternalCodecs extends 
     testTermsOrder(r);
     
     assertEquals(NUM_DOCS-1, r.numDocs());
-    IndexSearcher s = new IndexSearcher(r);
+    IndexSearcher s = newSearcher(r);
     assertEquals(NUM_DOCS-1, s.search(new TermQuery(new Term("field1", "standard")), 1).totalHits);
     assertEquals(NUM_DOCS-1, s.search(new TermQuery(new Term("field2", "pulsing")), 1).totalHits);
     r.close();
@@ -784,10 +787,10 @@ public class TestExternalCodecs extends 
 
     w.deleteDocuments(new Term("id", "44"));
     w.optimize();
-    r = IndexReader.open(w);
+    r = IndexReader.open(w, true);
     assertEquals(NUM_DOCS-2, r.maxDoc());
     assertEquals(NUM_DOCS-2, r.numDocs());
-    s = new IndexSearcher(r);
+    s = newSearcher(r);
     assertEquals(NUM_DOCS-2, s.search(new TermQuery(new Term("field1", "standard")), 1).totalHits);
     assertEquals(NUM_DOCS-2, s.search(new TermQuery(new Term("field2", "pulsing")), 1).totalHits);
     assertEquals(1, s.search(new TermQuery(new Term("id", "76")), 1).totalHits);

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestMergeSchedulerExternal.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestMergeSchedulerExternal.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestMergeSchedulerExternal.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestMergeSchedulerExternal.java Wed Feb  9 01:03:49 2011
@@ -91,8 +91,8 @@ public class TestMergeSchedulerExternal 
     
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new MyMergeScheduler())
-        .setMaxBufferedDocs(2).setRAMBufferSizeMB(
-            IndexWriterConfig.DISABLE_AUTO_FLUSH));
+        .setMaxBufferedDocs(2).setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+        .setMergePolicy(newLogMergePolicy()));
     LogMergePolicy logMP = (LogMergePolicy) writer.getConfig().getMergePolicy();
     logMP.setMergeFactor(10);
     for(int i=0;i<20;i++)

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestSearch.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestSearch.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestSearch.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestSearch.java Wed Feb  9 01:03:49 2011
@@ -74,8 +74,11 @@ public class TestSearch extends LuceneTe
       Directory directory = newDirectory();
       Analyzer analyzer = new MockAnalyzer();
       IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
-      LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
+      MergePolicy mp = conf.getMergePolicy();
+      if (mp instanceof LogMergePolicy) {
+        ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+      }
+      
       IndexWriter writer = new IndexWriter(directory, conf);
 
       String[] docs = {
@@ -90,6 +93,7 @@ public class TestSearch extends LuceneTe
       for (int j = 0; j < docs.length; j++) {
         Document d = new Document();
         d.add(newField("contents", docs[j], Field.Store.YES, Field.Index.ANALYZED));
+        d.add(newField("id", ""+j, Field.Index.NOT_ANALYZED_NO_NORMS));
         writer.addDocument(d);
       }
       writer.close();
@@ -106,6 +110,10 @@ public class TestSearch extends LuceneTe
       };
       ScoreDoc[] hits = null;
 
+      Sort sort = new Sort(new SortField[] {
+          SortField.FIELD_SCORE,
+          new SortField("id", SortField.INT)});
+
       QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "contents", analyzer);
       parser.setPhraseSlop(4);
       for (int j = 0; j < queries.length; j++) {
@@ -115,7 +123,7 @@ public class TestSearch extends LuceneTe
           System.out.println("TEST: query=" + query);
         }
 
-        hits = searcher.search(query, null, 1000).scoreDocs;
+        hits = searcher.search(query, null, 1000, sort).scoreDocs;
 
         out.println(hits.length + " total results");
         for (int i = 0 ; i < hits.length && i < 10; i++) {

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestSearchForDuplicates.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestSearchForDuplicates.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestSearchForDuplicates.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/TestSearchForDuplicates.java Wed Feb  9 01:03:49 2011
@@ -80,8 +80,10 @@ public class TestSearchForDuplicates ext
       Directory directory = newDirectory();
       Analyzer analyzer = new MockAnalyzer();
       IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
-      LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFiles);
+      final MergePolicy mp = conf.getMergePolicy();
+      if (mp instanceof LogMergePolicy) {
+        ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFiles);
+      }
       IndexWriter writer = new IndexWriter(directory, conf);
       if (VERBOSE) {
         System.out.println("TEST: now build index");
@@ -93,9 +95,6 @@ public class TestSearchForDuplicates ext
       for (int j = 0; j < MAX_DOCS; j++) {
         Document d = new Document();
         d.add(newField(PRIORITY_FIELD, HIGH_PRIORITY, Field.Store.YES, Field.Index.ANALYZED));
-
-        // NOTE: this ID_FIELD produces no tokens since
-        // MockAnalyzer discards numbers
         d.add(newField(ID_FIELD, Integer.toString(j), Field.Store.YES, Field.Index.ANALYZED));
         writer.addDocument(d);
       }
@@ -112,7 +111,11 @@ public class TestSearchForDuplicates ext
         System.out.println("TEST: search query=" + query);
       }
 
-      ScoreDoc[] hits = searcher.search(query, null, MAX_DOCS).scoreDocs;
+      final Sort sort = new Sort(new SortField[] {
+          SortField.FIELD_SCORE,
+          new SortField(ID_FIELD, SortField.INT)});
+
+      ScoreDoc[] hits = searcher.search(query, null, MAX_DOCS, sort).scoreDocs;
       printHits(out, hits, searcher);
       checkHits(hits, MAX_DOCS, searcher);
 
@@ -127,7 +130,7 @@ public class TestSearchForDuplicates ext
       query = parser.parse(HIGH_PRIORITY + " OR " + MED_PRIORITY);
       out.println("Query: " + query.toString(PRIORITY_FIELD));
 
-      hits = searcher.search(query, null, MAX_DOCS).scoreDocs;
+      hits = searcher.search(query, null, MAX_DOCS, sort).scoreDocs;
       printHits(out, hits, searcher);
       checkHits(hits, MAX_DOCS, searcher);
 
@@ -149,7 +152,7 @@ public class TestSearchForDuplicates ext
   private void checkHits(ScoreDoc[] hits, int expectedCount, IndexSearcher searcher) throws IOException {
     assertEquals("total results", expectedCount, hits.length);
     for (int i = 0 ; i < hits.length; i++) {
-      if ( i < 10 || (i > 94 && i < 105) ) {
+      if (i < 10 || (i > 94 && i < 105) ) {
         Document d = searcher.doc(hits[i].doc);
         assertEquals("check " + i, String.valueOf(i), d.get(ID_FIELD));
       }

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/analysis/tokenattributes/TestCharTermAttributeImpl.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/analysis/tokenattributes/TestCharTermAttributeImpl.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/analysis/tokenattributes/TestCharTermAttributeImpl.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/analysis/tokenattributes/TestCharTermAttributeImpl.java Wed Feb  9 01:03:49 2011
@@ -227,6 +227,7 @@ public class TestCharTermAttributeImpl e
       public char charAt(int i) { return longTestString.charAt(i); }
       public int length() { return longTestString.length(); }
       public CharSequence subSequence(int start, int end) { return longTestString.subSequence(start, end); }
+      @Override
       public String toString() { return longTestString; }
     });
     assertEquals("4567890123456"+longTestString, t.toString());

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/document/TestDocument.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/document/TestDocument.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/document/TestDocument.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/document/TestDocument.java Wed Feb  9 01:03:49 2011
@@ -156,7 +156,7 @@ public class TestDocument extends Lucene
     writer.addDocument(makeDocumentWithFields());
     IndexReader reader = writer.getReader();
     
-    IndexSearcher searcher = new IndexSearcher(reader);
+    IndexSearcher searcher = newSearcher(reader);
     
     // search for something that does exists
     Query query = new TermQuery(new Term("keyword", "test1"));
@@ -238,7 +238,7 @@ public class TestDocument extends Lucene
     writer.addDocument(doc);
     
     IndexReader reader = writer.getReader();
-    IndexSearcher searcher = new IndexSearcher(reader);
+    IndexSearcher searcher = newSearcher(reader);
     
     Query query = new TermQuery(new Term("keyword", "test"));
     

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java Wed Feb  9 01:03:49 2011
@@ -157,6 +157,7 @@ public class TestAddIndexes extends Luce
 
     setUpDirs(dir, aux);
     IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer.setInfoStream(VERBOSE ? System.out : null);
     writer.addIndexes(aux);
 
     // Adds 10 docs, then replaces them with another 10
@@ -452,6 +453,7 @@ public class TestAddIndexes extends Luce
             setMaxBufferedDocs(100).
             setMergePolicy(newLogMergePolicy(10))
     );
+    writer.setInfoStream(VERBOSE ? System.out : null);
     writer.addIndexes(aux);
     assertEquals(30, writer.maxDoc());
     assertEquals(3, writer.getSegmentCount());

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestAtomicUpdate.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestAtomicUpdate.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestAtomicUpdate.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestAtomicUpdate.java Wed Feb  9 01:03:49 2011
@@ -131,6 +131,7 @@ public class TestAtomicUpdate extends Lu
         .setMaxBufferedDocs(7);
     ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(3);
     IndexWriter writer = new MockIndexWriter(directory, conf);
+    writer.setInfoStream(VERBOSE ? System.out : null);
 
     // Establish a base index of 100 docs:
     for(int i=0;i<100;i++) {

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java Wed Feb  9 01:03:49 2011
@@ -17,20 +17,13 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
-import java.io.BufferedOutputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
 import java.io.PrintStream;
 import java.util.Arrays;
-import java.util.Enumeration;
 import java.util.List;
 import java.util.Random;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipFile;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
@@ -78,39 +71,6 @@ public class TestBackwardsCompatibility 
   }
   */
 
-  /* Unzips zipName --> dirName, removing dirName
-     first */
-  public void unzip(File zipName, String destDirName) throws IOException {
-
-    ZipFile zipFile = new ZipFile(zipName);
-
-    Enumeration<? extends ZipEntry> entries = zipFile.entries();
-
-    String dirName = fullDir(destDirName);
-
-    File fileDir = new File(dirName);
-    rmDir(destDirName);
-
-    fileDir.mkdir();
-
-    while (entries.hasMoreElements()) {
-      ZipEntry entry = entries.nextElement();
-
-      InputStream in = zipFile.getInputStream(entry);
-      OutputStream out = new BufferedOutputStream(new FileOutputStream(new File(fileDir, entry.getName())));
-
-      byte[] buffer = new byte[8192];
-      int len;
-      while((len = in.read(buffer)) >= 0) {
-        out.write(buffer, 0, len);
-      }
-
-      in.close();
-      out.close();
-    }
-
-    zipFile.close();
-  }
 /*
   public void testCreateCFS() throws IOException {
     String dirName = "testindex.cfs";
@@ -153,10 +113,9 @@ public class TestBackwardsCompatibility 
       if (VERBOSE) {
         System.out.println("TEST: index " + unsupportedNames[i]);
       }
-      unzip(getDataFile("unsupported." + unsupportedNames[i] + ".zip"), unsupportedNames[i]);
-
-      String fullPath = fullDir(unsupportedNames[i]);
-      Directory dir = newFSDirectory(new File(fullPath));
+      File oldIndxeDir = _TestUtil.getTempDir(unsupportedNames[i]);
+      _TestUtil.unzip(getDataFile("unsupported." + unsupportedNames[i] + ".zip"), oldIndxeDir);
+      Directory dir = newFSDirectory(oldIndxeDir);
 
       IndexReader reader = null;
       IndexWriter writer = null;
@@ -200,7 +159,7 @@ public class TestBackwardsCompatibility 
       assertTrue(bos.toString().contains(IndexFormatTooOldException.class.getName()));
 
       dir.close();
-      rmDir(unsupportedNames[i]);
+      _TestUtil.rmDir(oldIndxeDir);
     }
   }
   
@@ -209,10 +168,9 @@ public class TestBackwardsCompatibility 
       if (VERBOSE) {
         System.out.println("\nTEST: index=" + oldNames[i]);
       }
-      unzip(getDataFile("index." + oldNames[i] + ".zip"), oldNames[i]);
-
-      String fullPath = fullDir(oldNames[i]);
-      Directory dir = newFSDirectory(new File(fullPath));
+      File oldIndxeDir = _TestUtil.getTempDir(oldNames[i]);
+      _TestUtil.unzip(getDataFile("index." + oldNames[i] + ".zip"), oldIndxeDir);
+      Directory dir = newFSDirectory(oldIndxeDir);
 
       IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
           TEST_VERSION_CURRENT, new MockAnalyzer()));
@@ -223,15 +181,15 @@ public class TestBackwardsCompatibility 
       _TestUtil.checkIndex(dir);
       
       dir.close();
-      rmDir(oldNames[i]);
+      _TestUtil.rmDir(oldIndxeDir);
     }
   }
 
   public void testAddOldIndexes() throws IOException {
     for (String name : oldNames) {
-      unzip(getDataFile("index." + name + ".zip"), name);
-      String fullPath = fullDir(name);
-      Directory dir = newFSDirectory(new File(fullPath));
+      File oldIndxeDir = _TestUtil.getTempDir(name);
+      _TestUtil.unzip(getDataFile("index." + name + ".zip"), oldIndxeDir);
+      Directory dir = newFSDirectory(oldIndxeDir);
 
       Directory targetDir = newDirectory();
       IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(
@@ -243,15 +201,15 @@ public class TestBackwardsCompatibility 
       
       dir.close();
       targetDir.close();
-      rmDir(name);
+      _TestUtil.rmDir(oldIndxeDir);
     }
   }
 
   public void testAddOldIndexesReader() throws IOException {
     for (String name : oldNames) {
-      unzip(getDataFile("index." + name + ".zip"), name);
-      String fullPath = fullDir(name);
-      Directory dir = newFSDirectory(new File(fullPath));
+      File oldIndxeDir = _TestUtil.getTempDir(name);
+      _TestUtil.unzip(getDataFile("index." + name + ".zip"), oldIndxeDir);
+      Directory dir = newFSDirectory(oldIndxeDir);
       IndexReader reader = IndexReader.open(dir);
       
       Directory targetDir = newDirectory();
@@ -265,23 +223,25 @@ public class TestBackwardsCompatibility 
       
       dir.close();
       targetDir.close();
-      rmDir(name);
+      _TestUtil.rmDir(oldIndxeDir);
     }
   }
 
   public void testSearchOldIndex() throws IOException {
     for(int i=0;i<oldNames.length;i++) {
-      unzip(getDataFile("index." + oldNames[i] + ".zip"), oldNames[i]);
-      searchIndex(oldNames[i], oldNames[i]);
-      rmDir(oldNames[i]);
+      File oldIndxeDir = _TestUtil.getTempDir(oldNames[i]);
+      _TestUtil.unzip(getDataFile("index." + oldNames[i] + ".zip"), oldIndxeDir);
+      searchIndex(oldIndxeDir, oldNames[i]);
+      _TestUtil.rmDir(oldIndxeDir);
     }
   }
 
   public void testIndexOldIndexNoAdds() throws IOException {
     for(int i=0;i<oldNames.length;i++) {
-      unzip(getDataFile("index." + oldNames[i] + ".zip"), oldNames[i]);
-      changeIndexNoAdds(random, oldNames[i]);
-      rmDir(oldNames[i]);
+      File oldIndxeDir = _TestUtil.getTempDir(oldNames[i]);
+      _TestUtil.unzip(getDataFile("index." + oldNames[i] + ".zip"), oldIndxeDir);
+      changeIndexNoAdds(random, oldIndxeDir);
+      _TestUtil.rmDir(oldIndxeDir);
     }
   }
 
@@ -290,9 +250,10 @@ public class TestBackwardsCompatibility 
       if (VERBOSE) {
         System.out.println("TEST: oldName=" + oldNames[i]);
       }
-      unzip(getDataFile("index." + oldNames[i] + ".zip"), oldNames[i]);
-      changeIndexWithAdds(random, oldNames[i]);
-      rmDir(oldNames[i]);
+      File oldIndxeDir = _TestUtil.getTempDir(oldNames[i]);
+      _TestUtil.unzip(getDataFile("index." + oldNames[i] + ".zip"), oldIndxeDir);
+      changeIndexWithAdds(random, oldIndxeDir, oldNames[i]);
+      _TestUtil.rmDir(oldIndxeDir);
     }
   }
 
@@ -305,13 +266,11 @@ public class TestBackwardsCompatibility 
     }
   }
 
-  public void searchIndex(String dirName, String oldName) throws IOException {
+  public void searchIndex(File indexDir, String oldName) throws IOException {
     //QueryParser parser = new QueryParser("contents", new MockAnalyzer());
     //Query query = parser.parse("handle:1");
 
-    dirName = fullDir(dirName);
-
-    Directory dir = newFSDirectory(new File(dirName));
+    Directory dir = newFSDirectory(indexDir);
     IndexSearcher searcher = new IndexSearcher(dir, true);
     IndexReader reader = searcher.getIndexReader();
 
@@ -343,7 +302,7 @@ public class TestBackwardsCompatibility 
         }
 
         TermFreqVector tfv = reader.getTermFreqVector(i, "utf8");
-        assertNotNull("docID=" + i + " index=" + dirName, tfv);
+        assertNotNull("docID=" + i + " index=" + indexDir.getName(), tfv);
         assertTrue(tfv instanceof TermPositionVector);
       } else
         // Only ID 7 is deleted
@@ -376,11 +335,9 @@ public class TestBackwardsCompatibility 
     return v0 - v1;
   }
 
-  public void changeIndexWithAdds(Random random, String dirName) throws IOException {
-    String origDirName = dirName;
-    dirName = fullDir(dirName);
+  public void changeIndexWithAdds(Random random, File oldIndexDir, String origOldName) throws IOException {
 
-    Directory dir = newFSDirectory(new File(dirName));
+    Directory dir = newFSDirectory(oldIndexDir);
     // open writer
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
     writer.setInfoStream(VERBOSE ? System.out : null);
@@ -391,7 +348,7 @@ public class TestBackwardsCompatibility 
 
     // make sure writer sees right total -- writer seems not to know about deletes in .del?
     final int expected;
-    if (compare(origDirName, "24") < 0) {
+    if (compare(origOldName, "24") < 0) {
       expected = 44;
     } else {
       expected = 45;
@@ -409,7 +366,7 @@ public class TestBackwardsCompatibility 
 
     // make sure we can do delete & setNorm against this segment:
     IndexReader reader = IndexReader.open(dir, false);
-    searcher = new IndexSearcher(reader);
+    searcher = newSearcher(reader);
     Term searchTerm = new Term("id", "6");
     int delCount = reader.deleteDocuments(searchTerm);
     assertEquals("wrong delete count", 1, delCount);
@@ -442,11 +399,9 @@ public class TestBackwardsCompatibility 
     dir.close();
   }
 
-  public void changeIndexNoAdds(Random random, String dirName) throws IOException {
-
-    dirName = fullDir(dirName);
+  public void changeIndexNoAdds(Random random, File oldIndexDir) throws IOException {
 
-    Directory dir = newFSDirectory(new File(dirName));
+    Directory dir = newFSDirectory(oldIndexDir);
 
     // make sure searching sees right # hits
     IndexSearcher searcher = new IndexSearcher(dir, true);
@@ -489,13 +444,12 @@ public class TestBackwardsCompatibility 
     dir.close();
   }
 
-  public void createIndex(Random random, String dirName, boolean doCFS) throws IOException {
-
-    rmDir(dirName);
-
-    dirName = fullDir(dirName);
+  public File createIndex(Random random, String dirName, boolean doCFS) throws IOException {
 
-    Directory dir = newFSDirectory(new File(dirName));
+    File indexDir = _TestUtil.getTempDir(dirName);
+    _TestUtil.rmDir(indexDir);
+    Directory dir = newFSDirectory(indexDir);
+    
     IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10);
     ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(doCFS);
     IndexWriter writer = new IndexWriter(dir, conf);
@@ -522,17 +476,21 @@ public class TestBackwardsCompatibility 
     // Set one norm so we get a .s0 file:
     reader.setNorm(21, "content", conf.getSimilarityProvider().get("content").encodeNormValue(1.5f));
     reader.close();
+    dir.close();
+    
+    return indexDir;
   }
 
   /* Verifies that the expected file names were produced */
 
   public void testExactFileNames() throws IOException {
 
-    String outputDir = "lucene.backwardscompat0.index";
-    rmDir(outputDir);
+    String outputDirName = "lucene.backwardscompat0.index";
+    File outputDir = _TestUtil.getTempDir(outputDirName);
+    _TestUtil.rmDir(outputDir);
 
     try {
-      Directory dir = newFSDirectory(new File(fullDir(outputDir)));
+      Directory dir = newFSDirectory(outputDir);
 
       LogMergePolicy mergePolicy = newLogMergePolicy(true, 10);
       mergePolicy.setNoCFSRatio(1); // This test expects all of its segments to be in CFS
@@ -595,7 +553,7 @@ public class TestBackwardsCompatibility 
       }
       dir.close();
     } finally {
-      rmDir(outputDir);
+      _TestUtil.rmDir(outputDir);
     }
   }
 
@@ -636,23 +594,6 @@ public class TestBackwardsCompatibility 
     writer.addDocument(doc);
   }
 
-  private void rmDir(String dir) throws IOException {
-    File fileDir = new File(fullDir(dir));
-    if (fileDir.exists()) {
-      File[] files = fileDir.listFiles();
-      if (files != null) {
-        for (int i = 0; i < files.length; i++) {
-          files[i].delete();
-        }
-      }
-      fileDir.delete();
-    }
-  }
-
-  public static String fullDir(String dirName) throws IOException {
-    return new File(TEMP_DIR, dirName).getCanonicalPath();
-  }
-
   private int countDocs(DocsEnum docs) throws IOException {
     int count = 0;
     while((docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
@@ -664,9 +605,9 @@ public class TestBackwardsCompatibility 
   // flex: test basics of TermsEnum api on non-flex index
   public void testNextIntoWrongField() throws Exception {
     for(int i=0;i<oldNames.length;i++) {
-      unzip(getDataFile("index." + oldNames[i] + ".zip"), oldNames[i]);
-      String fullPath = fullDir(oldNames[i]);
-      Directory dir = newFSDirectory(new File(fullPath));
+      File oldIndexDir = _TestUtil.getTempDir(oldNames[i]);
+    	_TestUtil.unzip(getDataFile("index." + oldNames[i] + ".zip"), oldIndexDir);
+      Directory dir = newFSDirectory(oldIndexDir);
       IndexReader r = IndexReader.open(dir);
       TermsEnum terms = MultiFields.getFields(r).terms("content").iterator();
       BytesRef t = terms.next();
@@ -703,16 +644,16 @@ public class TestBackwardsCompatibility 
 
       r.close();
       dir.close();
-      rmDir(oldNames[i]);
+      _TestUtil.rmDir(oldIndexDir);
     }
   }
   
   public void testNumericFields() throws Exception {
     for(int i=0;i<oldNames.length;i++) {
       
-      unzip(getDataFile("index." + oldNames[i] + ".zip"), oldNames[i]);
-      String fullPath = fullDir(oldNames[i]);
-      Directory dir = newFSDirectory(new File(fullPath));
+      File oldIndexDir = _TestUtil.getTempDir(oldNames[i]);
+      _TestUtil.unzip(getDataFile("index." + oldNames[i] + ".zip"), oldIndexDir);
+      Directory dir = newFSDirectory(oldIndexDir);
       IndexSearcher searcher = new IndexSearcher(dir, true);
       
       for (int id=10; id<15; id++) {
@@ -747,7 +688,7 @@ public class TestBackwardsCompatibility 
       
       searcher.close();
       dir.close();
-      rmDir(oldNames[i]);
+      _TestUtil.rmDir(oldIndexDir);
     }
   }
 

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCodecs.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCodecs.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCodecs.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestCodecs.java Wed Feb  9 01:03:49 2011
@@ -362,7 +362,7 @@ public class TestCodecs extends LuceneTe
 
   private ScoreDoc[] search(final IndexWriter writer, final Query q, final int n) throws IOException {
     final IndexReader reader = writer.getReader();
-    final IndexSearcher searcher = new IndexSearcher(reader);
+    final IndexSearcher searcher = newSearcher(reader);
     try {
       return searcher.search(q, null, n).scoreDocs;
     }
@@ -589,7 +589,7 @@ public class TestCodecs extends LuceneTe
 
     final int termIndexInterval = _TestUtil.nextInt(random, 13, 27);
     final SegmentCodecs codecInfo = SegmentCodecs.build(fieldInfos, CodecProvider.getDefault());
-    final SegmentWriteState state = new SegmentWriteState(null, dir, SEGMENT, fieldInfos, 10000, termIndexInterval, codecInfo);
+    final SegmentWriteState state = new SegmentWriteState(null, dir, SEGMENT, fieldInfos, 10000, termIndexInterval, codecInfo, null);
 
     final FieldsConsumer consumer = state.segmentCodecs.codec().fieldsConsumer(state);
     Arrays.sort(fields);

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java Wed Feb  9 01:03:49 2011
@@ -132,11 +132,15 @@ public class TestConcurrentMergeSchedule
     IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer())
         .setMergePolicy(mp));
+    writer.setInfoStream(VERBOSE ? System.out : null);
 
     Document doc = new Document();
     Field idField = newField("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
     doc.add(idField);
     for(int i=0;i<10;i++) {
+      if (VERBOSE) {
+        System.out.println("\nTEST: cycle");
+      }
       for(int j=0;j<100;j++) {
         idField.setValue(Integer.toString(i*100+j));
         writer.addDocument(doc);
@@ -144,6 +148,9 @@ public class TestConcurrentMergeSchedule
 
       int delID = i;
       while(delID < 100*(1+i)) {
+        if (VERBOSE) {
+          System.out.println("TEST: del " + delID);
+        }
         writer.deleteDocuments(new Term("id", ""+delID));
         delID += 10;
       }
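
The TestConcurrentMergeScheduler hunks wire the test into the suite's verbose mode: the writer's infoStream goes to System.out and the test prints its own "TEST:" markers, but only when VERBOSE is set, so a normal run stays quiet. A short sketch of the same debugging pattern, assuming the LuceneTestCase VERBOSE flag used above:

    // Sketch: route writer tracing and test-side markers to stdout only in verbose runs.
    IndexWriter writer = new IndexWriter(directory,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
    writer.setInfoStream(VERBOSE ? System.out : null);
    if (VERBOSE) {
      System.out.println("\nTEST: start cycle");   // lines up with the writer's own output
    }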

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java Wed Feb  9 01:03:49 2011
@@ -119,6 +119,9 @@ public class TestDeletionPolicy extends 
     }
 
     public void onInit(List<? extends IndexCommit> commits) throws IOException {
+      if (VERBOSE) {
+        System.out.println("TEST: onInit");
+      }
       verifyCommitOrder(commits);
       numOnInit++;
       // do no deletions on init
@@ -126,6 +129,9 @@ public class TestDeletionPolicy extends 
     }
 
     public void onCommit(List<? extends IndexCommit> commits) throws IOException {
+      if (VERBOSE) {
+        System.out.println("TEST: onCommit");
+      }
       verifyCommitOrder(commits);
       doDeletes(commits, true);
     }
@@ -200,8 +206,10 @@ public class TestDeletionPolicy extends 
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
         new MockAnalyzer())
         .setIndexDeletionPolicy(policy);
-    LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
-    lmp.setUseCompoundFile(true);
+    MergePolicy mp = conf.getMergePolicy();
+    if (mp instanceof LogMergePolicy) {
+      ((LogMergePolicy) mp).setUseCompoundFile(true);
+    }
     IndexWriter writer = new IndexWriter(dir, conf);
     writer.close();
 
@@ -215,8 +223,10 @@ public class TestDeletionPolicy extends 
       conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
           new MockAnalyzer()).setOpenMode(
           OpenMode.APPEND).setIndexDeletionPolicy(policy);
-      lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(true);
+      mp = conf.getMergePolicy();
+      if (mp instanceof LogMergePolicy) {
+        ((LogMergePolicy) mp).setUseCompoundFile(true);
+      }
       writer = new IndexWriter(dir, conf);
       for(int j=0;j<17;j++) {
         addDoc(writer);
@@ -280,6 +290,10 @@ public class TestDeletionPolicy extends 
   public void testKeepAllDeletionPolicy() throws IOException {
     for(int pass=0;pass<2;pass++) {
 
+      if (VERBOSE) {
+        System.out.println("TEST: cycle pass=" + pass);
+      }
+
       boolean useCompoundFile = (pass % 2) != 0;
 
       // Never deletes a commit
@@ -292,34 +306,48 @@ public class TestDeletionPolicy extends 
           TEST_VERSION_CURRENT, new MockAnalyzer())
           .setIndexDeletionPolicy(policy).setMaxBufferedDocs(10)
           .setMergeScheduler(new SerialMergeScheduler());
-      LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      lmp.setMergeFactor(10);
+      MergePolicy mp = conf.getMergePolicy();
+      if (mp instanceof LogMergePolicy) {
+        ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+      }
       IndexWriter writer = new IndexWriter(dir, conf);
       for(int i=0;i<107;i++) {
         addDoc(writer);
       }
       writer.close();
 
-      conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-          new MockAnalyzer()).setOpenMode(
-          OpenMode.APPEND).setIndexDeletionPolicy(policy);
-      lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      writer = new IndexWriter(dir, conf);
-      writer.optimize();
-      writer.close();
-
-      assertEquals(1, policy.numOnInit);
+      final boolean isOptimized;
+      {
+        IndexReader r = IndexReader.open(dir);
+        isOptimized = r.isOptimized();
+        r.close();
+      }
+      if (!isOptimized) {
+        conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
+                                    new MockAnalyzer()).setOpenMode(
+                                                                    OpenMode.APPEND).setIndexDeletionPolicy(policy);
+        mp = conf.getMergePolicy();
+        if (mp instanceof LogMergePolicy) {
+          ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+        }
+        if (VERBOSE) {
+          System.out.println("TEST: open writer for optimize");
+        }
+        writer = new IndexWriter(dir, conf);
+        writer.setInfoStream(VERBOSE ? System.out : null);
+        writer.optimize();
+        writer.close();
+      }
+      assertEquals(isOptimized ? 0:1, policy.numOnInit);
 
       // If we are not auto committing then there should
       // be exactly 2 commits (one per close above):
-      assertEquals(2, policy.numOnCommit);
+      assertEquals(1 + (isOptimized ? 0:1), policy.numOnCommit);
 
       // Test listCommits
       Collection<IndexCommit> commits = IndexReader.listCommits(dir);
       // 2 from closing writer
-      assertEquals(2, commits.size());
+      assertEquals(1 + (isOptimized ? 0:1), commits.size());
 
       // Make sure we can open a reader on each commit:
       for (final IndexCommit commit : commits) {
@@ -480,8 +508,10 @@ public class TestDeletionPolicy extends 
           TEST_VERSION_CURRENT, new MockAnalyzer())
           .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
           .setMaxBufferedDocs(10);
-      LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
+      MergePolicy mp = conf.getMergePolicy();
+      if (mp instanceof LogMergePolicy) {
+        ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+      }
       IndexWriter writer = new IndexWriter(dir, conf);
       for(int i=0;i<107;i++) {
         addDoc(writer);
@@ -490,8 +520,10 @@ public class TestDeletionPolicy extends 
 
       conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
           .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
-      lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
+      mp = conf.getMergePolicy();
+      if (mp instanceof LogMergePolicy) {
+        ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+      }
       writer = new IndexWriter(dir, conf);
       writer.optimize();
       writer.close();
@@ -529,8 +561,10 @@ public class TestDeletionPolicy extends 
             TEST_VERSION_CURRENT, new MockAnalyzer())
             .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
             .setMaxBufferedDocs(10);
-        LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
-        lmp.setUseCompoundFile(useCompoundFile);
+        MergePolicy mp = conf.getMergePolicy();
+        if (mp instanceof LogMergePolicy) {
+          ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+        }
         IndexWriter writer = new IndexWriter(dir, conf);
         for(int i=0;i<17;i++) {
           addDoc(writer);
@@ -585,47 +619,65 @@ public class TestDeletionPolicy extends 
       Directory dir = newDirectory();
       IndexWriterConfig conf = newIndexWriterConfig(
           TEST_VERSION_CURRENT, new MockAnalyzer())
-          .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy);
-      LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
+        .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy).setMergePolicy(newInOrderLogMergePolicy());
+      MergePolicy mp = conf.getMergePolicy();
+      if (mp instanceof LogMergePolicy) {
+        ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+      }
       IndexWriter writer = new IndexWriter(dir, conf);
       writer.close();
       Term searchTerm = new Term("content", "aaa");        
       Query query = new TermQuery(searchTerm);
 
       for(int i=0;i<N+1;i++) {
+        if (VERBOSE) {
+          System.out.println("\nTEST: cycle i=" + i);
+        }
         conf = newIndexWriterConfig(
             TEST_VERSION_CURRENT, new MockAnalyzer())
             .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
-        lmp = (LogMergePolicy) conf.getMergePolicy();
-        lmp.setUseCompoundFile(useCompoundFile);
+        mp = conf.getMergePolicy();
+        if (mp instanceof LogMergePolicy) {
+          ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+        }
         writer = new IndexWriter(dir, conf);
         for(int j=0;j<17;j++) {
           addDoc(writer);
         }
         // this is a commit
+        if (VERBOSE) {
+          System.out.println("TEST: close writer");
+        }
         writer.close();
         IndexReader reader = IndexReader.open(dir, policy, false);
         reader.deleteDocument(3*i+1);
         reader.setNorm(4*i+1, "content", conf.getSimilarityProvider().get("content").encodeNormValue(2.0F));
-        IndexSearcher searcher = new IndexSearcher(reader);
+        IndexSearcher searcher = newSearcher(reader);
         ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
         assertEquals(16*(1+i), hits.length);
         // this is a commit
+        if (VERBOSE) {
+          System.out.println("TEST: close reader numOnCommit=" + policy.numOnCommit);
+        }
         reader.close();
         searcher.close();
       }
       conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
           .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
-      lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
+      mp = conf.getMergePolicy();
+      if (mp instanceof LogMergePolicy) {
+        ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+      }
+      IndexReader r = IndexReader.open(dir);
+      final boolean wasOptimized = r.isOptimized();
+      r.close();
       writer = new IndexWriter(dir, conf);
       writer.optimize();
       // this is a commit
       writer.close();
 
       assertEquals(2*(N+1)+1, policy.numOnInit);
-      assertEquals(2*(N+2), policy.numOnCommit);
+      assertEquals(2*(N+2) - (wasOptimized ? 1:0), policy.numOnCommit);
 
       IndexSearcher searcher = new IndexSearcher(dir, false);
       ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
@@ -644,7 +696,7 @@ public class TestDeletionPolicy extends 
 
           // Work backwards in commits on what the expected
           // count should be.
-          searcher = new IndexSearcher(reader);
+          searcher = newSearcher(reader);
           hits = searcher.search(query, null, 1000).scoreDocs;
           if (i > 1) {
             if (i % 2 == 0) {
@@ -692,8 +744,10 @@ public class TestDeletionPolicy extends 
           TEST_VERSION_CURRENT, new MockAnalyzer())
           .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
           .setMaxBufferedDocs(10);
-      LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
+      MergePolicy mp = conf.getMergePolicy();
+      if (mp instanceof LogMergePolicy) {
+        ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+      }
       IndexWriter writer = new IndexWriter(dir, conf);
       writer.close();
       Term searchTerm = new Term("content", "aaa");        
@@ -705,8 +759,10 @@ public class TestDeletionPolicy extends 
             TEST_VERSION_CURRENT, new MockAnalyzer())
             .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy)
             .setMaxBufferedDocs(10);
-        lmp = (LogMergePolicy) conf.getMergePolicy();
-        lmp.setUseCompoundFile(useCompoundFile);
+        mp = conf.getMergePolicy();
+        if (mp instanceof LogMergePolicy) {
+          ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+        }
         writer = new IndexWriter(dir, conf);
         for(int j=0;j<17;j++) {
           addDoc(writer);
@@ -716,7 +772,7 @@ public class TestDeletionPolicy extends 
         IndexReader reader = IndexReader.open(dir, policy, false);
         reader.deleteDocument(3);
         reader.setNorm(5, "content", conf.getSimilarityProvider().get("content").encodeNormValue(2.0F));
-        IndexSearcher searcher = new IndexSearcher(reader);
+        IndexSearcher searcher = newSearcher(reader);
         ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
         assertEquals(16, hits.length);
         // this is a commit
@@ -751,7 +807,7 @@ public class TestDeletionPolicy extends 
 
           // Work backwards in commits on what the expected
           // count should be.
-          searcher = new IndexSearcher(reader);
+          searcher = newSearcher(reader);
           hits = searcher.search(query, null, 1000).scoreDocs;
           assertEquals(expectedCount, hits.length);
           searcher.close();
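
The recurring TestDeletionPolicy change is that the config's merge policy is no longer cast to LogMergePolicy unconditionally; because newIndexWriterConfig can hand back some other MergePolicy, the test now checks the runtime type before touching the compound-file flag. A small hedged helper capturing that guard:

    // Sketch: only adjust the CFS setting when the policy actually is a LogMergePolicy.
    private static void setUseCompoundFile(IndexWriterConfig conf, boolean useCompoundFile) {
      MergePolicy mp = conf.getMergePolicy();
      if (mp instanceof LogMergePolicy) {
        ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
      }
      // otherwise leave the policy as-is; these tests do not require CFS
    }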

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDocsAndPositions.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDocsAndPositions.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDocsAndPositions.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestDocsAndPositions.java Wed Feb  9 01:03:49 2011
@@ -36,6 +36,7 @@ public class TestDocsAndPositions extend
   private String fieldName;
   private boolean usePayload;
 
+  @Override
   public void setUp() throws Exception {
     super.setUp();
     fieldName = "field" + random.nextInt();
@@ -130,11 +131,11 @@ public class TestDocsAndPositions extend
    * random. All positions for that number are saved up front and compared to
    * the enums positions.
    */
-  public void testRandomPositons() throws IOException {
+  public void testRandomPositions() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
-            MockTokenizer.WHITESPACE, true, usePayload)));
+            MockTokenizer.WHITESPACE, true, usePayload)).setMergePolicy(newInOrderLogMergePolicy()));
     int numDocs = 131;
     int max = 1051;
     int term = random.nextInt(max);
@@ -150,6 +151,10 @@ public class TestDocsAndPositions extend
           positions.add(Integer.valueOf(j));
         }
       }
+      if (positions.size() == 0) {
+        builder.append(term);
+        positions.add(3049);
+      }
       doc.add(newField(fieldName, builder.toString(), Field.Store.YES,
           Field.Index.ANALYZED));
       positionsInDoc[i] = positions.toArray(new Integer[0]);
@@ -211,7 +216,7 @@ public class TestDocsAndPositions extend
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
-            MockTokenizer.WHITESPACE, true, usePayload)));
+                                                                    MockTokenizer.WHITESPACE, true, usePayload)).setMergePolicy(newInOrderLogMergePolicy()));
     int numDocs = 499;
     int max = 15678;
     int term = random.nextInt(max);
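
The positions.size() == 0 branch added above guarantees that every generated document contains the probed term at least once, so the positions enum always has an occurrence to verify; the forced occurrence's position is recorded along with the real ones. A heavily hedged sketch of that document-building step (field name, term, and the per-document term count are assumptions, chosen so the appended term lands at the recorded position):

    // Sketch only: build a whitespace-tokenized value and force at least one hit.
    StringBuilder builder = new StringBuilder();
    List<Integer> positions = new ArrayList<Integer>();
    final int termsPerDoc = 3049;                       // assumed bound, not from the patch
    for (int pos = 0; pos < termsPerDoc; pos++) {
      int nextTerm = random.nextInt(max);
      builder.append(nextTerm).append(' ');
      if (nextTerm == term) {
        positions.add(pos);
      }
    }
    if (positions.size() == 0) {
      builder.append(term);                             // one forced occurrence at the end
      positions.add(termsPerDoc);
    }
    doc.add(newField(fieldName, builder.toString(), Field.Store.YES, Field.Index.ANALYZED));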

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java Wed Feb  9 01:03:49 2011
@@ -51,7 +51,7 @@ public class TestFieldsReader extends Lu
     DocHelper.setupDoc(testDoc);
     fieldInfos.add(testDoc);
     dir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy());
     ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false);
     IndexWriter writer = new IndexWriter(dir, conf);
     writer.addDocument(testDoc);
@@ -291,7 +291,7 @@ public class TestFieldsReader extends Lu
     Directory tmpDir = newFSDirectory(file);
     assertTrue(tmpDir != null);
 
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE);
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setMergePolicy(newLogMergePolicy());
     ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false);
     IndexWriter writer = new IndexWriter(tmpDir, conf);
     writer.addDocument(testDoc);
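
TestFieldsReader takes the other route: it genuinely needs LogMergePolicy behaviour (compound files off), so instead of guarding with instanceof it pins the policy via setMergePolicy(newLogMergePolicy()) up front, and the existing cast stays safe. A hedged sketch of that setup:

    // Sketch: force a LogMergePolicy so the cast below cannot fail under random configs.
    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
        .setMergePolicy(newLogMergePolicy());
    ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false);
    IndexWriter writer = new IndexWriter(dir, conf);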

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java Wed Feb  9 01:03:49 2011
@@ -38,9 +38,11 @@ public class TestFilterIndexReader exten
       TestFields(Fields in) {
         super(in);
       }
+      @Override
       public FieldsEnum iterator() throws IOException {
         return new TestFieldsEnum(super.iterator());
       }
+      @Override
       public Terms terms(String field) throws IOException {
         return new TestTerms(super.terms(field));
       }
@@ -51,6 +53,7 @@ public class TestFilterIndexReader exten
         super(in);
       }
 
+      @Override
       public TermsEnum iterator() throws IOException {
         return new TestTermsEnum(super.iterator());
       }
@@ -61,6 +64,7 @@ public class TestFilterIndexReader exten
         super(in);
       }
 
+      @Override
       public TermsEnum terms() throws IOException {
         return new TestTermsEnum(super.terms());
       }
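
The TestFilterIndexReader edits only add @Override to the delegating methods of the nested wrapper classes, so the compiler complains if the wrapped Fields/Terms/TermsEnum signatures ever drift, instead of silently leaving a dead method behind. A trivial illustration of the same idea (classes are illustrative, not from the patch):

    class Base {
      public String label() { return "base"; }
    }
    class Sub extends Base {
      @Override                      // renaming Base.label() now breaks the build here
      public String label() { return "sub"; }
    }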

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestIndexReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestIndexReader.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestIndexReader.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestIndexReader.java Wed Feb  9 01:03:49 2011
@@ -371,7 +371,7 @@ public class TestIndexReader extends Luc
         Directory dir = newDirectory();
         byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
         
-        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newInOrderLogMergePolicy()));
         
         for (int i = 0; i < 10; i++) {
           addDoc(writer, "document number " + (i + 1));
@@ -380,7 +380,7 @@ public class TestIndexReader extends Luc
           addDocumentWithTermVectorFields(writer);
         }
         writer.close();
-        writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+        writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMergePolicy(newInOrderLogMergePolicy()));
         Document doc = new Document();
         doc.add(new Field("bin1", bin));
         doc.add(new Field("junk", "junk text", Field.Store.NO, Field.Index.ANALYZED));
@@ -417,7 +417,7 @@ public class TestIndexReader extends Luc
         // force optimize
 
 
-        writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+        writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMergePolicy(newInOrderLogMergePolicy()));
         writer.optimize();
         writer.close();
         reader = IndexReader.open(dir, false);
@@ -900,7 +900,7 @@ public class TestIndexReader extends Luc
 
       {
         IndexReader r = IndexReader.open(startDir);
-        IndexSearcher searcher = new IndexSearcher(r);
+        IndexSearcher searcher = newSearcher(r);
         ScoreDoc[] hits = null;
         try {
           hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
@@ -908,6 +908,7 @@ public class TestIndexReader extends Luc
           e.printStackTrace();
           fail("exception when init searching: " + e);
         }
+        searcher.close();
         r.close();
       }
 
@@ -996,15 +997,6 @@ public class TestIndexReader extends Luc
             }
           }
 
-          // Whether we succeeded or failed, check that all
-          // un-referenced files were in fact deleted (ie,
-          // we did not create garbage).  Just create a
-          // new IndexFileDeleter, have it delete
-          // unreferenced files, then verify that in fact
-          // no files were deleted:
-          IndexWriter.unlock(dir);
-          TestIndexWriter.assertNoUnreferencedFiles(dir, "reader.close() failed to delete unreferenced files");
-
           // Finally, verify index is not corrupt, and, if
           // we succeeded, we see all docs changed, and if
           // we failed, we see either all docs or no docs
@@ -1032,7 +1024,7 @@ public class TestIndexReader extends Luc
           }
           */
 
-          IndexSearcher searcher = new IndexSearcher(newReader);
+          IndexSearcher searcher = newSearcher(newReader);
           ScoreDoc[] hits = null;
           try {
             hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
@@ -1171,7 +1163,7 @@ public class TestIndexReader extends Luc
 
     public void testMultiReaderDeletes() throws Exception {
       Directory dir = newDirectory();
-      RandomIndexWriter w = new RandomIndexWriter(random, dir);
+      RandomIndexWriter w= new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newInOrderLogMergePolicy()));
       Document doc = new Document();
       doc.add(newField("f", "doctor", Field.Store.NO, Field.Index.NOT_ANALYZED));
       w.addDocument(doc);
@@ -1905,4 +1897,42 @@ public class TestIndexReader extends Luc
       dir.close();
     }
   }
+
+  // LUCENE-2474
+  public void testReaderFinishedListener() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(3);
+    writer.setInfoStream(VERBOSE ? System.out : null);
+    writer.addDocument(new Document());
+    writer.commit();
+    writer.addDocument(new Document());
+    writer.commit();
+    final IndexReader reader = writer.getReader();
+    final int[] closeCount = new int[1];
+    final IndexReader.ReaderFinishedListener listener = new IndexReader.ReaderFinishedListener() {
+      public void finished(IndexReader reader) {
+        closeCount[0]++;
+      }
+    };
+
+    reader.addReaderFinishedListener(listener);
+
+    reader.close();
+
+    // Just the top reader
+    assertEquals(1, closeCount[0]);
+    writer.close();
+
+    // Now also the subs
+    assertEquals(3, closeCount[0]);
+
+    IndexReader reader2 = IndexReader.open(dir);
+    reader2.addReaderFinishedListener(listener);
+
+    closeCount[0] = 0;
+    reader2.close();
+    assertEquals(3, closeCount[0]);
+    dir.close();
+  }
 }
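
The new testReaderFinishedListener exercises the LUCENE-2474 hook: a listener registered on a reader is called back as readers are "finished", first for the top-level NRT reader and then, once the writer closes, for the segment readers underneath; a directory-opened reader notifies for itself and its subs on close. A usage sketch based only on the API as it appears in this test:

    // Sketch: count finished callbacks; how many sub-reader callbacks arrive
    // depends on the number of segments in the index.
    final int[] finished = new int[1];
    IndexReader.ReaderFinishedListener listener = new IndexReader.ReaderFinishedListener() {
      public void finished(IndexReader reader) {
        finished[0]++;
      }
    };
    IndexReader reader = IndexReader.open(dir);
    reader.addReaderFinishedListener(listener);
    reader.close();
    System.out.println("finished callbacks: " + finished[0]);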

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java Wed Feb  9 01:03:49 2011
@@ -239,7 +239,7 @@ public class TestIndexReaderCloneNorms e
   private void createIndex(Random random, Directory dir) throws IOException {
     IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.CREATE)
-        .setMaxBufferedDocs(5).setSimilarityProvider(similarityOne));
+                                     .setMaxBufferedDocs(5).setSimilarityProvider(similarityOne).setMergePolicy(newLogMergePolicy()));
     LogMergePolicy lmp = (LogMergePolicy) iw.getConfig().getMergePolicy();
     lmp.setMergeFactor(3);
     lmp.setUseCompoundFile(true);
@@ -293,7 +293,7 @@ public class TestIndexReaderCloneNorms e
       throws IOException {
     IndexWriterConfig conf = newIndexWriterConfig(
             TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.APPEND)
-            .setMaxBufferedDocs(5).setSimilarityProvider(similarityOne);
+            .setMaxBufferedDocs(5).setSimilarityProvider(similarityOne).setMergePolicy(newLogMergePolicy());
     LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
     lmp.setMergeFactor(3);
     lmp.setUseCompoundFile(compound);

Modified: lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java?rev=1068718&r1=1068717&r2=1068718&view=diff
==============================================================================
--- lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java (original)
+++ lucene/dev/branches/bulkpostings/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java Wed Feb  9 01:03:49 2011
@@ -174,7 +174,7 @@ public class TestIndexReaderReopen exten
   private void doTestReopenWithCommit (Random random, Directory dir, boolean withReopen) throws IOException {
     IndexWriter iwriter = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(
-        OpenMode.CREATE).setMergeScheduler(new SerialMergeScheduler()));
+                                                              OpenMode.CREATE).setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(newInOrderLogMergePolicy()));
     iwriter.commit();
     IndexReader reader = IndexReader.open(dir, false);
     try {
@@ -773,14 +773,14 @@ public class TestIndexReaderReopen exten
                 // not synchronized
                 IndexReader refreshed = r.reopen();
                 
-                IndexSearcher searcher = new IndexSearcher(refreshed);
+                IndexSearcher searcher = newSearcher(refreshed);
                 ScoreDoc[] hits = searcher.search(
                     new TermQuery(new Term("field1", "a" + rnd.nextInt(refreshed.maxDoc()))),
                     null, 1000).scoreDocs;
                 if (hits.length > 0) {
                   searcher.doc(hits[0].doc);
                 }
-                
+                searcher.close();
                 if (refreshed != r) {
                   refreshed.close();
                 }