Posted to commits@lucene.apache.org by bu...@apache.org on 2011/02/22 02:01:11 UTC

svn commit: r1073192 [15/32] - in /lucene/dev/branches/realtime_search: ./ dev-tools/eclipse/ dev-tools/idea/.idea/ dev-tools/idea/lucene/contrib/ant/ dev-tools/idea/lucene/contrib/demo/ dev-tools/idea/lucene/contrib/highlighter/ dev-tools/idea/lucene/...

Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestCodecs.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestCodecs.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestCodecs.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestCodecs.java Tue Feb 22 01:00:39 2011
@@ -23,14 +23,14 @@ import java.util.HashSet;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.index.codecs.CodecProvider;
 import org.apache.lucene.index.codecs.FieldsConsumer;
 import org.apache.lucene.index.codecs.FieldsProducer;
 import org.apache.lucene.index.codecs.PostingsConsumer;
-import org.apache.lucene.index.codecs.TermsConsumer;
 import org.apache.lucene.index.codecs.TermStats;
+import org.apache.lucene.index.codecs.TermsConsumer;
 import org.apache.lucene.index.codecs.mocksep.MockSepCodec;
 import org.apache.lucene.index.codecs.preflex.PreFlexCodec;
 import org.apache.lucene.search.DocIdSetIterator;
@@ -238,9 +238,9 @@ public class TestCodecs extends LuceneTe
     final FieldData[] fields = new FieldData[] {field};
 
     final Directory dir = newDirectory();
+    FieldInfos clonedFieldInfos = (FieldInfos) fieldInfos.clone();
     this.write(fieldInfos, dir, fields, true);
-    final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, true, SegmentCodecs.build(fieldInfos, CodecProvider.getDefault()), fieldInfos.hasVectors());
-    si.setHasProx(false);
+    final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, SegmentCodecs.build(clonedFieldInfos, CodecProvider.getDefault()), clonedFieldInfos);
 
     final FieldsProducer reader = si.getSegmentCodecs().codec().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, 64, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR));
 
@@ -289,8 +289,10 @@ public class TestCodecs extends LuceneTe
     if (VERBOSE) {
       System.out.println("TEST: now write postings");
     }
+
+    FieldInfos clonedFieldInfos = (FieldInfos) fieldInfos.clone();
     this.write(fieldInfos, dir, fields, false);
-    final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, true, SegmentCodecs.build(fieldInfos, CodecProvider.getDefault()), fieldInfos.hasVectors());
+    final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, SegmentCodecs.build(clonedFieldInfos, CodecProvider.getDefault()), clonedFieldInfos);
 
     if (VERBOSE) {
       System.out.println("TEST: now read postings");
@@ -362,7 +364,7 @@ public class TestCodecs extends LuceneTe
 
   private ScoreDoc[] search(final IndexWriter writer, final Query q, final int n) throws IOException {
     final IndexReader reader = writer.getReader();
-    final IndexSearcher searcher = new IndexSearcher(reader);
+    final IndexSearcher searcher = newSearcher(reader);
     try {
       return searcher.search(q, null, n).scoreDocs;
     }
@@ -440,7 +442,7 @@ public class TestCodecs extends LuceneTe
         final FieldData field = fields[TestCodecs.random.nextInt(fields.length)];
         final TermsEnum termsEnum = termsDict.terms(field.fieldInfo.name).iterator();
 
-        if (si.getSegmentCodecs().codecs[field.fieldInfo.codecId] instanceof PreFlexCodec) {
+        if (si.getSegmentCodecs().codecs[field.fieldInfo.getCodecId()] instanceof PreFlexCodec) {
           // code below expects unicode sort order
           continue;
         }
@@ -589,12 +591,12 @@ public class TestCodecs extends LuceneTe
 
     final int termIndexInterval = _TestUtil.nextInt(random, 13, 27);
     final SegmentCodecs codecInfo = SegmentCodecs.build(fieldInfos, CodecProvider.getDefault());
-    final SegmentWriteState state = new SegmentWriteState(null, dir, SEGMENT, fieldInfos, 10000, termIndexInterval, codecInfo);
+    final SegmentWriteState state = new SegmentWriteState(null, dir, SEGMENT, fieldInfos, 10000, termIndexInterval, codecInfo, null);
 
     final FieldsConsumer consumer = state.segmentCodecs.codec().fieldsConsumer(state);
     Arrays.sort(fields);
     for (final FieldData field : fields) {
-      if (!allowPreFlex && codecInfo.codecs[field.fieldInfo.codecId] instanceof PreFlexCodec) {
+      if (!allowPreFlex && codecInfo.codecs[field.fieldInfo.getCodecId()] instanceof PreFlexCodec) {
         // code below expects unicode sort order
         continue;
       }
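
The recurring change in this file: the test now clones the FieldInfos before writing, presumably because write() (via SegmentCodecs.build) assigns codec ids to the FieldInfos it is handed, and the SegmentInfo must be built from the pristine copy. The new SegmentInfo constructor also takes the FieldInfos directly in place of the old hasProx/hasVectors booleans. A minimal sketch of the pattern, using only signatures visible in the hunks above (SEGMENT, fieldInfos, fields and dir are the test fixtures):

    FieldInfos clonedFieldInfos = (FieldInfos) fieldInfos.clone();
    this.write(fieldInfos, dir, fields, true);  // may assign codec ids to fieldInfos
    final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false,
        SegmentCodecs.build(clonedFieldInfos, CodecProvider.getDefault()),
        clonedFieldInfos);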

Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java Tue Feb 22 01:00:39 2011
@@ -132,11 +132,15 @@ public class TestConcurrentMergeSchedule
     IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer())
         .setMergePolicy(mp));
+    writer.setInfoStream(VERBOSE ? System.out : null);
 
     Document doc = new Document();
     Field idField = newField("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
     doc.add(idField);
     for(int i=0;i<10;i++) {
+      if (VERBOSE) {
+        System.out.println("\nTEST: cycle");
+      }
       for(int j=0;j<100;j++) {
         idField.setValue(Integer.toString(i*100+j));
         writer.addDocument(doc);
@@ -144,6 +148,9 @@ public class TestConcurrentMergeSchedule
 
       int delID = i;
       while(delID < 100*(1+i)) {
+        if (VERBOSE) {
+          System.out.println("TEST: del " + delID);
+        }
         writer.deleteDocuments(new Term("id", ""+delID));
         delID += 10;
       }
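
These hunks all follow one verbosity idiom from the test framework: pipe IndexWriter's infoStream to stdout only when the suite-wide VERBOSE flag is set, and print "TEST:" markers around interesting phases. A minimal sketch of the idiom (VERBOSE and newIndexWriterConfig come from LuceneTestCase, as in the hunks above):

    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer()));
    writer.setInfoStream(VERBOSE ? System.out : null);  // writer diagnostics only when VERBOSE
    if (VERBOSE) {
      System.out.println("TEST: cycle");
    }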

Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java Tue Feb 22 01:00:39 2011
@@ -119,6 +119,9 @@ public class TestDeletionPolicy extends 
     }
 
     public void onInit(List<? extends IndexCommit> commits) throws IOException {
+      if (VERBOSE) {
+        System.out.println("TEST: onInit");
+      }
       verifyCommitOrder(commits);
       numOnInit++;
       // do no deletions on init
@@ -126,6 +129,9 @@ public class TestDeletionPolicy extends 
     }
 
     public void onCommit(List<? extends IndexCommit> commits) throws IOException {
+      if (VERBOSE) {
+        System.out.println("TEST: onCommit");
+      }
       verifyCommitOrder(commits);
       doDeletes(commits, true);
     }
@@ -200,8 +206,10 @@ public class TestDeletionPolicy extends 
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
         new MockAnalyzer())
         .setIndexDeletionPolicy(policy);
-    LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
-    lmp.setUseCompoundFile(true);
+    MergePolicy mp = conf.getMergePolicy();
+    if (mp instanceof LogMergePolicy) {
+      ((LogMergePolicy) mp).setUseCompoundFile(true);
+    }
     IndexWriter writer = new IndexWriter(dir, conf);
     writer.close();
 
@@ -215,8 +223,10 @@ public class TestDeletionPolicy extends 
       conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
           new MockAnalyzer()).setOpenMode(
           OpenMode.APPEND).setIndexDeletionPolicy(policy);
-      lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(true);
+      mp = conf.getMergePolicy();
+      if (mp instanceof LogMergePolicy) {
+        ((LogMergePolicy) mp).setUseCompoundFile(true);
+      }
       writer = new IndexWriter(dir, conf);
       for(int j=0;j<17;j++) {
         addDoc(writer);
@@ -280,6 +290,10 @@ public class TestDeletionPolicy extends 
   public void testKeepAllDeletionPolicy() throws IOException {
     for(int pass=0;pass<2;pass++) {
 
+      if (VERBOSE) {
+        System.out.println("TEST: cycle pass=" + pass);
+      }
+
       boolean useCompoundFile = (pass % 2) != 0;
 
       // Never deletes a commit
@@ -292,34 +306,48 @@ public class TestDeletionPolicy extends 
           TEST_VERSION_CURRENT, new MockAnalyzer())
           .setIndexDeletionPolicy(policy).setMaxBufferedDocs(10)
           .setMergeScheduler(new SerialMergeScheduler());
-      LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      lmp.setMergeFactor(10);
+      MergePolicy mp = conf.getMergePolicy();
+      if (mp instanceof LogMergePolicy) {
+        ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+      }
       IndexWriter writer = new IndexWriter(dir, conf);
       for(int i=0;i<107;i++) {
         addDoc(writer);
       }
       writer.close();
 
-      conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-          new MockAnalyzer()).setOpenMode(
-          OpenMode.APPEND).setIndexDeletionPolicy(policy);
-      lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      writer = new IndexWriter(dir, conf);
-      writer.optimize();
-      writer.close();
-
-      assertEquals(1, policy.numOnInit);
+      final boolean isOptimized;
+      {
+        IndexReader r = IndexReader.open(dir);
+        isOptimized = r.isOptimized();
+        r.close();
+      }
+      if (!isOptimized) {
+        conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
+            new MockAnalyzer()).setOpenMode(OpenMode.APPEND)
+            .setIndexDeletionPolicy(policy);
+        mp = conf.getMergePolicy();
+        if (mp instanceof LogMergePolicy) {
+          ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+        }
+        if (VERBOSE) {
+          System.out.println("TEST: open writer for optimize");
+        }
+        writer = new IndexWriter(dir, conf);
+        writer.setInfoStream(VERBOSE ? System.out : null);
+        writer.optimize();
+        writer.close();
+      }
+      assertEquals(isOptimized ? 0:1, policy.numOnInit);
 
       // If we are not auto committing then there should
       // be exactly 2 commits (one per close above):
-      assertEquals(2, policy.numOnCommit);
+      assertEquals(1 + (isOptimized ? 0:1), policy.numOnCommit);
 
       // Test listCommits
       Collection<IndexCommit> commits = IndexReader.listCommits(dir);
       // 2 from closing writer
-      assertEquals(2, commits.size());
+      assertEquals(1 + (isOptimized ? 0:1), commits.size());
 
       // Make sure we can open a reader on each commit:
       for (final IndexCommit commit : commits) {
@@ -480,8 +508,10 @@ public class TestDeletionPolicy extends 
           TEST_VERSION_CURRENT, new MockAnalyzer())
           .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
           .setMaxBufferedDocs(10);
-      LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
+      MergePolicy mp = conf.getMergePolicy();
+      if (mp instanceof LogMergePolicy) {
+        ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+      }
       IndexWriter writer = new IndexWriter(dir, conf);
       for(int i=0;i<107;i++) {
         addDoc(writer);
@@ -490,8 +520,10 @@ public class TestDeletionPolicy extends 
 
       conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
           .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
-      lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
+      mp = conf.getMergePolicy();
+      if (mp instanceof LogMergePolicy) {
+        ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+      }
       writer = new IndexWriter(dir, conf);
       writer.optimize();
       writer.close();
@@ -529,8 +561,10 @@ public class TestDeletionPolicy extends 
             TEST_VERSION_CURRENT, new MockAnalyzer())
             .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
             .setMaxBufferedDocs(10);
-        LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
-        lmp.setUseCompoundFile(useCompoundFile);
+        MergePolicy mp = conf.getMergePolicy();
+        if (mp instanceof LogMergePolicy) {
+          ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+        }
         IndexWriter writer = new IndexWriter(dir, conf);
         for(int i=0;i<17;i++) {
           addDoc(writer);
@@ -585,47 +619,65 @@ public class TestDeletionPolicy extends 
       Directory dir = newDirectory();
       IndexWriterConfig conf = newIndexWriterConfig(
           TEST_VERSION_CURRENT, new MockAnalyzer())
-          .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy);
-      LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
+        .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy).setMergePolicy(newInOrderLogMergePolicy());
+      MergePolicy mp = conf.getMergePolicy();
+      if (mp instanceof LogMergePolicy) {
+        ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+      }
       IndexWriter writer = new IndexWriter(dir, conf);
       writer.close();
       Term searchTerm = new Term("content", "aaa");
       Query query = new TermQuery(searchTerm);
 
       for(int i=0;i<N+1;i++) {
+        if (VERBOSE) {
+          System.out.println("\nTEST: cycle i=" + i);
+        }
         conf = newIndexWriterConfig(
             TEST_VERSION_CURRENT, new MockAnalyzer())
             .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
-        lmp = (LogMergePolicy) conf.getMergePolicy();
-        lmp.setUseCompoundFile(useCompoundFile);
+        mp = conf.getMergePolicy();
+        if (mp instanceof LogMergePolicy) {
+          ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+        }
         writer = new IndexWriter(dir, conf);
         for(int j=0;j<17;j++) {
           addDoc(writer);
         }
         // this is a commit
+        if (VERBOSE) {
+          System.out.println("TEST: close writer");
+        }
         writer.close();
         IndexReader reader = IndexReader.open(dir, policy, false);
         reader.deleteDocument(3*i+1);
         reader.setNorm(4*i+1, "content", conf.getSimilarityProvider().get("content").encodeNormValue(2.0F));
-        IndexSearcher searcher = new IndexSearcher(reader);
+        IndexSearcher searcher = newSearcher(reader);
         ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
         assertEquals(16*(1+i), hits.length);
         // this is a commit
+        if (VERBOSE) {
+          System.out.println("TEST: close reader numOnCommit=" + policy.numOnCommit);
+        }
         reader.close();
         searcher.close();
       }
       conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
           .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
-      lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
+      mp = conf.getMergePolicy();
+      if (mp instanceof LogMergePolicy) {
+        ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+      }
+      IndexReader r = IndexReader.open(dir);
+      final boolean wasOptimized = r.isOptimized();
+      r.close();
       writer = new IndexWriter(dir, conf);
       writer.optimize();
       // this is a commit
       writer.close();
 
       assertEquals(2*(N+1)+1, policy.numOnInit);
-      assertEquals(2*(N+2), policy.numOnCommit);
+      assertEquals(2*(N+2) - (wasOptimized ? 1:0), policy.numOnCommit);
 
       IndexSearcher searcher = new IndexSearcher(dir, false);
       ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
@@ -644,7 +696,7 @@ public class TestDeletionPolicy extends 
 
           // Work backwards in commits on what the expected
           // count should be.
-          searcher = new IndexSearcher(reader);
+          searcher = newSearcher(reader);
           hits = searcher.search(query, null, 1000).scoreDocs;
           if (i > 1) {
             if (i % 2 == 0) {
@@ -692,8 +744,10 @@ public class TestDeletionPolicy extends 
           TEST_VERSION_CURRENT, new MockAnalyzer())
           .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
           .setMaxBufferedDocs(10);
-      LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
+      MergePolicy mp = conf.getMergePolicy();
+      if (mp instanceof LogMergePolicy) {
+        ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+      }
       IndexWriter writer = new IndexWriter(dir, conf);
       writer.close();
       Term searchTerm = new Term("content", "aaa");
@@ -705,8 +759,10 @@ public class TestDeletionPolicy extends 
             TEST_VERSION_CURRENT, new MockAnalyzer())
             .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy)
             .setMaxBufferedDocs(10);
-        lmp = (LogMergePolicy) conf.getMergePolicy();
-        lmp.setUseCompoundFile(useCompoundFile);
+        mp = conf.getMergePolicy();
+        if (mp instanceof LogMergePolicy) {
+          ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
+        }
         writer = new IndexWriter(dir, conf);
         for(int j=0;j<17;j++) {
           addDoc(writer);
@@ -716,7 +772,7 @@ public class TestDeletionPolicy extends 
         IndexReader reader = IndexReader.open(dir, policy, false);
         reader.deleteDocument(3);
         reader.setNorm(5, "content", conf.getSimilarityProvider().get("content").encodeNormValue(2.0F));
-        IndexSearcher searcher = new IndexSearcher(reader);
+        IndexSearcher searcher = newSearcher(reader);
         ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
         assertEquals(16, hits.length);
         // this is a commit
@@ -751,7 +807,7 @@ public class TestDeletionPolicy extends 
 
           // Work backwards in commits on what the expected
           // count should be.
-          searcher = new IndexSearcher(reader);
+          searcher = newSearcher(reader);
           hits = searcher.search(query, null, 1000).scoreDocs;
           assertEquals(expectedCount, hits.length);
           searcher.close();
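
Every hunk in this file replaces the same unsafe pattern: the old code cast conf.getMergePolicy() straight to LogMergePolicy, which can now throw ClassCastException, presumably because the test framework's newIndexWriterConfig may return a randomized merge policy. The replacement guards the cast and applies LogMergePolicy-only settings conditionally. A minimal sketch of the guarded form (conf and useCompoundFile as in the tests above):

    MergePolicy mp = conf.getMergePolicy();
    if (mp instanceof LogMergePolicy) {
      // compound-file and merge-factor knobs exist only on LogMergePolicy
      ((LogMergePolicy) mp).setUseCompoundFile(useCompoundFile);
    }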

Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestDoc.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestDoc.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestDoc.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestDoc.java Tue Feb 22 01:00:39 2011
@@ -57,6 +57,9 @@ public class TestDoc extends LuceneTestC
     @Override
     public void setUp() throws Exception {
         super.setUp();
+        if (VERBOSE) {
+          System.out.println("TEST: setUp");
+        }
         workDir = new File(TEMP_DIR,"TestDoc");
         workDir.mkdirs();
 
@@ -201,8 +204,8 @@ public class TestDoc extends LuceneTestC
       r2.close();
 
       final SegmentInfo info = new SegmentInfo(merged, si1.docCount + si2.docCount, si1.dir,
-                                               false, merger.fieldInfos().hasProx(), merger.getSegmentCodecs(),
-                                               merger.fieldInfos().hasVectors());
+                                               false, merger.getSegmentCodecs(),
+                                               merger.fieldInfos());
 
       if (useCompoundFile) {
         Collection<String> filesToDelete = merger.createCompoundFile(merged + ".cfs", info);

Added: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestDocsAndPositions.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestDocsAndPositions.java?rev=1073192&view=auto
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestDocsAndPositions.java (added)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestDocsAndPositions.java Tue Feb 22 01:00:39 2011
@@ -0,0 +1,332 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.index.IndexReader.ReaderContext;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.ReaderUtil;
+
+public class TestDocsAndPositions extends LuceneTestCase {
+  private String fieldName;
+  private boolean usePayload;
+
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    fieldName = "field" + random.nextInt();
+    usePayload = random.nextBoolean();
+  }
+
+  /**
+   * Simple test case for {@link DocsAndPositionsEnum}.
+   */
+  public void testPositionsSimple() throws IOException {
+    Directory directory = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random, directory,
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
+            MockTokenizer.WHITESPACE, true, usePayload)));
+    for (int i = 0; i < 39; i++) {
+      Document doc = new Document();
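+      // tokens 1..10 repeated four times per doc: term "1" occurs with freq=4
+      // at positions 0, 10, 20 and 30, which the assertions below rely on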
+      doc.add(newField(fieldName, "1 2 3 4 5 6 7 8 9 10 "
+          + "1 2 3 4 5 6 7 8 9 10 " + "1 2 3 4 5 6 7 8 9 10 "
+          + "1 2 3 4 5 6 7 8 9 10", Field.Store.YES, Field.Index.ANALYZED));
+      writer.addDocument(doc);
+    }
+    IndexReader reader = writer.getReader();
+    writer.close();
+
+    for (int i = 0; i < 39 * RANDOM_MULTIPLIER; i++) {
+      BytesRef bytes = new BytesRef("1");
+      ReaderContext topReaderContext = reader.getTopReaderContext();
+      AtomicReaderContext[] leaves = ReaderUtil.leaves(topReaderContext);
+      for (AtomicReaderContext atomicReaderContext : leaves) {
+        DocsAndPositionsEnum docsAndPosEnum = getDocsAndPositions(
+            atomicReaderContext.reader, bytes, null);
+        assertNotNull(docsAndPosEnum);
+        if (atomicReaderContext.reader.maxDoc() == 0) {
+          continue;
+        }
+        final int advance = docsAndPosEnum.advance(random.nextInt(atomicReaderContext.reader.maxDoc()));
+        do {
+          String msg = "Advanced to: " + advance + " current doc: "
+              + docsAndPosEnum.docID() + " usePayloads: " + usePayload;
+          assertEquals(msg, 4, docsAndPosEnum.freq());
+          assertEquals(msg, 0, docsAndPosEnum.nextPosition());
+          assertEquals(msg, 4, docsAndPosEnum.freq());
+          assertEquals(msg, 10, docsAndPosEnum.nextPosition());
+          assertEquals(msg, 4, docsAndPosEnum.freq());
+          assertEquals(msg, 20, docsAndPosEnum.nextPosition());
+          assertEquals(msg, 4, docsAndPosEnum.freq());
+          assertEquals(msg, 30, docsAndPosEnum.nextPosition());
+        } while (docsAndPosEnum.nextDoc() != DocsAndPositionsEnum.NO_MORE_DOCS);
+      }
+    }
+    reader.close();
+    directory.close();
+  }
+
+  public DocsAndPositionsEnum getDocsAndPositions(IndexReader reader,
+      BytesRef bytes, Bits skipDocs) throws IOException {
+      return reader.termPositionsEnum(null, fieldName, bytes);
+  }
+
+  public DocsEnum getDocsEnum(IndexReader reader, BytesRef bytes,
+      boolean freqs, Bits skipDocs) throws IOException {
+    int randInt = random.nextInt(10);
+    if (randInt == 0) { // once in a while throw in a positions enum
+      return getDocsAndPositions(reader, bytes, skipDocs);
+    } else {
+      return reader.termDocsEnum(skipDocs, fieldName, bytes);
+    } 
+  }
+
+  /**
+   * This test indexes random numbers within a range into a field and checks
+   * their occurrences by searching for a number selected at random from that
+   * range. All positions for that number are saved up front and compared to
+   * the enum's positions.
+   */
+  public void testRandomPositions() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random, dir,
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
+            MockTokenizer.WHITESPACE, true, usePayload)).setMergePolicy(newInOrderLogMergePolicy()));
+    int numDocs = 131;
+    int max = 1051;
+    int term = random.nextInt(max);
+    Integer[][] positionsInDoc = new Integer[numDocs][];
+    for (int i = 0; i < numDocs; i++) {
+      Document doc = new Document();
+      ArrayList<Integer> positions = new ArrayList<Integer>();
+      StringBuilder builder = new StringBuilder();
+      for (int j = 0; j < 3049; j++) {
+        int nextInt = random.nextInt(max);
+        builder.append(nextInt).append(" ");
+        if (nextInt == term) {
+          positions.add(Integer.valueOf(j));
+        }
+      }
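+      // make sure the term occurs at least once per doc: append it at position 3049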
+      if (positions.size() == 0) {
+        builder.append(term);
+        positions.add(3049);
+      }
+      doc.add(newField(fieldName, builder.toString(), Field.Store.YES,
+          Field.Index.ANALYZED));
+      positionsInDoc[i] = positions.toArray(new Integer[0]);
+      writer.addDocument(doc);
+    }
+
+    IndexReader reader = writer.getReader();
+    writer.close();
+
+    for (int i = 0; i < 39 * RANDOM_MULTIPLIER; i++) {
+      BytesRef bytes = new BytesRef("" + term);
+      ReaderContext topReaderContext = reader.getTopReaderContext();
+      AtomicReaderContext[] leaves = ReaderUtil.leaves(topReaderContext);
+      for (AtomicReaderContext atomicReaderContext : leaves) {
+        DocsAndPositionsEnum docsAndPosEnum = getDocsAndPositions(
+            atomicReaderContext.reader, bytes, null);
+        assertNotNull(docsAndPosEnum);
+        int initDoc = 0;
+        int maxDoc = atomicReaderContext.reader.maxDoc();
+        // initially advance or do next doc
+        if (random.nextBoolean()) {
+          initDoc = docsAndPosEnum.nextDoc();
+        } else {
+          initDoc = docsAndPosEnum.advance(random.nextInt(maxDoc));
+        }
+        // now run through the scorer and check if all positions are there...
+        do {
+          int docID = docsAndPosEnum.docID();
+          if (docID == DocsAndPositionsEnum.NO_MORE_DOCS) {
+            break;
+          }
+          Integer[] pos = positionsInDoc[atomicReaderContext.docBase + docID];
+          assertEquals(pos.length, docsAndPosEnum.freq());
+          // number of positions read should be random - don't
+          // always read all of them
+          final int howMany = random.nextInt(20) == 0 ? pos.length
+              - random.nextInt(pos.length) : pos.length;
+          for (int j = 0; j < howMany; j++) {
+            assertEquals("iteration: " + i + " initDoc: " + initDoc + " doc: "
+                + docID + " base: " + atomicReaderContext.docBase
+                + " positions: " + Arrays.toString(pos) + " usePayloads: "
+                + usePayload, pos[j].intValue(), docsAndPosEnum.nextPosition());
+          }
+
+          if (random.nextInt(10) == 0) { // once in a while, advance
+            docsAndPosEnum
+                .advance(docID + 1 + random.nextInt((maxDoc - docID)));
+          }
+
+        } while (docsAndPosEnum.nextDoc() != DocsAndPositionsEnum.NO_MORE_DOCS);
+      }
+
+    }
+    reader.close();
+    dir.close();
+  }
+
+  public void testRandomDocs() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random, dir,
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
+            MockTokenizer.WHITESPACE, true, usePayload)).setMergePolicy(newInOrderLogMergePolicy()));
+    int numDocs = 499;
+    int max = 15678;
+    int term = random.nextInt(max);
+    int[] freqInDoc = new int[numDocs];
+    for (int i = 0; i < numDocs; i++) {
+      Document doc = new Document();
+      StringBuilder builder = new StringBuilder();
+      for (int j = 0; j < 199; j++) {
+        int nextInt = random.nextInt(max);
+        builder.append(nextInt).append(" ");
+        if (nextInt == term) {
+          freqInDoc[i]++;
+        }
+      }
+      doc.add(newField(fieldName, builder.toString(), Field.Store.YES,
+          Field.Index.ANALYZED));
+      writer.addDocument(doc);
+    }
+
+    IndexReader reader = writer.getReader();
+    writer.close();
+
+    for (int i = 0; i < 39 * RANDOM_MULTIPLIER; i++) {
+      BytesRef bytes = new BytesRef("" + term);
+      ReaderContext topReaderContext = reader.getTopReaderContext();
+      AtomicReaderContext[] leaves = ReaderUtil.leaves(topReaderContext);
+      for (AtomicReaderContext context : leaves) {
+        int maxDoc = context.reader.maxDoc();
+        DocsEnum docsAndPosEnum = getDocsEnum(context.reader, bytes, true, null);
+        if (findNext(freqInDoc, context.docBase, context.docBase + maxDoc) == Integer.MAX_VALUE) {
+          assertNull(docsAndPosEnum);
+          continue;
+        }
+        assertNotNull(docsAndPosEnum);
+        docsAndPosEnum.nextDoc();
+        for (int j = 0; j < maxDoc; j++) {
+          if (freqInDoc[context.docBase + j] != 0) {
+            assertEquals(j, docsAndPosEnum.docID());
+            assertEquals(docsAndPosEnum.freq(), freqInDoc[context.docBase + j]);
+            if (i % 2 == 0 && random.nextInt(10) == 0) {
+              int next = findNext(freqInDoc, context.docBase+j+1, context.docBase + maxDoc) - context.docBase;
+              int advancedTo = docsAndPosEnum.advance(next);
+              if (next >= maxDoc) {
+                assertEquals(DocsEnum.NO_MORE_DOCS, advancedTo);
+              } else {
+                assertTrue("advanced to: " +advancedTo + " but should be <= " + next, next >= advancedTo);  
+              }
+            } else {
+              docsAndPosEnum.nextDoc();
+            }
+          } 
+        }
+        assertEquals("docBase: " + context.docBase + " maxDoc: " + maxDoc + " " + docsAndPosEnum.getClass(), DocsEnum.NO_MORE_DOCS, docsAndPosEnum.docID());
+      }
+      
+    }
+
+    reader.close();
+    dir.close();
+  }
+  
+  private static int findNext(int[] docs, int pos, int max) {
+    for (int i = pos; i < max; i++) {
+      if (docs[i] != 0) {
+        return i;
+      }
+    }
+    return Integer.MAX_VALUE;
+  }
+
+  /**
+   * Tests retrieval of positions for terms that have a large number of
+   * occurrences, forcing a buffer refill during positions iteration.
+   */
+  public void testLargeNumberOfPositions() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random, dir,
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
+            MockTokenizer.WHITESPACE, true, usePayload)));
+    int howMany = 1000;
+    for (int i = 0; i < 39; i++) {
+      Document doc = new Document();
+      StringBuilder builder = new StringBuilder();
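+      // alternating "even"/"odd" tokens: "even" gets freq=howMany/2 at positions 0, 2, 4, ...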
+      for (int j = 0; j < howMany; j++) {
+        if (j % 2 == 0) {
+          builder.append("even ");
+        } else {
+          builder.append("odd ");
+        }
+      }
+      doc.add(newField(fieldName, builder.toString(), Field.Store.YES,
+          Field.Index.ANALYZED));
+      writer.addDocument(doc);
+    }
+
+    // now do searches
+    IndexReader reader = writer.getReader();
+    writer.close();
+
+    for (int i = 0; i < 39 * RANDOM_MULTIPLIER; i++) {
+      BytesRef bytes = new BytesRef("even");
+
+      ReaderContext topReaderContext = reader.getTopReaderContext();
+      AtomicReaderContext[] leaves = ReaderUtil.leaves(topReaderContext);
+      for (AtomicReaderContext atomicReaderContext : leaves) {
+        DocsAndPositionsEnum docsAndPosEnum = getDocsAndPositions(
+            atomicReaderContext.reader, bytes, null);
+        assertNotNull(docsAndPosEnum);
+
+        int initDoc = 0;
+        int maxDoc = atomicReaderContext.reader.maxDoc();
+        // initially advance or do next doc
+        if (random.nextBoolean()) {
+          initDoc = docsAndPosEnum.nextDoc();
+        } else {
+          initDoc = docsAndPosEnum.advance(random.nextInt(maxDoc));
+        }
+        String msg = "Iteration: " + i + " initDoc: " + initDoc + " payloads: "
+            + usePayload;
+        assertEquals(howMany / 2, docsAndPosEnum.freq());
+        for (int j = 0; j < howMany; j += 2) {
+          assertEquals("position missmatch index: " + j + " with freq: "
+              + docsAndPosEnum.freq() + " -- " + msg, j,
+              docsAndPosEnum.nextPosition());
+        }
+      }
+    }
+    reader.close();
+    dir.close();
+  }
+
+}
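
For reference, the consumption idiom the new test exercises on each atomic leaf, written out on its own (same branch API as used above; reader is assumed to be an atomic leaf reader, "field" and "term" are placeholders, termPositionsEnum returns null when the field or term is absent, and each document yields freq() positions in ascending order):

    DocsAndPositionsEnum dp = reader.termPositionsEnum(null, "field", new BytesRef("term"));
    if (dp != null) {
      while (dp.nextDoc() != DocsAndPositionsEnum.NO_MORE_DOCS) {
        final int freq = dp.freq();
        for (int k = 0; k < freq; k++) {
          int position = dp.nextPosition();  // ascending within the document
        }
      }
    }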

Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java Tue Feb 22 01:00:39 2011
@@ -25,20 +25,20 @@ import org.apache.lucene.analysis.MockAn
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
-import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.document.Field.Index;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.Field.TermVector;
+import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.AttributeSource;
+import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
-import org.apache.lucene.util.BytesRef;
 
 public class TestDocumentWriter extends LuceneTestCase {
   private Directory dir;
@@ -98,8 +98,7 @@ public class TestDocumentWriter extends 
 
     // test that the norms are not present in the segment if
     // omitNorms is true
-    for (int i = 0; i < reader.core.fieldInfos.size(); i++) {
-      FieldInfo fi = reader.core.fieldInfos.fieldInfo(i);
+    for (FieldInfo fi : reader.core.fieldInfos) {
       if (fi.isIndexed) {
         assertTrue(fi.omitNorms == !reader.hasNorms(fi.name));
       }

Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java Tue Feb 22 01:00:39 2011
@@ -51,7 +51,7 @@ public class TestFieldsReader extends Lu
     DocHelper.setupDoc(testDoc);
     fieldInfos.add(testDoc);
     dir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy());
     ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false);
     IndexWriter writer = new IndexWriter(dir, conf);
     writer.addDocument(testDoc);
@@ -291,7 +291,7 @@ public class TestFieldsReader extends Lu
     Directory tmpDir = newFSDirectory(file);
     assertTrue(tmpDir != null);
 
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE);
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setMergePolicy(newLogMergePolicy());
     ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false);
     IndexWriter writer = new IndexWriter(tmpDir, conf);
     writer.addDocument(testDoc);
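
Unlike TestDeletionPolicy above, this file keeps the unconditional LogMergePolicy cast but makes it safe by pinning the config to a log merge policy first. A sketch of that alternative (newLogMergePolicy is the test-framework factory used in the hunks):

    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
        .setMergePolicy(newLogMergePolicy());  // pin the type so the cast below cannot fail
    ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false);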

Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java Tue Feb 22 01:00:39 2011
@@ -38,9 +38,11 @@ public class TestFilterIndexReader exten
       TestFields(Fields in) {
         super(in);
       }
+      @Override
       public FieldsEnum iterator() throws IOException {
         return new TestFieldsEnum(super.iterator());
       }
+      @Override
       public Terms terms(String field) throws IOException {
         return new TestTerms(super.terms(field));
       }
@@ -51,6 +53,7 @@ public class TestFilterIndexReader exten
         super(in);
       }
 
+      @Override
       public TermsEnum iterator() throws IOException {
         return new TestTermsEnum(super.iterator());
       }
@@ -61,6 +64,7 @@ public class TestFilterIndexReader exten
         super(in);
       }
 
+      @Override
       public TermsEnum terms() throws IOException {
         return new TestTermsEnum(super.terms());
       }

Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java Tue Feb 22 01:00:39 2011
@@ -92,10 +92,9 @@ public class TestIndexFileDeleter extend
     CompoundFileReader cfsReader = new CompoundFileReader(dir, "_2.cfs");
     FieldInfos fieldInfos = new FieldInfos(cfsReader, "_2.fnm");
     int contentFieldIndex = -1;
-    for(i=0;i<fieldInfos.size();i++) {
-      FieldInfo fi = fieldInfos.fieldInfo(i);
+    for (FieldInfo fi : fieldInfos) {
       if (fi.name.equals("content")) {
-        contentFieldIndex = i;
+        contentFieldIndex = fi.number;
         break;
       }
     }
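
As in TestDocumentWriter above, index-based FieldInfos lookups give way to iteration: FieldInfos is now iterable, and the field's ordinal is read off fi.number where it is still needed. A sketch of the replacement idiom (fieldInfos as loaded above):

    int contentFieldIndex = -1;
    for (FieldInfo fi : fieldInfos) {   // FieldInfos now implements Iterable<FieldInfo>
      if (fi.name.equals("content")) {
        contentFieldIndex = fi.number;  // ordinal kept on the FieldInfo itself
        break;
      }
    }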

Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexReader.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexReader.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexReader.java Tue Feb 22 01:00:39 2011
@@ -360,7 +360,7 @@ public class TestIndexReader extends Luc
 
         // CREATE A NEW READER and re-test
         reader = IndexReader.open(dir, false);
-        assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm));
+        assertEquals("deleted docFreq", 0, reader.docFreq(searchTerm));
         assertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
         reader.close();
         reader2.close();
@@ -371,7 +371,7 @@ public class TestIndexReader extends Luc
         Directory dir = newDirectory();
         byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
 
-        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newInOrderLogMergePolicy()));
 
         for (int i = 0; i < 10; i++) {
           addDoc(writer, "document number " + (i + 1));
@@ -380,7 +380,7 @@ public class TestIndexReader extends Luc
           addDocumentWithTermVectorFields(writer);
         }
         writer.close();
-        writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+        writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMergePolicy(newInOrderLogMergePolicy()));
         Document doc = new Document();
         doc.add(new Field("bin1", bin));
         doc.add(new Field("junk", "junk text", Field.Store.NO, Field.Index.ANALYZED));
@@ -417,7 +417,7 @@ public class TestIndexReader extends Luc
         // force optimize
 
 
-        writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+        writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMergePolicy(newInOrderLogMergePolicy()));
         writer.optimize();
         writer.close();
         reader = IndexReader.open(dir, false);
@@ -697,7 +697,6 @@ public class TestIndexReader extends Luc
 
         // CREATE A NEW READER and re-test
         reader = IndexReader.open(dir, false);
-        assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm));
         assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm2));
         assertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
         assertTermDocsCount("deleted termDocs", reader, searchTerm2, 100);
@@ -838,7 +837,6 @@ public class TestIndexReader extends Luc
       writer.close();
       IndexReader reader = IndexReader.open(dir, false);
       reader.deleteDocument(0);
-      reader.deleteDocument(1);
       reader.close();
       reader = IndexReader.open(dir, false);
       reader.undeleteAll();
@@ -855,7 +853,6 @@ public class TestIndexReader extends Luc
       writer.close();
       IndexReader reader = IndexReader.open(dir, false);
       reader.deleteDocument(0);
-      reader.deleteDocument(1);
       reader.close();
       reader = IndexReader.open(dir, false);
       reader.undeleteAll();
@@ -903,7 +900,7 @@ public class TestIndexReader extends Luc
 
       {
         IndexReader r = IndexReader.open(startDir);
-        IndexSearcher searcher = new IndexSearcher(r);
+        IndexSearcher searcher = newSearcher(r);
         ScoreDoc[] hits = null;
         try {
           hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
@@ -911,6 +908,7 @@ public class TestIndexReader extends Luc
           e.printStackTrace();
           fail("exception when init searching: " + e);
         }
+        searcher.close();
         r.close();
       }
 
@@ -999,15 +997,6 @@ public class TestIndexReader extends Luc
             }
           }
 
-          // Whether we succeeded or failed, check that all
-          // un-referenced files were in fact deleted (ie,
-          // we did not create garbage).  Just create a
-          // new IndexFileDeleter, have it delete
-          // unreferenced files, then verify that in fact
-          // no files were deleted:
-          IndexWriter.unlock(dir);
-          TestIndexWriter.assertNoUnreferencedFiles(dir, "reader.close() failed to delete unreferenced files");
-
           // Finally, verify index is not corrupt, and, if
           // we succeeded, we see all docs changed, and if
           // we failed, we see either all docs or no docs
@@ -1035,7 +1024,7 @@ public class TestIndexReader extends Luc
           }
           */
 
-          IndexSearcher searcher = new IndexSearcher(newReader);
+          IndexSearcher searcher = newSearcher(newReader);
           ScoreDoc[] hits = null;
           try {
             hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
@@ -1174,7 +1163,7 @@ public class TestIndexReader extends Luc
 
     public void testMultiReaderDeletes() throws Exception {
       Directory dir = newDirectory();
-      RandomIndexWriter w = new RandomIndexWriter(random, dir);
+      RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newInOrderLogMergePolicy()));
       Document doc = new Document();
       doc.add(newField("f", "doctor", Field.Store.NO, Field.Index.NOT_ANALYZED));
       w.addDocument(doc);
@@ -1290,9 +1279,6 @@ public class TestIndexReader extends Luc
 
         // Open another reader to confirm that everything is deleted
         reader2 = IndexReader.open(dir, false);
-        assertEquals("reopened 2", 100, reader2.docFreq(searchTerm1));
-        assertEquals("reopened 2", 100, reader2.docFreq(searchTerm2));
-        assertEquals("reopened 2", 100, reader2.docFreq(searchTerm3));
         assertTermDocsCount("reopened 2", reader2, searchTerm1, 0);
         assertTermDocsCount("reopened 2", reader2, searchTerm2, 0);
         assertTermDocsCount("reopened 2", reader2, searchTerm3, 100);
@@ -1911,4 +1897,42 @@ public class TestIndexReader extends Luc
       dir.close();
     }
   }
+
+  // LUCENE-2474
+  public void testReaderFinishedListener() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(3);
+    writer.setInfoStream(VERBOSE ? System.out : null);
+    writer.addDocument(new Document());
+    writer.commit();
+    writer.addDocument(new Document());
+    writer.commit();
+    final IndexReader reader = writer.getReader();
+    final int[] closeCount = new int[1];
+    final IndexReader.ReaderFinishedListener listener = new IndexReader.ReaderFinishedListener() {
+      public void finished(IndexReader reader) {
+        closeCount[0]++;
+      }
+    };
+
+    reader.addReaderFinishedListener(listener);
+
+    reader.close();
+
+    // Just the top reader
+    assertEquals(1, closeCount[0]);
+    writer.close();
+
+    // Now also the subs
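+    // (two commits with mergeFactor=3 leave two segments, so closing the
+    // writer releases two sub-readers on top of the one top-level reader)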
+    assertEquals(3, closeCount[0]);
+
+    IndexReader reader2 = IndexReader.open(dir);
+    reader2.addReaderFinishedListener(listener);
+
+    closeCount[0] = 0;
+    reader2.close();
+    assertEquals(3, closeCount[0]);
+    dir.close();
+  }
 }

Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java Tue Feb 22 01:00:39 2011
@@ -43,7 +43,7 @@ public class TestIndexReaderCloneNorms e
 
   private class SimilarityOne extends DefaultSimilarity {
     @Override
-    public float computeNorm(String fieldName, FieldInvertState state) {
+    public float computeNorm(FieldInvertState state) {
       // disable length norm
       return state.getBoost();
     }
@@ -106,13 +106,17 @@ public class TestIndexReaderCloneNorms e
     Directory dir3 = newDirectory();
 
     createIndex(random, dir3);
+    if (VERBOSE) {
+      System.out.println("TEST: now addIndexes/optimize");
+    }
     IndexWriter iw = new IndexWriter(
         dir3,
         newIndexWriterConfig(TEST_VERSION_CURRENT, anlzr).
             setOpenMode(OpenMode.APPEND).
             setMaxBufferedDocs(5).
-            setMergePolicy(newLogMergePolicy(3))
+        setMergePolicy(newLogMergePolicy(3))
     );
+    iw.setInfoStream(VERBOSE ? System.out : null);
     iw.addIndexes(dir1, dir2);
     iw.optimize();
     iw.close();
@@ -146,6 +150,9 @@ public class TestIndexReaderCloneNorms e
 
   // try cloning and reopening the norms
   private void doTestNorms(Random random, Directory dir) throws IOException {
+    if (VERBOSE) {
+      System.out.println("TEST: now doTestNorms");
+    }
     addDocs(random, dir, 12, true);
     IndexReader ir = IndexReader.open(dir, false);
     verifyIndex(ir);
@@ -237,13 +244,20 @@ public class TestIndexReaderCloneNorms e
   }
   
   private void createIndex(Random random, Directory dir) throws IOException {
+    if (VERBOSE) {
+      System.out.println("TEST: createIndex");
+    }
     IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.CREATE)
-        .setMaxBufferedDocs(5).setSimilarityProvider(similarityOne));
+        .setMaxBufferedDocs(5).setSimilarityProvider(similarityOne).setMergePolicy(newLogMergePolicy()));
+
     LogMergePolicy lmp = (LogMergePolicy) iw.getConfig().getMergePolicy();
     lmp.setMergeFactor(3);
     lmp.setUseCompoundFile(true);
     iw.close();
+    if (VERBOSE) {
+      System.out.println("TEST: done createIndex");
+    }
   }
 
   private void modifyNormsForF1(IndexReader ir) throws IOException {
@@ -293,11 +307,12 @@ public class TestIndexReaderCloneNorms e
       throws IOException {
     IndexWriterConfig conf = newIndexWriterConfig(
             TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.APPEND)
-            .setMaxBufferedDocs(5).setSimilarityProvider(similarityOne);
+            .setMaxBufferedDocs(5).setSimilarityProvider(similarityOne).setMergePolicy(newLogMergePolicy());
     LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
     lmp.setMergeFactor(3);
     lmp.setUseCompoundFile(compound);
     IndexWriter iw = new IndexWriter(dir, conf);
+    iw.setInfoStream(VERBOSE ? System.out : null);
     for (int i = 0; i < ndocs; i++) {
       iw.addDocument(newDoc());
     }

Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java Tue Feb 22 01:00:39 2011
@@ -174,7 +174,7 @@ public class TestIndexReaderReopen exten
   private void doTestReopenWithCommit (Random random, Directory dir, boolean withReopen) throws IOException {
     IndexWriter iwriter = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(
-        OpenMode.CREATE).setMergeScheduler(new SerialMergeScheduler()));
+        OpenMode.CREATE).setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(newInOrderLogMergePolicy()));
     iwriter.commit();
     IndexReader reader = IndexReader.open(dir, false);
     try {
@@ -773,14 +773,14 @@ public class TestIndexReaderReopen exten
                 // not synchronized
                 IndexReader refreshed = r.reopen();
                 
-                IndexSearcher searcher = new IndexSearcher(refreshed);
+                IndexSearcher searcher = newSearcher(refreshed);
                 ScoreDoc[] hits = searcher.search(
                     new TermQuery(new Term("field1", "a" + rnd.nextInt(refreshed.maxDoc()))),
                     null, 1000).scoreDocs;
                 if (hits.length > 0) {
                   searcher.doc(hits[0].doc);
                 }
-                
+                searcher.close();
                 if (refreshed != r) {
                   refreshed.close();
                 }
@@ -1211,7 +1211,6 @@ public class TestIndexReaderReopen exten
 
     IndexReader r = IndexReader.open(dir, false);
     assertEquals(0, r.numDocs());
-    assertEquals(4, r.maxDoc());
 
     Collection<IndexCommit> commits = IndexReader.listCommits(dir);
     for (final IndexCommit commit : commits) {

Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java Tue Feb 22 01:00:39 2011
@@ -101,19 +101,12 @@ public class TestIndexWriter extends Luc
         }
         reader.close();
 
-        // test doc count before segments are merged/index is optimized
-        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
-        assertEquals(100, writer.maxDoc());
-        writer.close();
-
         reader = IndexReader.open(dir, true);
-        assertEquals(100, reader.maxDoc());
         assertEquals(60, reader.numDocs());
         reader.close();
 
         // optimize the index and check that the new doc count is correct
         writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
-        assertEquals(100, writer.maxDoc());
         assertEquals(60, writer.numDocs());
         writer.optimize();
         assertEquals(60, writer.maxDoc());
@@ -260,7 +253,7 @@ public class TestIndexWriter extends Luc
     public void testOptimizeTempSpaceUsage() throws IOException {
 
       MockDirectoryWrapper dir = newDirectory();
-      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
+      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10).setMergePolicy(newLogMergePolicy()));
       if (VERBOSE) {
         System.out.println("TEST: config1=" + writer.getConfig());
       }
@@ -293,7 +286,7 @@ public class TestIndexWriter extends Luc
       // Important to use same term index interval else a
       // smaller one here could increase the disk usage and
       // cause a false failure:
-      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setTermIndexInterval(termIndexInterval));
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setTermIndexInterval(termIndexInterval).setMergePolicy(newLogMergePolicy()));
       writer.setInfoStream(VERBOSE ? System.out : null);
       writer.optimize();
       writer.close();
@@ -968,7 +961,7 @@ public class TestIndexWriter extends Luc
       Directory dir = newDirectory();
       IndexWriterConfig conf = newIndexWriterConfig(
           TEST_VERSION_CURRENT, new MockAnalyzer())
-          .setMaxBufferedDocs(2);
+        .setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
       ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
       IndexWriter iw = new IndexWriter(dir, conf);
       Document document = new Document();
@@ -1010,7 +1003,7 @@ public class TestIndexWriter extends Luc
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer())
-        .setMergeScheduler(new MyMergeScheduler()).setMaxBufferedDocs(2);
+      .setMergeScheduler(new MyMergeScheduler()).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
     LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
     lmp.setMaxMergeDocs(20);
     lmp.setMergeFactor(2);
@@ -1033,11 +1026,11 @@ public class TestIndexWriter extends Luc
       if (VERBOSE) {
         System.out.println("TEST: iter=" + i);
       }
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
       writer.setInfoStream(VERBOSE ? System.out : null);
-      LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
-      lmp.setMergeFactor(2);
-      lmp.setUseCompoundFile(false);
+      //LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
+      //lmp.setMergeFactor(2);
+      //lmp.setUseCompoundFile(false);
       Document doc = new Document();
       String contents = "aa bb cc dd ee ff gg hh ii jj kk";
 
@@ -1071,8 +1064,8 @@ public class TestIndexWriter extends Luc
 
       if (0 == i % 4) {
         writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
-        LogMergePolicy lmp2 = (LogMergePolicy) writer.getConfig().getMergePolicy();
-        lmp2.setUseCompoundFile(false);
+        //LogMergePolicy lmp2 = (LogMergePolicy) writer.getConfig().getMergePolicy();
+        //lmp2.setUseCompoundFile(false);
         writer.optimize();
         writer.close();
       }
@@ -1097,10 +1090,7 @@ public class TestIndexWriter extends Luc
           newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
               setOpenMode(OpenMode.CREATE).
               setMaxBufferedDocs(2).
-              // have to use compound file to prevent running out of
-              // descripters when newDirectory returns a file-system
-              // backed directory:
-              setMergePolicy(newLogMergePolicy(false, 10))
+              setMergePolicy(newLogMergePolicy())
       );
       writer.setInfoStream(VERBOSE ? System.out : null);
 
@@ -1166,7 +1156,7 @@ public class TestIndexWriter extends Luc
         reader.close();
 
         // Reopen
-        writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+        writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
         writer.setInfoStream(VERBOSE ? System.out : null);
       }
       writer.close();
@@ -1236,7 +1226,7 @@ public class TestIndexWriter extends Luc
     writer.close();
     dir.close();
   }
-  
+
   // LUCENE-325: test expungeDeletes, when 2 singular merges
   // are required
   public void testExpungeDeletes() throws IOException {
@@ -1244,8 +1234,8 @@ public class TestIndexWriter extends Luc
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer())
         .setMaxBufferedDocs(2).setRAMBufferSizeMB(
-            IndexWriterConfig.DISABLE_AUTO_FLUSH));
-
+            IndexWriterConfig.DISABLE_AUTO_FLUSH));
+    writer.setInfoStream(VERBOSE ? System.out : null);
     Document document = new Document();
 
     document = new Document();
@@ -1268,7 +1258,7 @@ public class TestIndexWriter extends Luc
     assertEquals(8, ir.numDocs());
     ir.close();
 
-    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
     assertEquals(8, writer.numDocs());
     assertEquals(10, writer.maxDoc());
     writer.expungeDeletes();
@@ -1431,7 +1421,6 @@ public class TestIndexWriter extends Luc
     w.close();
 
     IndexReader ir = IndexReader.open(dir, true);
-    assertEquals(1, ir.maxDoc());
     assertEquals(0, ir.numDocs());
     ir.close();
 
@@ -2010,7 +1999,6 @@ public class TestIndexWriter extends Luc
             }
             IndexWriterConfig conf = newIndexWriterConfig(
                                                           TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2);
-            ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
             w = new IndexWriter(dir, conf);
 
             Document doc = new Document();
@@ -2229,8 +2217,6 @@ public class TestIndexWriter extends Luc
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer()));
-    LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
-    lmp.setUseCompoundFile(false);
     ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
     writer.setInfoStream(new PrintStream(bos));
     writer.addDocument(new Document());
@@ -2247,7 +2233,8 @@ public class TestIndexWriter extends Luc
     final int NUM_THREADS = 5;
     final double RUN_SEC = 0.5;
     final Directory dir = newDirectory();
-    final RandomIndexWriter w = new RandomIndexWriter(random, dir);
+    final RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
     _TestUtil.reduceOpenFiles(w.w);
     w.commit();
     final AtomicBoolean failed = new AtomicBoolean();
@@ -2589,7 +2576,7 @@ public class TestIndexWriter extends Luc
           count++;
         }
       }
-      assertTrue("flush happened too quickly during " + (doIndexing ? "indexing" : "deleting") + " count=" + count, count > 2500);
+      assertTrue("flush happened too quickly during " + (doIndexing ? "indexing" : "deleting") + " count=" + count, count > 1500);
     }
     w.close();
     dir.close();
@@ -2629,9 +2616,11 @@ public class TestIndexWriter extends Luc
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer())
-        .setMaxBufferedDocs(2));
+        .setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
     String[] files = dir.listAll();
 
+    writer.setInfoStream(VERBOSE ? System.out : null);
+
     // Creating over empty dir should not create any files,
     // or, at most the write.lock file
     final int extraFileCount;
@@ -2653,8 +2642,9 @@ public class TestIndexWriter extends Luc
     doc = new Document();
     doc.add(newField("c", "val", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
     writer.addDocument(doc);
+
     // The second document should cause a flush.
-    assertTrue("flush should have occurred and files created", dir.listAll().length > 5 + extraFileCount);
+    assertTrue("flush should have occurred and files should have been created", dir.listAll().length > 5 + extraFileCount);
 
     // After rollback, IW should remove all files
     writer.rollback();
@@ -2794,6 +2784,9 @@ public class TestIndexWriter extends Luc
       final String id = ""+i;
       idField.setValue(id);
       docs.put(id, doc);
+      if (VERBOSE) {
+        System.out.println("TEST: add doc id=" + id);
+      }
 
       for(int field: fieldIDs) {
         final String s;
@@ -2812,7 +2805,7 @@ public class TestIndexWriter extends Luc
       if (rand.nextInt(5) == 3 && i > 0) {
         final String delID = ""+rand.nextInt(i);
         if (VERBOSE) {
-          System.out.println("TEST: delete doc " + delID);
+          System.out.println("TEST: delete doc id=" + delID);
         }
         w.deleteDocuments(new Term("id", delID));
         docs.remove(delID);
@@ -2827,7 +2820,7 @@ public class TestIndexWriter extends Luc
 
       for(int x=0;x<2;x++) {
         IndexReader r = w.getReader();
-        IndexSearcher s = new IndexSearcher(r);
+        IndexSearcher s = newSearcher(r);
 
         if (VERBOSE) {
           System.out.println("TEST: cycle x=" + x + " r=" + r);
@@ -2835,6 +2828,9 @@ public class TestIndexWriter extends Luc
 
         for(int iter=0;iter<1000*RANDOM_MULTIPLIER;iter++) {
           String testID = idsList[rand.nextInt(idsList.length)];
+          if (VERBOSE) {
+            System.out.println("TEST: test id=" + testID);
+          }
           TopDocs hits = s.search(new TermQuery(new Term("id", testID)), 1);
           assertEquals(1, hits.totalHits);
           Document doc = r.document(hits.scoreDocs[0].doc);
@@ -2843,6 +2839,7 @@ public class TestIndexWriter extends Luc
             assertEquals("doc " + testID + ", field f" + fieldCount + " is wrong", docExp.get("f"+i),  doc.get("f"+i));
           }
         }
+        s.close();
         r.close();
         w.optimize();
       }
@@ -2854,7 +2851,7 @@ public class TestIndexWriter extends Luc
   public void testNoUnwantedTVFiles() throws Exception {
 
     Directory dir = newDirectory();
-    IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setRAMBufferSizeMB(0.01));
+    IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setRAMBufferSizeMB(0.01).setMergePolicy(newLogMergePolicy()));
     ((LogMergePolicy) indexWriter.getConfig().getMergePolicy()).setUseCompoundFile(false);
 
     String BIG="alskjhlaksjghlaksjfhalksvjepgjioefgjnsdfjgefgjhelkgjhqewlrkhgwlekgrhwelkgjhwelkgrhwlkejg";
@@ -2883,4 +2880,36 @@ public class TestIndexWriter extends Luc
 
     dir.close();
   }
+
+  public void testDeleteAllSlowly() throws Exception {
+    final Directory dir = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random, dir);
+    final int NUM_DOCS = 1000 * RANDOM_MULTIPLIER;
+    final List<Integer> ids = new ArrayList<Integer>(NUM_DOCS);
+    for(int id=0;id<NUM_DOCS;id++) {
+      ids.add(id);
+    }
+    Collections.shuffle(ids, random);
+    for(int id : ids) {
+      Document doc = new Document();
+      doc.add(newField("id", ""+id, Field.Index.NOT_ANALYZED));
+      w.addDocument(doc);
+    }
+    Collections.shuffle(ids, random);
+    int upto = 0;
+    while(upto < ids.size()) {
+      final int left = ids.size() - upto;
+      final int inc = Math.min(left, _TestUtil.nextInt(random, 1, 20));
+      final int limit = upto + inc;
+      while(upto < limit) {
+        w.deleteDocuments(new Term("id", ""+ids.get(upto++)));
+      }
+      final IndexReader r = w.getReader();
+      assertEquals(NUM_DOCS - upto, r.numDocs());
+      r.close();
+    }
+
+    w.close();
+    dir.close();
+  }
 }

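Most hunks in this file add an explicit .setMergePolicy(newLogMergePolicy()), and two
spots stop casting the default policy to LogMergePolicy. The likely reason is that the
randomized test configuration no longer guarantees a LogMergePolicy by default, so any
test that performs that cast, or that asserts on deterministic segment and doc counts,
must now request one. A minimal sketch of the safe pattern, using only helpers that
appear in this patch:

    // Pin a LogMergePolicy before casting to it; with a randomized default
    // merge policy the cast below could otherwise throw ClassCastException.
    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
        .setMaxBufferedDocs(2)
        .setMergePolicy(newLogMergePolicy());
    ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
    IndexWriter iw = new IndexWriter(dir, conf);
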
Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterConfig.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterConfig.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterConfig.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterConfig.java Tue Feb 22 01:00:39 2011
@@ -224,6 +224,23 @@ public class TestIndexWriterConfig exten
       // this is expected
     }
 
+    // Test setReaderTermsIndexDivisor
+    try {
+      conf.setReaderTermsIndexDivisor(0);
+      fail("setting termsIndexDivisor to 0 should not have succeeded");
+    } catch (IllegalArgumentException e) {
+      // this is expected
+    }
+    
+    // Setting to -1 is ok
+    conf.setReaderTermsIndexDivisor(-1);
+    try {
+      conf.setReaderTermsIndexDivisor(-2);
+      fail("setting termsIndexDivisor to < -1 should not have succeeded");
+    } catch (IllegalArgumentException e) {
+      // this is expected
+    }
+    
     assertEquals(IndexWriterConfig.DEFAULT_MAX_THREAD_STATES, conf.getMaxThreadStates());
     conf.setIndexerThreadPool(new ThreadAffinityDocumentsWriterThreadPool(5));
     assertEquals(5, conf.getMaxThreadStates());

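The new assertions pin down the accepted range of
IndexWriterConfig.setReaderTermsIndexDivisor: any value >= 1 is valid, -1 is valid
(by Lucene convention it disables loading the terms index for readers the writer
opens), while 0 and anything below -1 must throw IllegalArgumentException. A
hypothetical guard consistent with these tests, shown as an illustration only and
not as the actual IndexWriterConfig source:

    // Illustration of the validation the tests above imply.
    static int checkReaderTermsIndexDivisor(int divisor) {
      if (divisor == 0 || divisor < -1) {
        throw new IllegalArgumentException(
            "divisor must be >= 1, or -1 to disable the terms index (got: " + divisor + ")");
      }
      return divisor; // N >= 1 loads every Nth indexed term; -1 loads no terms index
    }
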
Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java Tue Feb 22 01:00:39 2011
@@ -81,7 +81,7 @@ public class TestIndexWriterDelete exten
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
         .setMaxBufferedDeleteTerms(2));
-
+    modifier.setInfoStream(VERBOSE ? System.out : null);
     int id = 0;
     int value = 100;
 
@@ -157,8 +157,6 @@ public class TestIndexWriterDelete exten
       assertEquals(0, modifier.getSegmentCount());
       modifier.commit();
 
-      modifier.commit();
-
       IndexReader reader = IndexReader.open(dir, true);
       assertEquals(1, reader.numDocs());
 
@@ -567,7 +565,7 @@ public class TestIndexWriterDelete exten
                + e);
         }
 
-        IndexSearcher searcher = new IndexSearcher(newReader);
+        IndexSearcher searcher = newSearcher(newReader);
         ScoreDoc[] hits = null;
         try {
           hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
@@ -684,7 +682,7 @@ public class TestIndexWriterDelete exten
 
     MockDirectoryWrapper dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-                                                                     TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false));
+                                                                     TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false).setMergePolicy(newLogMergePolicy()));
     modifier.setInfoStream(VERBOSE ? System.out : null);
 
     LogMergePolicy lmp = (LogMergePolicy) modifier.getConfig().getMergePolicy();

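Two small cleanups recur in this file: a redundant second commit() is dropped, and the
writer's internal diagnostics are routed through the test framework's VERBOSE flag so
normal runs stay quiet. The wiring, as it appears throughout this patch:

    IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))
        .setMaxBufferedDocs(2)
        .setMaxBufferedDeleteTerms(2));
    // Print IndexWriter's internal log only when the test runs in verbose mode.
    modifier.setInfoStream(VERBOSE ? System.out : null);
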
Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java Tue Feb 22 01:00:39 2011
@@ -289,6 +289,7 @@ public class TestIndexWriterExceptions e
   public void testExceptionDocumentsWriterInit() throws IOException {
     Directory dir = newDirectory();
     MockIndexWriter2 w = new MockIndexWriter2(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    w.setInfoStream(VERBOSE ? System.out : null);
     Document doc = new Document();
     doc.add(newField("field", "a field", Field.Store.YES,
                       Field.Index.ANALYZED));
@@ -360,7 +361,7 @@ public class TestIndexWriterExceptions e
   public void testExceptionOnMergeInit() throws IOException {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
-      .setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler());
+      .setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()).setMergePolicy(newLogMergePolicy());
     ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
     MockIndexWriter3 w = new MockIndexWriter3(dir, conf);
     w.doFail = true;
@@ -528,7 +529,7 @@ public class TestIndexWriterExceptions e
         System.out.println("TEST: cycle i=" + i);
       }
       MockDirectoryWrapper dir = newDirectory();
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer));
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setMergePolicy(newLogMergePolicy()));
       writer.setInfoStream(VERBOSE ? System.out : null);
 
       // don't allow a sudden merge to clean up the deleted
@@ -568,24 +569,25 @@ public class TestIndexWriterExceptions e
         System.out.println("TEST: open reader");
       }
       IndexReader reader = IndexReader.open(dir, true);
-      int expected = 3+(1-i)*2;
-      assertEquals(expected, reader.docFreq(new Term("contents", "here")));
-      assertEquals(expected, reader.maxDoc());
-      int numDel = 0;
-      final Bits delDocs = MultiFields.getDeletedDocs(reader);
-      assertNotNull(delDocs);
-      for(int j=0;j<reader.maxDoc();j++) {
-        if (delDocs.get(j))
-          numDel++;
-        else {
-          reader.document(j);
-          reader.getTermFreqVectors(j);
+      if (i == 0) { 
+        int expected = 5;
+        assertEquals(expected, reader.docFreq(new Term("contents", "here")));
+        assertEquals(expected, reader.maxDoc());
+        int numDel = 0;
+        final Bits delDocs = MultiFields.getDeletedDocs(reader);
+        assertNotNull(delDocs);
+        for(int j=0;j<reader.maxDoc();j++) {
+          if (delDocs.get(j))
+            numDel++;
+          else {
+            reader.document(j);
+            reader.getTermFreqVectors(j);
+          }
         }
+        assertEquals(1, numDel);
       }
       reader.close();
 
-      assertEquals(1, numDel);
-
       writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
           analyzer).setMaxBufferedDocs(10));
       doc = new Document();
@@ -597,10 +599,10 @@ public class TestIndexWriterExceptions e
       writer.close();
 
       reader = IndexReader.open(dir, true);
-      expected = 19+(1-i)*2;
+      int expected = 19+(1-i)*2;
       assertEquals(expected, reader.docFreq(new Term("contents", "here")));
       assertEquals(expected, reader.maxDoc());
-      numDel = 0;
+      int numDel = 0;
       assertNull(MultiFields.getDeletedDocs(reader));
       for(int j=0;j<reader.maxDoc();j++) {
         reader.document(j);
@@ -844,7 +846,7 @@ public class TestIndexWriterExceptions e
 
   public void testOptimizeExceptions() throws IOException {
     Directory startDir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2);
+    IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
     ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(100);
     IndexWriter w = new IndexWriter(startDir, conf);
     for(int i=0;i<27;i++)
@@ -982,7 +984,8 @@ public class TestIndexWriterExceptions e
   // latest segments file and make sure we get an
   // IOException trying to open the index:
   public void testSimulatedCorruptIndex1() throws IOException {
-      Directory dir = newDirectory();
+      MockDirectoryWrapper dir = newDirectory();
+      dir.setCheckIndexOnClose(false); // we are corrupting it!
 
       IndexWriter writer = null;
 
@@ -1029,8 +1032,8 @@ public class TestIndexWriterExceptions e
   // files and make sure we get an IOException trying to
   // open the index:
   public void testSimulatedCorruptIndex2() throws IOException {
-      Directory dir = newDirectory();
-
+      MockDirectoryWrapper dir = newDirectory();
+      dir.setCheckIndexOnClose(false); // we are corrupting it!
       IndexWriter writer = null;
 
       writer  = new IndexWriter(

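Both simulated-corruption tests now declare the directory as MockDirectoryWrapper and
call setCheckIndexOnClose(false): MockDirectoryWrapper runs CheckIndex when it is
closed, which would correctly fail on an index the test has corrupted on purpose. A
sketch of the resulting setup:

    MockDirectoryWrapper dir = newDirectory();
    dir.setCheckIndexOnClose(false); // the test corrupts the index, so skip the close-time check
    // ... build a small index, then damage or remove some of its files ...
    // ... assert that IndexReader.open(dir, true) throws an IOException ...
    dir.close(); // no CheckIndex pass runs here
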
Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java Tue Feb 22 01:00:39 2011
@@ -104,7 +104,7 @@ public class TestIndexWriterMergePolicy 
         dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
             setMaxBufferedDocs(10).
-            setMergePolicy(newLogMergePolicy())
+            setMergePolicy(newInOrderLogMergePolicy())
     );
 
     for (int i = 0; i < 250; i++) {

Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterMerging.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterMerging.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterMerging.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterMerging.java Tue Feb 22 01:00:39 2011
@@ -58,7 +58,7 @@ public class TestIndexWriterMerging exte
     IndexWriter writer = new IndexWriter(
         merged,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
-            setMergePolicy(newLogMergePolicy(2))
+            setMergePolicy(newInOrderLogMergePolicy(2))
     );
     writer.setInfoStream(VERBOSE ? System.out : null);
     writer.addIndexes(indexA, indexB);
@@ -66,7 +66,6 @@ public class TestIndexWriterMerging exte
     writer.close();
 
     fail = verifyIndex(merged, 0);
-    merged.close();
 
     assertFalse("The merged index is invalid", fail);
     indexA.close();
@@ -102,7 +101,7 @@ public class TestIndexWriterMerging exte
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
             setOpenMode(OpenMode.CREATE).
             setMaxBufferedDocs(2).
-            setMergePolicy(newLogMergePolicy(2))
+            setMergePolicy(newInOrderLogMergePolicy(2))
     );
 
     for (int i = start; i < (start + numDocs); i++)

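Where a test asserts on absolute document numbers after merging (verifyIndex above
walks docIDs in insertion order), the patch swaps newLogMergePolicy for
newInOrderLogMergePolicy, presumably so that merges cannot pick segments out of order
and shift docIDs. The pattern, copied from the hunk above:

    // Pin an in-order merge policy so merges preserve insertion-order docIDs.
    IndexWriter writer = new IndexWriter(
        merged,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
            setMergePolicy(newInOrderLogMergePolicy(2))
    );
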
Modified: lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java?rev=1073192&r1=1073191&r2=1073192&view=diff
==============================================================================
--- lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java (original)
+++ lucene/dev/branches/realtime_search/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java Tue Feb 22 01:00:39 2011
@@ -177,7 +177,7 @@ public class TestIndexWriterOnDiskFull e
     IndexReader reader = IndexReader.open(startDir, true);
     assertEquals("first docFreq", 57, reader.docFreq(searchTerm));
     
-    IndexSearcher searcher = new IndexSearcher(reader);
+    IndexSearcher searcher = newSearcher(reader);
     ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
     assertEquals("first number of hits", 57, hits.length);
     searcher.close();
@@ -232,7 +232,7 @@ public class TestIndexWriterOnDiskFull e
         
         // Make a new dir that will enforce disk usage:
         MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
-        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
         IOException err = null;
         writer.setInfoStream(VERBOSE ? System.out : null);
 
@@ -360,7 +360,7 @@ public class TestIndexWriterOnDiskFull e
             }
           }
           
-          searcher = new IndexSearcher(reader);
+          searcher = newSearcher(reader);
           try {
             hits = searcher.search(new TermQuery(searchTerm), null, END_COUNT).scoreDocs;
           } catch (IOException e) {
@@ -401,10 +401,10 @@ public class TestIndexWriterOnDiskFull e
           // required is at most 2X total input size of
           // indices so let's make sure:
           assertTrue("max free Directory space required exceeded 1X the total input index sizes during " + methodName +
-              ": max temp usage = " + (dir.getMaxUsedSizeInBytes()-startDiskUsage) + " bytes; " +
-              "starting disk usage = " + startDiskUsage + " bytes; " +
-              "input index disk usage = " + inputDiskUsage + " bytes",
-              (dir.getMaxUsedSizeInBytes()-startDiskUsage) < 2*(startDiskUsage + inputDiskUsage));
+                     ": max temp usage = " + (dir.getMaxUsedSizeInBytes()-startDiskUsage) + " bytes vs limit=" + (2*(startDiskUsage + inputDiskUsage)) +
+                     "; starting disk usage = " + startDiskUsage + " bytes; " +
+                     "input index disk usage = " + inputDiskUsage + " bytes",
+                     (dir.getMaxUsedSizeInBytes()-startDiskUsage) < 2*(startDiskUsage + inputDiskUsage));
         }
         
         // Make sure we don't hit disk full during close below:
@@ -464,11 +464,11 @@ public class TestIndexWriterOnDiskFull e
             setReaderPooling(true).
             setMergePolicy(newLogMergePolicy(2))
     );
+    _TestUtil.keepFullyDeletedSegments(w);
 
     Document doc = new Document();
     doc.add(newField("f", "doctor who", Field.Store.YES, Field.Index.ANALYZED));
     w.addDocument(doc);
-
     w.commit();
 
     w.deleteDocuments(new Term("f", "who"));