Posted to commits@lucene.apache.org by si...@apache.org on 2019/03/20 10:28:16 UTC

[lucene-solr] branch master updated: LUCENE-8671: Load FST off-heap if reader is not opened from an index writer (#610)

This is an automated email from the ASF dual-hosted git repository.

simonw pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git


The following commit(s) were added to refs/heads/master by this push:
     new 14175c4  LUCENE-8671: Load FST off-heap if reader is not opened from an index writer (#610)
14175c4 is described below

commit 14175c46d2cd46f3e50c5e73e90534f71009900f
Author: Simon Willnauer <si...@apache.org>
AuthorDate: Wed Mar 20 11:28:10 2019 +0100

    LUCENE-8671: Load FST off-heap if reader is not opened from an index writer (#610)
    
    Today we never load FSTs of ID-like fields off-heap since we need
    very fast access for updates. Yet, a reader that is not opened from
    an IndexWriter can also leave the FST on disk. This change adds
    this information to SegmentReadState to allow the postings format
    to make this decision without configuration.
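
For readers who want the gist without walking the whole diff: the sketch below condenses the
check that FieldReader now performs (see the FieldReader.java hunk further down). It is an
illustrative helper, not code from this patch; the class name FstLoadingSketch and the method
signature are made up for the example, while FST, ByteSequenceOutputs, OffHeapFSTStore and
ByteBufferIndexInput are the real Lucene types used in the diff.

import java.io.IOException;

import org.apache.lucene.store.ByteBufferIndexInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.fst.ByteSequenceOutputs;
import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.OffHeapFSTStore;

// Illustrative helper (not part of this patch): condenses the decision that
// FieldReader makes when loading the terms index FST.
final class FstLoadingSketch {

  static FST<BytesRef> loadTermsIndex(IndexInput clone, int docCount, long sumDocFreq,
                                      boolean openedFromWriter) throws IOException {
    // Off-heap loading requires a memory-mapped input (ByteBufferIndexInput).
    // An ID-like field (docCount == sumDocFreq) is kept on-heap only when the
    // reader was opened from an IndexWriter, i.e. when fast per-term lookups
    // for updates are expected; plain read-only readers go off-heap either way.
    boolean offHeap = clone instanceof ByteBufferIndexInput
        && (docCount != sumDocFreq || openedFromWriter == false);
    if (offHeap) {
      return new FST<>(clone, ByteSequenceOutputs.getSingleton(), new OffHeapFSTStore());
    } else {
      return new FST<>(clone, ByteSequenceOutputs.getSingleton());
    }
  }
}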
---
 lucene/CHANGES.txt                                 |  5 ++
 .../codecs/blocktree/BlockTreeTermsReader.java     |  2 +-
 .../lucene/codecs/blocktree/FieldReader.java       | 15 ++++-
 .../java/org/apache/lucene/index/CheckIndex.java   |  2 +-
 .../apache/lucene/index/DefaultIndexingChain.java  |  2 +-
 .../org/apache/lucene/index/ReadersAndUpdates.java |  4 +-
 .../apache/lucene/index/SegmentCoreReaders.java    |  4 +-
 .../org/apache/lucene/index/SegmentDocValues.java  |  2 +-
 .../org/apache/lucene/index/SegmentMerger.java     |  2 +-
 .../org/apache/lucene/index/SegmentReadState.java  | 13 +++-
 .../org/apache/lucene/index/SegmentReader.java     |  4 +-
 .../lucene/index/StandardDirectoryReader.java      |  4 +-
 .../codecs/lucene50/TestBlockPostingsFormat.java   | 71 ++++++++++++++++++++++
 .../test/org/apache/lucene/index/TestCodecs.java   |  4 +-
 .../lucene/index/TestDemoParallelLeafReader.java   |  2 +-
 .../src/test/org/apache/lucene/index/TestDoc.java  |  6 +-
 .../apache/lucene/index/TestDocumentWriter.java    |  8 +--
 .../index/TestIndexWriterThreadsToSegments.java    |  2 +-
 .../org/apache/lucene/index/TestSegmentMerger.java |  6 +-
 .../org/apache/lucene/index/TestSegmentReader.java |  2 +-
 .../apache/lucene/index/TestSegmentTermDocs.java   |  6 +-
 .../lucene/index/BaseIndexFileFormatTestCase.java  |  2 +-
 .../apache/lucene/index/RandomPostingsTester.java  |  2 +-
 23 files changed, 132 insertions(+), 38 deletions(-)

diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index a1ceb1d..5638c56 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -68,6 +68,11 @@ Improvements
 * LUCENE-8631: The Korean's user dictionary now picks the longest-matching word and discards
   the other matches. (Yeongsu Kim via Jim Ferenczi)
 
+Changes in Runtime Behavior
+
+* LUCENE-8671: Load FST off-heap also for ID-like fields if reader is not opened
+  from an IndexWriter. (Simon Willnauer)
+
 Other
 
 * LUCENE-8680: Refactor EdgeTree#relateTriangle method. (Ignacio Vera)
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsReader.java b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsReader.java
index 0076f14..2f7689c 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsReader.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsReader.java
@@ -193,7 +193,7 @@ public final class BlockTreeTermsReader extends FieldsProducer {
         final long indexStartFP = indexIn.readVLong();
         FieldReader previous = fields.put(fieldInfo.name,       
                                           new FieldReader(this, fieldInfo, numTerms, rootCode, sumTotalTermFreq, sumDocFreq, docCount,
-                                                          indexStartFP, longsSize, indexIn, minTerm, maxTerm));
+                                                          indexStartFP, longsSize, indexIn, minTerm, maxTerm, state.openedFromWriter));
         if (previous != null) {
           throw new CorruptIndexException("duplicate field: " + fieldInfo.name, termsIn);
         }
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/FieldReader.java b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/FieldReader.java
index 41073fd..751ea42 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/FieldReader.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/FieldReader.java
@@ -63,10 +63,11 @@ public final class FieldReader extends Terms implements Accountable {
   final BlockTreeTermsReader parent;
 
   final FST<BytesRef> index;
+  final boolean isFSTOffHeap;
   //private boolean DEBUG;
 
   FieldReader(BlockTreeTermsReader parent, FieldInfo fieldInfo, long numTerms, BytesRef rootCode, long sumTotalTermFreq, long sumDocFreq, int docCount,
-              long indexStartFP, int longsSize, IndexInput indexIn, BytesRef minTerm, BytesRef maxTerm) throws IOException {
+              long indexStartFP, int longsSize, IndexInput indexIn, BytesRef minTerm, BytesRef maxTerm, boolean openedFromWriter) throws IOException {
     assert numTerms > 0;
     this.fieldInfo = fieldInfo;
     //DEBUG = BlockTreeTermsReader.DEBUG && fieldInfo.name.equals("id");
@@ -91,7 +92,8 @@ public final class FieldReader extends Terms implements Accountable {
       clone.seek(indexStartFP);
       // Initialize FST offheap if index is MMapDirectory and
       // docCount != sumDocFreq implying field is not primary key
-      if (clone instanceof ByteBufferIndexInput && this.docCount != this.sumDocFreq) {
+      isFSTOffHeap = clone instanceof ByteBufferIndexInput && ((this.docCount != this.sumDocFreq) || openedFromWriter == false);
+      if (isFSTOffHeap) {
         index = new FST<>(clone, ByteSequenceOutputs.getSingleton(), new OffHeapFSTStore());
       } else {
         index = new FST<>(clone, ByteSequenceOutputs.getSingleton());
@@ -108,6 +110,7 @@ public final class FieldReader extends Terms implements Accountable {
       */
     } else {
       index = null;
+      isFSTOffHeap = false;
     }
   }
 
@@ -212,4 +215,12 @@ public final class FieldReader extends Terms implements Accountable {
   public String toString() {
     return "BlockTreeTerms(seg=" + parent.segment +" terms=" + numTerms + ",postings=" + sumDocFreq + ",positions=" + sumTotalTermFreq + ",docs=" + docCount + ")";
   }
+
+  /**
+   * Returns <code>true</code> iff the FST is read off-heap.
+   */
+  public boolean isFstOffHeap() {
+    return isFSTOffHeap;
+  }
+
 }
diff --git a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
index 8193b5f..fcda464 100644
--- a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
+++ b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java
@@ -673,7 +673,7 @@ public final class CheckIndex implements Closeable {
         long startOpenReaderNS = System.nanoTime();
         if (infoStream != null)
           infoStream.print("    test: open reader.........");
-        reader = new SegmentReader(info, sis.getIndexCreatedVersionMajor(), IOContext.DEFAULT);
+        reader = new SegmentReader(info, sis.getIndexCreatedVersionMajor(), false, IOContext.DEFAULT);
         msg(infoStream, String.format(Locale.ROOT, "OK [took %.3f sec]", nsToSec(System.nanoTime()-startOpenReaderNS)));
 
         segInfoStat.openReaderPassed = true;
diff --git a/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java b/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java
index 8c4145f..f507157 100644
--- a/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java
+++ b/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java
@@ -131,7 +131,7 @@ final class DefaultIndexingChain extends DocConsumer {
     if (docState.infoStream.isEnabled("IW")) {
       docState.infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to write norms");
     }
-    SegmentReadState readState = new SegmentReadState(state.directory, state.segmentInfo, state.fieldInfos, IOContext.READ, state.segmentSuffix);
+    SegmentReadState readState = new SegmentReadState(state.directory, state.segmentInfo, state.fieldInfos, true, IOContext.READ, state.segmentSuffix);
     
     t0 = System.nanoTime();
     writeDocValues(state, sortMap);
diff --git a/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java b/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
index 9afff9c..0f47ed2 100644
--- a/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
+++ b/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
@@ -169,7 +169,7 @@ final class ReadersAndUpdates {
   public synchronized SegmentReader getReader(IOContext context) throws IOException {
     if (reader == null) {
       // We steal returned ref:
-      reader = new SegmentReader(info, indexCreatedVersionMajor, context);
+      reader = new SegmentReader(info, indexCreatedVersionMajor, true, context);
       pendingDeletes.onNewReader(reader, info);
     }
 
@@ -536,7 +536,7 @@ final class ReadersAndUpdates {
       // IndexWriter.commitMergedDeletes).
       final SegmentReader reader;
       if (this.reader == null) {
-        reader = new SegmentReader(info, indexCreatedVersionMajor, IOContext.READONCE);
+        reader = new SegmentReader(info, indexCreatedVersionMajor, true, IOContext.READONCE);
         pendingDeletes.onNewReader(reader, info);
       } else {
         reader = this.reader;
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java b/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java
index 0e7b9e4..21e6c82 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java
@@ -89,7 +89,7 @@ final class SegmentCoreReaders {
   private final Set<IndexReader.ClosedListener> coreClosedListeners = 
       Collections.synchronizedSet(new LinkedHashSet<IndexReader.ClosedListener>());
   
-  SegmentCoreReaders(Directory dir, SegmentCommitInfo si, IOContext context) throws IOException {
+  SegmentCoreReaders(Directory dir, SegmentCommitInfo si, boolean openedFromWriter, IOContext context) throws IOException {
 
     final Codec codec = si.info.getCodec();
     final Directory cfsDir; // confusing name: if (cfs) it's the cfsdir, otherwise it's the segment's directory.
@@ -107,7 +107,7 @@ final class SegmentCoreReaders {
 
       coreFieldInfos = codec.fieldInfosFormat().read(cfsDir, si.info, "", context);
       
-      final SegmentReadState segmentReadState = new SegmentReadState(cfsDir, si.info, coreFieldInfos, context);
+      final SegmentReadState segmentReadState = new SegmentReadState(cfsDir, si.info, coreFieldInfos, openedFromWriter, context);
       final PostingsFormat format = codec.postingsFormat();
       // Ask codec for its Fields
       fields = format.fieldsProducer(segmentReadState);
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java
index aa44f8a..fee0050 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java
@@ -46,7 +46,7 @@ final class SegmentDocValues {
     }
 
     // set SegmentReadState to list only the fields that are relevant to that gen
-    SegmentReadState srs = new SegmentReadState(dvDir, si.info, infos, IOContext.READ, segmentSuffix);
+    SegmentReadState srs = new SegmentReadState(dvDir, si.info, infos, false, IOContext.READ, segmentSuffix);
     DocValuesFormat dvFormat = si.info.getCodec().docValuesFormat();
     return new RefCount<DocValuesProducer>(dvFormat.fieldsProducer(srs)) {
       @SuppressWarnings("synthetic-access")
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java b/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java
index 6554cc5..a91cea2 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java
@@ -112,7 +112,7 @@ final class SegmentMerger {
     final SegmentWriteState segmentWriteState = new SegmentWriteState(mergeState.infoStream, directory, mergeState.segmentInfo,
                                                                       mergeState.mergeFieldInfos, null, context);
     final SegmentReadState segmentReadState = new SegmentReadState(directory, mergeState.segmentInfo, mergeState.mergeFieldInfos,
-                                                                   IOContext.READ, segmentWriteState.segmentSuffix);
+        true, IOContext.READ, segmentWriteState.segmentSuffix);
 
     if (mergeState.mergeFieldInfos.hasNorms()) {
       if (mergeState.infoStream.isEnabled("SM")) {
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentReadState.java b/lucene/core/src/java/org/apache/lucene/index/SegmentReadState.java
index 73f5042..0c66762 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentReadState.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentReadState.java
@@ -49,23 +49,29 @@ public class SegmentReadState {
    *  {@link IndexFileNames#segmentFileName(String,String,String)}). */
   public final String segmentSuffix;
 
+  /**
+   * True iff this SegmentReadState is opened from an IndexWriter.
+   */
+  public final boolean openedFromWriter;
+
   /** Create a {@code SegmentReadState}. */
   public SegmentReadState(Directory dir, SegmentInfo info,
-      FieldInfos fieldInfos, IOContext context) {
-    this(dir, info, fieldInfos,  context, "");
+                          FieldInfos fieldInfos, boolean openedFromWriter, IOContext context) {
+    this(dir, info, fieldInfos, openedFromWriter, context, "");
   }
   
   /** Create a {@code SegmentReadState}. */
   public SegmentReadState(Directory dir,
                           SegmentInfo info,
                           FieldInfos fieldInfos,
-                          IOContext context,
+                          boolean openedFromWriter, IOContext context,
                           String segmentSuffix) {
     this.directory = dir;
     this.segmentInfo = info;
     this.fieldInfos = fieldInfos;
     this.context = context;
     this.segmentSuffix = segmentSuffix;
+    this.openedFromWriter = openedFromWriter;
   }
 
   /** Create a {@code SegmentReadState}. */
@@ -75,6 +81,7 @@ public class SegmentReadState {
     this.segmentInfo = other.segmentInfo;
     this.fieldInfos = other.fieldInfos;
     this.context = other.context;
+    this.openedFromWriter = other.openedFromWriter;
     this.segmentSuffix = newSegmentSuffix;
   }
 }
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
index b368b96..c0ae337 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
@@ -72,7 +72,7 @@ public final class SegmentReader extends CodecReader {
    * @throws CorruptIndexException if the index is corrupt
    * @throws IOException if there is a low-level IO error
    */
-  SegmentReader(SegmentCommitInfo si, int createdVersionMajor, IOContext context) throws IOException {
+  SegmentReader(SegmentCommitInfo si, int createdVersionMajor, boolean openedFromWriter, IOContext context) throws IOException {
     this.si = si.clone();
     this.originalSi = si;
     this.metaData = new LeafMetaData(createdVersionMajor, si.info.getMinVersion(), si.info.getIndexSort());
@@ -80,7 +80,7 @@ public final class SegmentReader extends CodecReader {
     // We pull liveDocs/DV updates from disk:
     this.isNRT = false;
     
-    core = new SegmentCoreReaders(si.info.dir, si, context);
+    core = new SegmentCoreReaders(si.info.dir, si, openedFromWriter, context);
     segDocValues = new SegmentDocValues();
     
     boolean success = false;
diff --git a/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java b/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java
index 5b2b049..0c3f4d7 100644
--- a/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java
@@ -63,7 +63,7 @@ public final class StandardDirectoryReader extends DirectoryReader {
         boolean success = false;
         try {
           for (int i = sis.size()-1; i >= 0; i--) {
-            readers[i] = new SegmentReader(sis.info(i), sis.getIndexCreatedVersionMajor(), IOContext.READ);
+            readers[i] = new SegmentReader(sis.info(i), sis.getIndexCreatedVersionMajor(), false, IOContext.READ);
           }
 
           // This may throw CorruptIndexException if there are too many docs, so
@@ -176,7 +176,7 @@ public final class StandardDirectoryReader extends DirectoryReader {
         SegmentReader newReader;
         if (oldReader == null || commitInfo.info.getUseCompoundFile() != oldReader.getSegmentInfo().info.getUseCompoundFile()) {
           // this is a new reader; in case we hit an exception we can decRef it safely
-          newReader = new SegmentReader(commitInfo, infos.getIndexCreatedVersionMajor(), IOContext.READ);
+          newReader = new SegmentReader(commitInfo, infos.getIndexCreatedVersionMajor(), false, IOContext.READ);
           newReaders[i] = newReader;
         } else {
           if (oldReader.isNRT) {
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestBlockPostingsFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestBlockPostingsFormat.java
index dec5eca..094415b 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestBlockPostingsFormat.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestBlockPostingsFormat.java
@@ -18,6 +18,7 @@ package org.apache.lucene.codecs.lucene50;
 
 
 import java.io.IOException;
+import java.nio.file.Path;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
@@ -35,11 +36,15 @@ import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.Impact;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.store.ByteArrayDataInput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.MMapDirectory;
+import org.apache.lucene.store.SimpleFSDirectory;
 import org.apache.lucene.util.TestUtil;
 
 /**
@@ -52,6 +57,72 @@ public class TestBlockPostingsFormat extends BasePostingsFormatTestCase {
   protected Codec getCodec() {
     return codec;
   }
+
+  public void testFstOffHeap() throws IOException {
+    Path tempDir = createTempDir();
+    try (Directory d = MMapDirectory.open(tempDir)) {
+      try (IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random())))) {
+        DirectoryReader readerFromWriter = DirectoryReader.open(w);
+        for (int i = 0; i < 50; i++) {
+          Document doc = new Document();
+          doc.add(newStringField("id", "" + i, Field.Store.NO));
+          doc.add(newStringField("field", Character.toString((char) (97 + i)), Field.Store.NO));
+          doc.add(newStringField("field", Character.toString((char) (98 + i)), Field.Store.NO));
+          if (rarely()) {
+            w.addDocument(doc);
+          } else {
+            w.updateDocument(new Term("id", "" + i), doc);
+          }
+          if (random().nextBoolean()) {
+            w.commit();
+          }
+
+          if (random().nextBoolean()) {
+            DirectoryReader newReader = DirectoryReader.openIfChanged(readerFromWriter);
+            if (newReader != null) {
+              readerFromWriter.close();
+              readerFromWriter = newReader;
+            }
+            for (LeafReaderContext leaf : readerFromWriter.leaves()) {
+              FieldReader field = (FieldReader) leaf.reader().terms("field");
+              FieldReader id = (FieldReader) leaf.reader().terms("id");
+              assertFalse(id.isFstOffHeap());
+              assertTrue(field.isFstOffHeap());
+            }
+          }
+        }
+        readerFromWriter.close();
+
+        w.forceMerge(1);
+        try (DirectoryReader r = DirectoryReader.open(w)) {
+          assertEquals(1, r.leaves().size());
+          FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field");
+          FieldReader id = (FieldReader) r.leaves().get(0).reader().terms("id");
+          assertFalse(id.isFstOffHeap());
+          assertTrue(field.isFstOffHeap());
+        }
+        w.commit();
+        try (DirectoryReader r = DirectoryReader.open(d)) {
+          assertEquals(1, r.leaves().size());
+          FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field");
+          FieldReader id = (FieldReader) r.leaves().get(0).reader().terms("id");
+          assertTrue(id.isFstOffHeap());
+          assertTrue(field.isFstOffHeap());
+        }
+      }
+    }
+
+    try (Directory d = new SimpleFSDirectory(tempDir)) {
+      // test auto
+      try (DirectoryReader r = DirectoryReader.open(d)) {
+        assertEquals(1, r.leaves().size());
+        FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field");
+        FieldReader id = (FieldReader) r.leaves().get(0).reader().terms("id");
+        assertFalse(id.isFstOffHeap());
+        assertFalse(field.isFstOffHeap());
+      }
+    }
+  }
   
   /** Make sure the final sub-block(s) are not skipped. */
   public void testFinalBlock() throws Exception {
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java b/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java
index 6094ce6..f48ae6e 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java
@@ -222,7 +222,7 @@ public class TestCodecs extends LuceneTestCase {
     final SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, SEGMENT, 10000, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
     
     this.write(si, fieldInfos, dir, fields);
-    final FieldsProducer reader = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, newIOContext(random())));
+    final FieldsProducer reader = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, false, newIOContext(random())));
 
     final Iterator<String> fieldsEnum = reader.iterator();
     String fieldName = fieldsEnum.next();
@@ -282,7 +282,7 @@ public class TestCodecs extends LuceneTestCase {
     if (VERBOSE) {
       System.out.println("TEST: now read postings");
     }
-    final FieldsProducer terms = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, newIOContext(random())));
+    final FieldsProducer terms = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, false, newIOContext(random())));
 
     final Verify[] threads = new Verify[NUM_TEST_THREADS-1];
     for(int i=0;i<NUM_TEST_THREADS-1;i++) {
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java
index 80a089f..8f5e05b 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java
@@ -415,7 +415,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase {
 
             SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
             assert infos.size() == 1;
-            final LeafReader parLeafReader = new SegmentReader(infos.info(0), Version.LATEST.major, IOContext.DEFAULT);
+            final LeafReader parLeafReader = new SegmentReader(infos.info(0), Version.LATEST.major, false, IOContext.DEFAULT);
 
             //checkParallelReader(leaf, parLeafReader, schemaGen);
 
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDoc.java b/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
index d7eea7a..c0787fa 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
@@ -213,8 +213,8 @@ public class TestDoc extends LuceneTestCase {
   private SegmentCommitInfo merge(Directory dir, SegmentCommitInfo si1, SegmentCommitInfo si2, String merged, boolean useCompoundFile)
     throws Exception {
     IOContext context = newIOContext(random(), new IOContext(new MergeInfo(-1, -1, false, -1)));
-    SegmentReader r1 = new SegmentReader(si1, Version.LATEST.major, context);
-    SegmentReader r2 = new SegmentReader(si2, Version.LATEST.major, context);
+    SegmentReader r1 = new SegmentReader(si1, Version.LATEST.major, false, context);
+    SegmentReader r2 = new SegmentReader(si2, Version.LATEST.major, false, context);
 
     final Codec codec = Codec.getDefault();
     TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.info.dir);
@@ -244,7 +244,7 @@ public class TestDoc extends LuceneTestCase {
 
   private void printSegment(PrintWriter out, SegmentCommitInfo si)
     throws Exception {
-    SegmentReader reader = new SegmentReader(si, Version.LATEST.major, newIOContext(random()));
+    SegmentReader reader = new SegmentReader(si, Version.LATEST.major, false, newIOContext(random()));
 
     for (int i = 0; i < reader.numDocs(); i++)
       out.println(reader.document(i));
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
index 54e6d26..8588cb2 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDocumentWriter.java
@@ -63,7 +63,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     SegmentCommitInfo info = writer.newestSegment();
     writer.close();
     //After adding the document, we should be able to read it back in
-    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
+    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, false, newIOContext(random()));
     assertTrue(reader != null);
     Document doc = reader.document(0);
     assertTrue(doc != null);
@@ -124,7 +124,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     writer.commit();
     SegmentCommitInfo info = writer.newestSegment();
     writer.close();
-    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
+    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, false, newIOContext(random()));
 
     PostingsEnum termPositions = MultiTerms.getTermPostingsEnum(reader, "repeated", new BytesRef("repeated"));
     assertTrue(termPositions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@@ -195,7 +195,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     writer.commit();
     SegmentCommitInfo info = writer.newestSegment();
     writer.close();
-    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
+    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, false, newIOContext(random()));
 
     PostingsEnum termPositions = MultiTerms.getTermPostingsEnum(reader, "f1", new BytesRef("a"));
     assertTrue(termPositions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@@ -237,7 +237,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     writer.commit();
     SegmentCommitInfo info = writer.newestSegment();
     writer.close();
-    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
+    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, false, newIOContext(random()));
 
     PostingsEnum termPositions = reader.postings(new Term("preanalyzed", "term1"), PostingsEnum.ALL);
     assertTrue(termPositions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterThreadsToSegments.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterThreadsToSegments.java
index 4339d3e..f8dbf81 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterThreadsToSegments.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterThreadsToSegments.java
@@ -332,7 +332,7 @@ public class TestIndexWriterThreadsToSegments extends LuceneTestCase {
               SegmentInfo si = TestUtil.getDefaultCodec().segmentInfoFormat().read(dir, segName, id, IOContext.DEFAULT);
               si.setCodec(codec);
               SegmentCommitInfo sci = new SegmentCommitInfo(si, 0, 0, -1, -1, -1);
-              SegmentReader sr = new SegmentReader(sci, Version.LATEST.major, IOContext.DEFAULT);
+              SegmentReader sr = new SegmentReader(sci, Version.LATEST.major, false, IOContext.DEFAULT);
               try {
                 thread0Count += sr.docFreq(new Term("field", "threadID0"));
                 thread1Count += sr.docFreq(new Term("field", "threadID1"));
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
index 610523a..0ac82ba 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
@@ -60,8 +60,8 @@ public class TestSegmentMerger extends LuceneTestCase {
     SegmentCommitInfo info1 = DocHelper.writeDoc(random(), merge1Dir, doc1);
     DocHelper.setupDoc(doc2);
     SegmentCommitInfo info2 = DocHelper.writeDoc(random(), merge2Dir, doc2);
-    reader1 = new SegmentReader(info1, Version.LATEST.major, newIOContext(random()));
-    reader2 = new SegmentReader(info2, Version.LATEST.major, newIOContext(random()));
+    reader1 = new SegmentReader(info1, Version.LATEST.major, false, newIOContext(random()));
+    reader2 = new SegmentReader(info2, Version.LATEST.major, false, newIOContext(random()));
   }
 
   @Override
@@ -98,7 +98,7 @@ public class TestSegmentMerger extends LuceneTestCase {
                                                          mergeState.segmentInfo,
                                                          0, 0, -1L, -1L, -1L),
                                                    Version.LATEST.major,
-                                                   newIOContext(random()));
+        false, newIOContext(random()));
     assertTrue(mergedReader != null);
     assertTrue(mergedReader.numDocs() == 2);
     Document newDoc1 = mergedReader.document(0);
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java
index 074a74a..2f45488 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java
@@ -43,7 +43,7 @@ public class TestSegmentReader extends LuceneTestCase {
     dir = newDirectory();
     DocHelper.setupDoc(testDoc);
     SegmentCommitInfo info = DocHelper.writeDoc(random(), dir, testDoc);
-    reader = new SegmentReader(info, Version.LATEST.major, IOContext.READ);
+    reader = new SegmentReader(info, Version.LATEST.major, false, IOContext.READ);
   }
   
   @Override
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
index 279e3ac..99853f7 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
@@ -54,7 +54,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
 
   public void testTermDocs() throws IOException {
     //After adding the document, we should be able to read it back in
-    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
+    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, false, newIOContext(random()));
     assertTrue(reader != null);
 
     TermsEnum terms = reader.terms(DocHelper.TEXT_FIELD_2_KEY).iterator();
@@ -72,7 +72,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
   public void testBadSeek() throws IOException {
     {
       //After adding the document, we should be able to read it back in
-      SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
+      SegmentReader reader = new SegmentReader(info, Version.LATEST.major, false, newIOContext(random()));
       assertTrue(reader != null);
       PostingsEnum termDocs = TestUtil.docs(random(), reader,
           "textField2",
@@ -85,7 +85,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
     }
     {
       //After adding the document, we should be able to read it back in
-      SegmentReader reader = new SegmentReader(info, Version.LATEST.major, newIOContext(random()));
+      SegmentReader reader = new SegmentReader(info, Version.LATEST.major, false, newIOContext(random()));
       assertTrue(reader != null);
       PostingsEnum termDocs = TestUtil.docs(random(), reader,
           "junk",
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
index 5eecc77..395b920 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
@@ -355,7 +355,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
                                                          segmentInfo, fieldInfos,
                                                          null, new IOContext(new FlushInfo(1, 20)));
     
-    SegmentReadState readState = new SegmentReadState(dir, segmentInfo, fieldInfos, IOContext.READ);
+    SegmentReadState readState = new SegmentReadState(dir, segmentInfo, fieldInfos, false, IOContext.READ);
 
     // PostingsFormat
     NormsProducer fakeNorms = new NormsProducer() {
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java b/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java
index d5cd53c..324dab9 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java
@@ -748,7 +748,7 @@ public class RandomPostingsTester {
 
     currentFieldInfos = newFieldInfos;
 
-    SegmentReadState readState = new SegmentReadState(dir, segmentInfo, newFieldInfos, IOContext.READ);
+    SegmentReadState readState = new SegmentReadState(dir, segmentInfo, newFieldInfos, false, IOContext.READ);
 
     return codec.postingsFormat().fieldsProducer(readState);
   }