Posted to java-commits@lucene.apache.org by mi...@apache.org on 2009/07/10 19:08:19 UTC

svn commit: r793039 - in /lucene/java/trunk: ./ src/java/org/apache/lucene/index/ src/test/org/apache/lucene/index/

Author: mikemccand
Date: Fri Jul 10 17:08:19 2009
New Revision: 793039

URL: http://svn.apache.org/viewvc?rev=793039&view=rev
Log:
LUCENE-1726: move unchanged readers to private CoreReaders class that's shared across cloned SegmentReaders
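
In short: the per-segment files that never change across clones or reopens (freqStream, proxStream, tis, fieldInfos, and the compound-file readers) move into a ref-counted CoreReaders holder, so all clones of a SegmentReader share one set of open files, and those files close only when the last reader referencing them closes. A minimal sketch of the pattern, with illustrative names rather than the actual classes in the diff below:

    import java.io.IOException;
    import org.apache.lucene.store.IndexInput;

    // Sketch only: the shape of CoreReaders' sharing contract.
    final class SharedCore {
      private int refCount = 1;          // the opening reader holds the first ref
      private final IndexInput freq;     // stands in for freqStream, proxStream, tis, ...
      private final IndexInput prox;

      SharedCore(IndexInput freq, IndexInput prox) {
        this.freq = freq;
        this.prox = prox;
      }

      synchronized void incRef() {
        if (refCount <= 0) throw new IllegalStateException("already closed");
        refCount++;
      }

      // Files are released only when the last sharer lets go,
      // mirroring CoreReaders.decRef() in the diff below.
      synchronized void decRef() throws IOException {
        if (--refCount == 0) {
          if (freq != null) freq.close();
          if (prox != null) prox.close();
        }
      }
    }

Cloning a reader then means incRef() on the core and handing the same reference to the clone; each reader's doClose() routes through decRef(), so the files survive until the last sharer is closed.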

Added:
    lucene/java/trunk/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java   (with props)
Modified:
    lucene/java/trunk/common-build.xml
    lucene/java/trunk/src/java/org/apache/lucene/index/IndexWriter.java
    lucene/java/trunk/src/java/org/apache/lucene/index/SegmentMerger.java
    lucene/java/trunk/src/java/org/apache/lucene/index/SegmentReader.java
    lucene/java/trunk/src/java/org/apache/lucene/index/SegmentTermDocs.java
    lucene/java/trunk/src/java/org/apache/lucene/index/SegmentTermPositions.java
    lucene/java/trunk/src/test/org/apache/lucene/index/TestDocumentWriter.java
    lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterReader.java
    lucene/java/trunk/src/test/org/apache/lucene/index/TestLazyProxSkipping.java

Modified: lucene/java/trunk/common-build.xml
URL: http://svn.apache.org/viewvc/lucene/java/trunk/common-build.xml?rev=793039&r1=793038&r2=793039&view=diff
==============================================================================
--- lucene/java/trunk/common-build.xml (original)
+++ lucene/java/trunk/common-build.xml Fri Jul 10 17:08:19 2009
@@ -42,7 +42,7 @@
   <property name="Name" value="Lucene"/>
   <property name="dev.version" value="2.9-dev"/>
   <property name="version" value="${dev.version}"/>
-  <property name="compatibility.tag" value="lucene_2_4_back_compat_tests_20090704"/>
+  <property name="compatibility.tag" value="lucene_2_4_back_compat_tests_20090710"/>
   <property name="spec.version" value="${version}"/>	
   <property name="year" value="2000-${current.year}"/>
   <property name="final.name" value="lucene-${name}-${version}"/>

Modified: lucene/java/trunk/src/java/org/apache/lucene/index/IndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/IndexWriter.java?rev=793039&r1=793038&r2=793039&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/IndexWriter.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/IndexWriter.java Fri Jul 10 17:08:19 2009
@@ -4905,7 +4905,7 @@
         }
 
         for(int i=0;i<numSegments;i++) {
-          merge.readersClone[i].openDocStores(merge.readers[i]);
+          merge.readersClone[i].openDocStores();
         }
 
         // Clear DSS
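
This hunk is the caller-side effect of the refactor: doc stores now live on the shared core, so a cloned reader no longer needs the original reader passed in to borrow its doc-store readers. Side by side:

    // Before: the clone copied fieldsReaderOrig etc. from the original
    merge.readersClone[i].openDocStores(merge.readers[i]);

    // After: both readers share one core, which opens the stores once
    merge.readersClone[i].openDocStores();   // delegates to core.openDocStores(si)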

Modified: lucene/java/trunk/src/java/org/apache/lucene/index/SegmentMerger.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/SegmentMerger.java?rev=793039&r1=793038&r2=793039&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/SegmentMerger.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/SegmentMerger.java Fri Jul 10 17:08:19 2009
@@ -253,7 +253,7 @@
       if (reader instanceof SegmentReader) {
         SegmentReader segmentReader = (SegmentReader) reader;
         boolean same = true;
-        FieldInfos segmentFieldInfos = segmentReader.getFieldInfos();
+        FieldInfos segmentFieldInfos = segmentReader.fieldInfos();
         int numFieldInfos = segmentFieldInfos.size();
         for (int j = 0; same && j < numFieldInfos; j++) {
           same = fieldInfos.fieldName(j).equals(segmentFieldInfos.fieldName(j));
@@ -285,7 +285,7 @@
       // with the fieldInfos of the last segment in this
       // case, to keep that numbering.
       final SegmentReader sr = (SegmentReader) readers.get(readers.size()-1);
-      fieldInfos = (FieldInfos) sr.fieldInfos.clone();
+      fieldInfos = (FieldInfos) sr.core.fieldInfos.clone();
     } else {
       fieldInfos = new FieldInfos();		  // merge field names
     }
@@ -294,7 +294,7 @@
       IndexReader reader = (IndexReader) iter.next();
       if (reader instanceof SegmentReader) {
         SegmentReader segmentReader = (SegmentReader) reader;
-        FieldInfos readerFieldInfos = segmentReader.getFieldInfos();
+        FieldInfos readerFieldInfos = segmentReader.fieldInfos();
         int numReaderFieldInfos = readerFieldInfos.size();
         for (int j = 0; j < numReaderFieldInfos; j++) {
           FieldInfo fi = readerFieldInfos.fieldInfo(j);
@@ -468,7 +468,7 @@
         final SegmentReader matchingSegmentReader = matchingSegmentReaders[idx++];
         TermVectorsReader matchingVectorsReader = null;
         if (matchingSegmentReader != null) {
-          TermVectorsReader vectorsReader = matchingSegmentReader.termVectorsReaderOrig;
+          TermVectorsReader vectorsReader = matchingSegmentReader.getTermVectorsReaderOrig();
 
           // If the TV* files are an older format then they cannot read raw docs:
           if (vectorsReader != null && vectorsReader.canReadRawDocs()) {

Modified: lucene/java/trunk/src/java/org/apache/lucene/index/SegmentReader.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/SegmentReader.java?rev=793039&r1=793038&r2=793039&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/SegmentReader.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/SegmentReader.java Fri Jul 10 17:08:19 2009
@@ -40,18 +40,12 @@
 
 /** @version $Id */
 class SegmentReader extends IndexReader implements Cloneable {
-  protected Directory directory;
   protected boolean readOnly;
 
-  private String segment;
   private SegmentInfo si;
   private int readBufferSize;
 
-  FieldInfos fieldInfos;
-  private FieldsReader fieldsReaderOrig = null;
   CloseableThreadLocal fieldsReaderLocal = new FieldsReaderLocal();
-  TermInfosReader tis;
-  TermVectorsReader termVectorsReaderOrig = null;
   CloseableThreadLocal termVectorsLocal = new CloseableThreadLocal();
 
   BitVector deletedDocs = null;
@@ -64,31 +58,189 @@
   private boolean rollbackDeletedDocsDirty = false;
   private boolean rollbackNormsDirty = false;
   private int rollbackPendingDeleteCount;
-  IndexInput freqStream;
-  IndexInput proxStream;
 
   // optionally used for the .nrm file shared by multiple norms
   private IndexInput singleNormStream;
   private Ref singleNormRef;
 
-  // Counts how many other reader share the core objects
-  // (freqStream, proxStream, tis, etc.) of this reader;
-  // when coreRef drops to 0, these core objects may be
-  // closed.  A given insance of SegmentReader may be
-  // closed, even those it shares core objects with other
-  // SegmentReaders:
-  private Ref coreRef = new Ref();
-
-  // Compound File Reader when based on a compound file segment
-  CompoundFileReader cfsReader = null;
-  CompoundFileReader storeCFSReader = null;
+  CoreReaders core;
+
+  // Holds core readers that are shared (unchanged) when
+  // SegmentReader is cloned or reopened
+  static final class CoreReaders {
+
+    // Counts how many other readers share the core objects
+    // (freqStream, proxStream, tis, etc.) of this reader;
+    // when this ref count drops to 0, these core objects may
+    // be closed.  A given instance of SegmentReader may be
+    // closed, even though it shares core objects with other
+    // SegmentReaders:
+    private final Ref ref = new Ref();
+
+    final String segment;
+    final FieldInfos fieldInfos;
+    final IndexInput freqStream;
+    final IndexInput proxStream;
+
+    final Directory dir;
+    final Directory cfsDir;
+    final int readBufferSize;
+
+    TermInfosReader tis;
+    FieldsReader fieldsReaderOrig;
+    TermVectorsReader termVectorsReaderOrig;
+    CompoundFileReader cfsReader;
+    CompoundFileReader storeCFSReader;
+
+    CoreReaders(Directory dir, SegmentInfo si, int readBufferSize) throws IOException {
+      segment = si.name;
+      this.readBufferSize = readBufferSize;
+      this.dir = dir;
+
+      boolean success = false;
+
+      try {
+        Directory dir0 = dir;
+        if (si.getUseCompoundFile()) {
+          cfsReader = new CompoundFileReader(dir, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
+          dir0 = cfsReader;
+        }
+        cfsDir = dir0;
+
+        fieldInfos = new FieldInfos(cfsDir, segment + "." + IndexFileNames.FIELD_INFOS_EXTENSION);
+
+        tis = new TermInfosReader(cfsDir, segment, fieldInfos, readBufferSize);
+
+        // make sure that all index files have been read or are kept open
+        // so that if an index update removes them we'll still have them
+        freqStream = cfsDir.openInput(segment + "." + IndexFileNames.FREQ_EXTENSION, readBufferSize);
+
+        if (fieldInfos.hasProx()) {
+          proxStream = cfsDir.openInput(segment + "." + IndexFileNames.PROX_EXTENSION, readBufferSize);
+        } else {
+          proxStream = null;
+        }
+        success = true;
+      } finally {
+        if (!success) {
+          decRef();
+        }
+      }
+    }
+
+    synchronized TermVectorsReader getTermVectorsReaderOrig() {
+      return termVectorsReaderOrig;
+    }
+
+    synchronized FieldsReader getFieldsReaderOrig() {
+      return fieldsReaderOrig;
+    }
+
+    synchronized void incRef() {
+      ref.incRef();
+    }
+
+    synchronized Directory getCFSReader() {
+      return cfsReader;
+    }
+
+    synchronized void decRef() throws IOException {
+
+      if (ref.decRef() == 0) {
+
+        // close everything, nothing is shared anymore with other readers
+        if (tis != null) {
+          tis.close();
+          // null so if an app hangs on to us we still free most ram
+          tis = null;
+        }
+        
+        if (freqStream != null) {
+          freqStream.close();
+        }
+
+        if (proxStream != null) {
+          proxStream.close();
+        }
+
+        if (termVectorsReaderOrig != null) {
+          termVectorsReaderOrig.close();
+        }
+  
+        if (fieldsReaderOrig != null) {
+          fieldsReaderOrig.close();
+        }
+  
+        if (cfsReader != null) {
+          cfsReader.close();
+        }
   
+        if (storeCFSReader != null) {
+          storeCFSReader.close();
+        }
+      }
+    }
+
+    synchronized void openDocStores(SegmentInfo si) throws IOException {
+
+      assert si.name.equals(segment);
+
+      if (fieldsReaderOrig == null) {
+        final Directory storeDir;
+        if (si.getDocStoreOffset() != -1) {
+          if (si.getDocStoreIsCompoundFile()) {
+            assert storeCFSReader == null;
+            storeCFSReader = new CompoundFileReader(dir,
+                                                    si.getDocStoreSegment() + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION,
+                                                    readBufferSize);
+            storeDir = storeCFSReader;
+            assert storeDir != null;
+          } else {
+            storeDir = dir;
+            assert storeDir != null;
+          }
+        } else if (si.getUseCompoundFile()) {
+          // In some cases, we were originally opened when CFS
+          // was not used, but then we are asked to open doc
+          // stores after the segment has switched to CFS
+          if (cfsReader == null) {
+            cfsReader = new CompoundFileReader(dir, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
+          }
+          storeDir = cfsReader;
+          assert storeDir != null;
+        } else {
+          storeDir = dir;
+          assert storeDir != null;
+        }
+
+        final String storesSegment;
+        if (si.getDocStoreOffset() != -1) {
+          storesSegment = si.getDocStoreSegment();
+        } else {
+          storesSegment = segment;
+        }
+
+        fieldsReaderOrig = new FieldsReader(storeDir, storesSegment, fieldInfos, readBufferSize,
+                                            si.getDocStoreOffset(), si.docCount);
+
+        // Verify two sources of "maxDoc" agree:
+        if (si.getDocStoreOffset() == -1 && fieldsReaderOrig.size() != si.docCount) {
+          throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + fieldsReaderOrig.size() + " but segmentInfo shows " + si.docCount);
+        }
+
+        if (fieldInfos.hasVectors()) { // open term vector files only as needed
+          termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.getDocStoreOffset(), si.docCount);
+        }
+      }
+    }
+  }
+
   /**
    * Sets the initial value 
    */
   private class FieldsReaderLocal extends CloseableThreadLocal {
     protected Object initialValue() {
-      return (FieldsReader) fieldsReaderOrig.clone();
+      return core.getFieldsReaderOrig().clone();
     }
   }
   
@@ -421,41 +573,19 @@
     } catch (Exception e) {
       throw new RuntimeException("cannot load SegmentReader class: " + e, e);
     }
-    instance.directory = dir;
     instance.readOnly = readOnly;
-    instance.segment = si.name;
     instance.si = si;
     instance.readBufferSize = readBufferSize;
 
     boolean success = false;
 
     try {
-      // Use compound file directory for some files, if it exists
-      Directory cfsDir = instance.directory();
-      if (si.getUseCompoundFile()) {
-        instance.cfsReader = new CompoundFileReader(instance.directory(), instance.segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
-        cfsDir = instance.cfsReader;
-      }
-
-      instance.fieldInfos = new FieldInfos(cfsDir, instance.segment + ".fnm");
-
+      instance.core = new CoreReaders(dir, si, readBufferSize);
       if (doOpenStores) {
-        instance.openDocStores();
+        instance.core.openDocStores(si);
       }
-
-      boolean anyProx = instance.fieldInfos.hasProx();
-
-      instance.tis = new TermInfosReader(cfsDir, instance.segment, instance.fieldInfos, readBufferSize);
-
       instance.loadDeletedDocs();
-
-      // make sure that all index files have been read or are kept open
-      // so that if an index update removes them we'll still have them
-      instance.freqStream = cfsDir.openInput(instance.segment + ".frq", readBufferSize);
-      if (anyProx)
-        instance.proxStream = cfsDir.openInput(instance.segment + ".prx", readBufferSize);
-      instance.openNorms(cfsDir, readBufferSize);
-
+      instance.openNorms(instance.core.cfsDir, readBufferSize);
       success = true;
     } finally {
 
@@ -471,64 +601,8 @@
     return instance;
   }
 
-  synchronized void openDocStores(SegmentReader orig) throws IOException {
-    if (fieldsReaderOrig == null) {
-      orig.openDocStores();
-      
-      fieldsReaderOrig = orig.fieldsReaderOrig;
-      termVectorsReaderOrig = orig.termVectorsReaderOrig;
-      storeCFSReader = orig.storeCFSReader;
-      cfsReader = orig.cfsReader;
-    }
-  }
-
-  synchronized void openDocStores() throws IOException {
-    if (fieldsReaderOrig == null) {
-      final Directory storeDir;
-      if (si.getDocStoreOffset() != -1) {
-        if (si.getDocStoreIsCompoundFile()) {
-          storeCFSReader = new CompoundFileReader(directory(),
-                                                  si.getDocStoreSegment() + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION,
-                                                  readBufferSize);
-          storeDir = storeCFSReader;
-          assert storeDir != null;
-        } else {
-          storeDir = directory();
-          assert storeDir != null;
-        }
-      } else if (si.getUseCompoundFile()) {
-        // In some cases, we were originally opened when CFS
-        // was not used, but then we are asked to open doc
-        // stores after the segment has switched to CFS
-        if (cfsReader == null) {
-          cfsReader = new CompoundFileReader(directory(), segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
-        }
-        storeDir = cfsReader;
-        assert storeDir != null;
-      } else {
-        storeDir = directory();
-        assert storeDir != null;
-      }
-
-      final String storesSegment;
-      if (si.getDocStoreOffset() != -1) {
-        storesSegment = si.getDocStoreSegment();
-      } else {
-        storesSegment = segment;
-      }
-
-      fieldsReaderOrig = new FieldsReader(storeDir, storesSegment, fieldInfos, readBufferSize,
-                                          si.getDocStoreOffset(), si.docCount);
-
-      // Verify two sources of "maxDoc" agree:
-      if (si.getDocStoreOffset() == -1 && fieldsReaderOrig.size() != si.docCount) {
-        throw new CorruptIndexException("doc counts differ for segment " + si.name + ": fieldsReader shows " + fieldsReaderOrig.size() + " but segmentInfo shows " + si.docCount);
-      }
-
-      if (fieldInfos.hasVectors()) { // open term vector files only as needed
-        termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.getDocStoreOffset(), si.docCount);
-      }
-    }
+  void openDocStores() throws IOException {
+    core.openDocStores(si);
   }
 
   private void loadDeletedDocs() throws IOException {
@@ -586,8 +660,8 @@
                                   && (!si.hasDeletions() || this.si.getDelFileName().equals(si.getDelFileName()));
     boolean normsUpToDate = true;
     
-    boolean[] fieldNormsChanged = new boolean[fieldInfos.size()];
-    final int fieldCount = fieldInfos.size();
+    boolean[] fieldNormsChanged = new boolean[core.fieldInfos.size()];
+    final int fieldCount = core.fieldInfos.size();
     for (int i = 0; i < fieldCount; i++) {
       if (!this.si.getNormFileName(i).equals(si.getNormFileName(i))) {
         normsUpToDate = false;
@@ -618,23 +692,12 @@
 
     boolean success = false;
     try {
-      coreRef.incRef();
-      clone.coreRef = coreRef;
+      core.incRef();
+      clone.core = core;
       clone.readOnly = openReadOnly;
-      clone.directory = directory;
       clone.si = si;
-      clone.segment = segment;
       clone.readBufferSize = readBufferSize;
-      clone.cfsReader = cfsReader;
-      clone.storeCFSReader = storeCFSReader;
 
-      clone.fieldInfos = fieldInfos;
-      clone.tis = tis;
-      clone.freqStream = freqStream;
-      clone.proxStream = proxStream;
-      clone.termVectorsReaderOrig = termVectorsReaderOrig;
-      clone.fieldsReaderOrig = fieldsReaderOrig;
-      
       if (!openReadOnly && hasChanges) {
         // My pending changes transfer to the new reader
         clone.pendingDeleteCount = pendingDeleteCount;
@@ -670,16 +733,16 @@
 
         // Clone unchanged norms to the cloned reader
         if (doClone || !fieldNormsChanged[i]) {
-          final String curField = fieldInfos.fieldInfo(i).name;
+          final String curField = core.fieldInfos.fieldInfo(i).name;
           Norm norm = (Norm) this.norms.get(curField);
           if (norm != null)
             clone.norms.put(curField, norm.clone());
         }
       }
-      
+
       // If we are not cloning, then this will open anew
       // any norms that have changed:
-      clone.openNorms(si.getUseCompoundFile() ? cfsReader : directory(), readBufferSize);
+      clone.openNorms(si.getUseCompoundFile() ? core.getCFSReader() : directory(), readBufferSize);
 
       success = true;
     } finally {
@@ -716,7 +779,7 @@
       }
 
       if (normsDirty) {               // re-write norms
-        si.setNumFields(fieldInfos.size());
+        si.setNumFields(core.fieldInfos.size());
         Iterator it = norms.values().iterator();
         while (it.hasNext()) {
           Norm norm = (Norm) it.next();
@@ -734,7 +797,7 @@
   FieldsReader getFieldsReader() {
     return (FieldsReader) fieldsReaderLocal.get();
   }
-  
+
   protected void doClose() throws IOException {
     termVectorsLocal.close();
     fieldsReaderLocal.close();
@@ -749,32 +812,8 @@
     while (it.hasNext()) {
       ((Norm) it.next()).decRef();
     }
-
-    if (coreRef.decRef() == 0) {
-
-      // close everything, nothing is shared anymore with other readers
-      if (tis != null) {
-        tis.close();
-        // null so if an app hangs on to us we still free most ram
-        tis = null;
-      }
-      
-      if (freqStream != null)
-        freqStream.close();
-      if (proxStream != null)
-        proxStream.close();
-  
-      if (termVectorsReaderOrig != null)
-        termVectorsReaderOrig.close();
-  
-      if (fieldsReaderOrig != null)
-        fieldsReaderOrig.close();
-  
-      if (cfsReader != null)
-        cfsReader.close();
-  
-      if (storeCFSReader != null)
-        storeCFSReader.close();
+    if (core != null) {
+      core.decRef();
     }
   }
 
@@ -837,16 +876,16 @@
 
   public TermEnum terms() {
     ensureOpen();
-    return tis.terms();
+    return core.tis.terms();
   }
 
   public TermEnum terms(Term t) throws IOException {
     ensureOpen();
-    return tis.terms(t);
+    return core.tis.terms(t);
   }
 
-  FieldInfos getFieldInfos() {
-    return fieldInfos;
+  FieldInfos fieldInfos() {
+    return core.fieldInfos;
   }
 
   public Document document(int n, FieldSelector fieldSelector) throws CorruptIndexException, IOException {
@@ -878,7 +917,7 @@
 
   public int docFreq(Term t) throws IOException {
     ensureOpen();
-    TermInfo ti = tis.get(t);
+    TermInfo ti = core.tis.get(t);
     if (ti != null)
       return ti.docFreq;
     else
@@ -899,11 +938,11 @@
   }
 
   public void setTermInfosIndexDivisor(int indexDivisor) throws IllegalStateException {
-    tis.setIndexDivisor(indexDivisor);
+    core.tis.setIndexDivisor(indexDivisor);
   }
 
   public int getTermInfosIndexDivisor() {
-    return tis.getIndexDivisor();
+    return core.tis.getIndexDivisor();
   }
 
   /**
@@ -913,8 +952,8 @@
     ensureOpen();
 
     Set fieldSet = new HashSet();
-    for (int i = 0; i < fieldInfos.size(); i++) {
-      FieldInfo fi = fieldInfos.fieldInfo(i);
+    for (int i = 0; i < core.fieldInfos.size(); i++) {
+      FieldInfo fi = core.fieldInfos.fieldInfo(i);
       if (fieldOption == IndexReader.FieldOption.ALL) {
         fieldSet.add(fi.name);
       }
@@ -1018,8 +1057,8 @@
   private void openNorms(Directory cfsDir, int readBufferSize) throws IOException {
     long nextNormSeek = SegmentMerger.NORMS_HEADER.length; //skip header (header unused for now)
     int maxDoc = maxDoc();
-    for (int i = 0; i < fieldInfos.size(); i++) {
-      FieldInfo fi = fieldInfos.fieldInfo(i);
+    for (int i = 0; i < core.fieldInfos.size(); i++) {
+      FieldInfo fi = core.fieldInfos.fieldInfo(i);
       if (norms.containsKey(fi.name)) {
         // in case this SegmentReader is being re-opened, we might be able to
         // reuse some norm instances and skip loading them here
@@ -1085,19 +1124,27 @@
    * Create a clone from the initial TermVectorsReader and store it in the ThreadLocal.
    * @return TermVectorsReader
    */
-  private TermVectorsReader getTermVectorsReader() {
-    assert termVectorsReaderOrig != null;
-    TermVectorsReader tvReader = (TermVectorsReader)termVectorsLocal.get();
+  TermVectorsReader getTermVectorsReader() {
+    TermVectorsReader tvReader = (TermVectorsReader) termVectorsLocal.get();
     if (tvReader == null) {
-      try {
-        tvReader = (TermVectorsReader)termVectorsReaderOrig.clone();
-      } catch (CloneNotSupportedException cnse) {
+      TermVectorsReader orig = core.getTermVectorsReaderOrig();
+      if (orig == null) {
         return null;
+      } else {
+        try {
+          tvReader = (TermVectorsReader) orig.clone();
+        } catch (CloneNotSupportedException cnse) {
+          return null;
+        }
       }
       termVectorsLocal.set(tvReader);
     }
     return tvReader;
   }
+
+  TermVectorsReader getTermVectorsReaderOrig() {
+    return core.getTermVectorsReaderOrig();
+  }
   
   /** Return a term frequency vector for the specified document and field. The
    *  vector returned contains term numbers and frequencies for all terms in
@@ -1108,8 +1155,8 @@
   public TermFreqVector getTermFreqVector(int docNumber, String field) throws IOException {
     // Check if this field is invalid or has no stored term vector
     ensureOpen();
-    FieldInfo fi = fieldInfos.fieldInfo(field);
-    if (fi == null || !fi.storeTermVector || termVectorsReaderOrig == null) 
+    FieldInfo fi = core.fieldInfos.fieldInfo(field);
+    if (fi == null || !fi.storeTermVector) 
       return null;
     
     TermVectorsReader termVectorsReader = getTermVectorsReader();
@@ -1122,13 +1169,12 @@
 
   public void getTermFreqVector(int docNumber, String field, TermVectorMapper mapper) throws IOException {
     ensureOpen();
-    FieldInfo fi = fieldInfos.fieldInfo(field);
-    if (fi == null || !fi.storeTermVector || termVectorsReaderOrig == null)
+    FieldInfo fi = core.fieldInfos.fieldInfo(field);
+    if (fi == null || !fi.storeTermVector)
       return;
 
     TermVectorsReader termVectorsReader = getTermVectorsReader();
-    if (termVectorsReader == null)
-    {
+    if (termVectorsReader == null) {
       return;
     }
 
@@ -1139,8 +1185,6 @@
 
   public void getTermFreqVector(int docNumber, TermVectorMapper mapper) throws IOException {
     ensureOpen();
-    if (termVectorsReaderOrig == null)
-      return;
 
     TermVectorsReader termVectorsReader = getTermVectorsReader();
     if (termVectorsReader == null)
@@ -1158,8 +1202,6 @@
    */
   public TermFreqVector[] getTermFreqVectors(int docNumber) throws IOException {
     ensureOpen();
-    if (termVectorsReaderOrig == null)
-      return null;
     
     TermVectorsReader termVectorsReader = getTermVectorsReader();
     if (termVectorsReader == null)
@@ -1168,16 +1210,11 @@
     return termVectorsReader.get(docNumber);
   }
   
-  /** Returns the field infos of this segment */
-  FieldInfos fieldInfos() {
-    return fieldInfos;
-  }
-  
   /**
    * Return the name of the segment this reader is reading.
    */
   public String getSegmentName() {
-    return segment;
+    return core.segment;
   }
   
   /**
@@ -1220,18 +1257,18 @@
     // Don't ensureOpen here -- in certain cases, when a
     // cloned/reopened reader needs to commit, it may call
     // this method on the closed original reader
-    return directory;
+    return core.dir;
   }
 
   // This is necessary so that cloned SegmentReaders (which
   // share the underlying postings data) will map to the
   // same entry in the FieldCache.  See LUCENE-1579.
   public final Object getFieldCacheKey() {
-    return freqStream;
+    return core.freqStream;
   }
 
   public long getUniqueTermCount() {
-    return tis.size();
+    return core.tis.size();
   }
 
   /**
@@ -1257,4 +1294,5 @@
 
     throw new IllegalArgumentException(reader + " is not a SegmentReader or a single-segment DirectoryReader");
   }
+
 }
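
One idiom in the SegmentReader diff deserves a note: the core keeps a single original FieldsReader / TermVectorsReader, and each thread lazily clones it (see FieldsReaderLocal and getTermVectorsReader() above), so concurrent searches never contend on one shared file pointer. A condensed sketch of that idiom, using a plain ThreadLocal in place of Lucene's CloseableThreadLocal:

    // Sketch only: per-thread clone of a shared reader.
    final ThreadLocal perThreadFields = new ThreadLocal() {
      protected Object initialValue() {
        // each thread works against its own clone of the shared original
        return core.getFieldsReaderOrig().clone();
      }
    };
    FieldsReader fr = (FieldsReader) perThreadFields.get();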

Modified: lucene/java/trunk/src/java/org/apache/lucene/index/SegmentTermDocs.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/SegmentTermDocs.java?rev=793039&r1=793038&r2=793039&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/SegmentTermDocs.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/SegmentTermDocs.java Fri Jul 10 17:08:19 2009
@@ -45,16 +45,16 @@
   
   protected SegmentTermDocs(SegmentReader parent) {
     this.parent = parent;
-    this.freqStream = (IndexInput) parent.freqStream.clone();
+    this.freqStream = (IndexInput) parent.core.freqStream.clone();
     synchronized (parent) {
       this.deletedDocs = parent.deletedDocs;
     }
-    this.skipInterval = parent.tis.getSkipInterval();
-    this.maxSkipLevels = parent.tis.getMaxSkipLevels();
+    this.skipInterval = parent.core.tis.getSkipInterval();
+    this.maxSkipLevels = parent.core.tis.getMaxSkipLevels();
   }
 
   public void seek(Term term) throws IOException {
-    TermInfo ti = parent.tis.get(term);
+    TermInfo ti = parent.core.tis.get(term);
     seek(ti, term);
   }
 
@@ -63,13 +63,13 @@
     Term term;
     
     // use comparison of fieldinfos to verify that termEnum belongs to the same segment as this SegmentTermDocs
-    if (termEnum instanceof SegmentTermEnum && ((SegmentTermEnum) termEnum).fieldInfos == parent.fieldInfos) {        // optimized case
+    if (termEnum instanceof SegmentTermEnum && ((SegmentTermEnum) termEnum).fieldInfos == parent.core.fieldInfos) {        // optimized case
       SegmentTermEnum segmentTermEnum = ((SegmentTermEnum) termEnum);
       term = segmentTermEnum.term();
       ti = segmentTermEnum.termInfo();
     } else  {                                         // punt case
       term = termEnum.term();
-      ti = parent.tis.get(term);        
+      ti = parent.core.tis.get(term);
     }
     
     seek(ti, term);
@@ -77,7 +77,7 @@
 
   void seek(TermInfo ti, Term term) throws IOException {
     count = 0;
-    FieldInfo fi = parent.fieldInfos.fieldInfo(term.field);
+    FieldInfo fi = parent.core.fieldInfos.fieldInfo(term.field);
     currentFieldOmitTermFreqAndPositions = (fi != null) ? fi.omitTermFreqAndPositions : false;
     currentFieldStoresPayloads = (fi != null) ? fi.storePayloads : false;
     if (ti == null) {

Modified: lucene/java/trunk/src/java/org/apache/lucene/index/SegmentTermPositions.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/SegmentTermPositions.java?rev=793039&r1=793038&r2=793039&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/SegmentTermPositions.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/SegmentTermPositions.java Fri Jul 10 17:08:19 2009
@@ -146,7 +146,7 @@
   private void lazySkip() throws IOException {
     if (proxStream == null) {
       // clone lazily
-      proxStream = (IndexInput)parent.proxStream.clone();
+      proxStream = (IndexInput) parent.core.proxStream.clone();
     }
     
     // we might have to skip the current payload
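
The clone here is not optional even though the stream is shared: an IndexInput carries a single file position, so every consumer needs a private clone of core.proxStream, and lazySkip() defers that clone until positions are actually read. Illustratively:

    // Each clone has an independent file pointer over the same data,
    // so two enumerators can read different offsets concurrently.
    IndexInput a = (IndexInput) parent.core.proxStream.clone();
    IndexInput b = (IndexInput) parent.core.proxStream.clone();
    a.seek(0);        // does not disturb b's position
    b.seek(1024);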

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestDocumentWriter.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestDocumentWriter.java?rev=793039&r1=793038&r2=793039&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestDocumentWriter.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestDocumentWriter.java Fri Jul 10 17:08:19 2009
@@ -97,8 +97,8 @@
 
     // test that the norms are not present in the segment if
     // omitNorms is true
-    for (int i = 0; i < reader.fieldInfos.size(); i++) {
-      FieldInfo fi = reader.fieldInfos.fieldInfo(i);
+    for (int i = 0; i < reader.core.fieldInfos.size(); i++) {
+      FieldInfo fi = reader.core.fieldInfos.fieldInfo(i);
       if (fi.isIndexed) {
         assertTrue(fi.omitNorms == !reader.hasNorms(fi.name));
       }

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterReader.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterReader.java?rev=793039&r1=793038&r2=793039&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterReader.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterReader.java Fri Jul 10 17:08:19 2009
@@ -42,7 +42,7 @@
 public class TestIndexWriterReader extends LuceneTestCase {
   static PrintStream infoStream;
 
-  private static class HeavyAtomicInt {
+  public static class HeavyAtomicInt {
     private int value;
     public HeavyAtomicInt(int start) {
       value = start;

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestLazyProxSkipping.java?rev=793039&r1=793038&r2=793039&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestLazyProxSkipping.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestLazyProxSkipping.java Fri Jul 10 17:08:19 2009
@@ -43,12 +43,24 @@
     private String term1 = "xx";
     private String term2 = "yy";
     private String term3 = "zz";
+
+    private class SeekCountingDirectory extends RAMDirectory {
+      public IndexInput openInput(String name) throws IOException {
+        IndexInput ii = super.openInput(name);
+        if (name.endsWith(".prx")) {
+          // we decorate the proxStream with a wrapper class that lets us count the number of calls to seek()
+          ii = new SeeksCountingStream(ii);
+        }
+        return ii;
+      }
+    }
     
     private void createIndex(int numHits) throws IOException {
         int numDocs = 500;
         
-        Directory directory = new RAMDirectory();
+        Directory directory = new SeekCountingDirectory();
         IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+        writer.setUseCompoundFile(false);
         writer.setMaxBufferedDocs(10);
         for (int i = 0; i < numDocs; i++) {
             Document doc = new Document();
@@ -74,9 +86,6 @@
         
         SegmentReader reader = SegmentReader.getOnlySegmentReader(directory);
 
-        // we decorate the proxStream with a wrapper class that allows to count the number of calls of seek()
-        reader.proxStream = new SeeksCountingStream(reader.proxStream);
-        
         this.searcher = new IndexSearcher(reader);        
     }
     
@@ -96,6 +105,7 @@
         assertEquals(numHits, hits.length);
         
         // check if the number of calls of seek() does not exceed the number of hits
+        assertTrue(this.seeksCounter > 0);
         assertTrue(this.seeksCounter <= numHits + 1);
     }
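
Since proxStream is now a final field on the shared core, the test can no longer splice a counting wrapper into reader.proxStream; instead the SeekCountingDirectory above decorates the raw .prx input at open time. The SeeksCountingStream it wraps with already exists elsewhere in this test file and is not shown in these hunks; a sketch of what such a decorator plausibly looks like, forwarding everything and counting only seek():

    // Sketch only: a seek-counting IndexInput decorator (the real
    // SeeksCountingStream lives in TestLazyProxSkipping.java, where
    // seeksCounter is a field of the enclosing test class).
    class SeeksCountingStream extends IndexInput {
      private final IndexInput in;
      SeeksCountingStream(IndexInput in) { this.in = in; }
      public void seek(long pos) throws IOException {
        seeksCounter++;                 // count, then forward
        in.seek(pos);
      }
      public byte readByte() throws IOException { return in.readByte(); }
      public void readBytes(byte[] b, int off, int len) throws IOException {
        in.readBytes(b, off, len);
      }
      public long getFilePointer() { return in.getFilePointer(); }
      public long length() { return in.length(); }
      public void close() throws IOException { in.close(); }
    }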
     

Added: lucene/java/trunk/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java?rev=793039&view=auto
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java (added)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java Fri Jul 10 17:08:19 2009
@@ -0,0 +1,111 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.util.Random;
+
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.TestIndexWriterReader.HeavyAtomicInt;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.MockRAMDirectory;
+import org.apache.lucene.util.LuceneTestCase;
+
+public class TestNRTReaderWithThreads extends LuceneTestCase {
+  Random random = new Random();
+  HeavyAtomicInt seq = new HeavyAtomicInt(1);
+
+  public void testIndexing() throws Exception {
+    Directory mainDir = new MockRAMDirectory();
+    IndexWriter writer = new IndexWriter(mainDir, new WhitespaceAnalyzer(),
+        IndexWriter.MaxFieldLength.LIMITED);
+    writer.setUseCompoundFile(false);
+    IndexReader reader = writer.getReader(); // start pooling readers
+    reader.close();
+    writer.setMergeFactor(2);
+    writer.setMaxBufferedDocs(10);
+    RunThread[] indexThreads = new RunThread[4];
+    for (int x=0; x < indexThreads.length; x++) {
+      indexThreads[x] = new RunThread(x % 2, writer);
+      indexThreads[x].setName("Thread " + x);
+      indexThreads[x].start();
+    }    
+    long startTime = System.currentTimeMillis();
+    long duration = 5*1000;
+    while ((System.currentTimeMillis() - startTime) < duration) {
+      Thread.sleep(100);
+    }
+    int delCount = 0;
+    int addCount = 0;
+    for (int x=0; x < indexThreads.length; x++) {
+      indexThreads[x].run = false;
+      assertTrue(indexThreads[x].ex == null);
+      addCount += indexThreads[x].addCount;
+      delCount += indexThreads[x].delCount;
+    }
+    for (int x=0; x < indexThreads.length; x++) {
+      indexThreads[x].join();
+    }
+    //System.out.println("addCount:"+addCount);
+    //System.out.println("delCount:"+delCount);
+    writer.close();
+    mainDir.close();
+  }
+
+  public class RunThread extends Thread {
+    IndexWriter writer;
+    boolean run = true;
+    Throwable ex;
+    int delCount = 0;
+    int addCount = 0;
+    int type;
+
+    public RunThread(int type, IndexWriter writer) {
+      this.type = type;
+      this.writer = writer;
+    }
+
+    public void run() {
+      try {
+        while (run) {
+          //int n = random.nextInt(2);
+          if (type == 0) {
+            int i = seq.addAndGet(1);
+            Document doc = TestIndexWriterReader.createDocument(i, "index1", 10);
+            writer.addDocument(doc);
+            addCount++;
+          } else if (type == 1) {
+            // we may or may not delete because the term may not exist,
+            // however we're opening and closing the reader rapidly
+            IndexReader reader = writer.getReader();
+            int id = random.nextInt(seq.intValue());
+            Term term = new Term("id", Integer.toString(id));
+            int count = TestIndexWriterReader.count(term, reader);
+            writer.deleteDocuments(term);
+            reader.close();
+            delCount += count;
+          }
+        }
+      } catch (Throwable ex) {
+        ex.printStackTrace(System.out);
+        this.ex = ex;
+        run = false;
+      }
+    }
+  }
+}

Propchange: lucene/java/trunk/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java
------------------------------------------------------------------------------
    svn:eol-style = native