Posted to java-user@lucene.apache.org by Wojtek212 <wo...@gmail.com> on 2008/08/01 02:51:39 UTC

FileNotFoundException during indexing

Hi,
I'm sometimes receiving FileNotFoundExceptions during indexing.

java.io.FileNotFoundException: /tmp/content/3615.0-3618.0/_3p.fnm (No such
file or directory)
	at
com.test.vcssearch.DefaultServiceIndexer$2.run(DefaultServiceIndexer.java:245)
	at java.lang.Thread.run(Thread.java:595)
Caused by: com.test.search.IndexingException: java.io.FileNotFoundException:
/tmp/content/3615.0-3618.0/_3p.fnm (No such file or directory)
	at
com.test.search.impl.lucene.IndexManager.removeDocuments(IndexManager.java:293)
	at
com.test.search.impl.lucene.IndexManager.removeDocuments(IndexManager.java:199)
	at com.test.search.impl.lucene.IndexManager.reindex(IndexManager.java:250)
	at com.testsearch.impl.lucene.IndexManager.reindex(IndexManager.java:301)
	at
com.test.vcssearch.DefaultServiceIndexer$2.run(DefaultServiceIndexer.java:239)
	... 1 more
Caused by: java.io.FileNotFoundException: /tmp/content/3615.0-3618.0/_3p.fnm
(No such file or directory)
	at java.io.RandomAccessFile.open(Native Method)
	at java.io.RandomAccessFile.<init>(RandomAccessFile.java:212)
	at
org.apache.lucene.store.FSIndexInput$Descriptor.<init>(FSDirectory.java:497)
	at org.apache.lucene.store.FSIndexInput.<init>(FSDirectory.java:522)
	at org.apache.lucene.store.FSDirectory.openInput(FSDirectory.java:434)
	at
org.apache.lucene.index.CompoundFileWriter.copyFile(CompoundFileWriter.java:204)
	at
org.apache.lucene.index.CompoundFileWriter.close(CompoundFileWriter.java:169)
	at
org.apache.lucene.index.SegmentMerger.createCompoundFile(SegmentMerger.java:153)
	at org.apache.lucene.index.IndexWriter.mergeSegments(IndexWriter.java:1601)
	at org.apache.lucene.index.IndexWriter.optimize(IndexWriter.java:900)
	at
com.test.search.impl.lucene.IndexManager.removeDocuments(IndexManager.java:282)

I have a class with a few methods for reindexing, adding and deleting documents.
Everything is synchronized. Here is an example:

public class IndexManager {

   private final Object lock = new Object();

   private final Analyzer analyzer;

   private final FileProxy index;

   IndexManager(FileProxy index,
                      boolean create) throws IndexingException {
       this.analyzer = new TestAnalyzer();
       this.index = index;
       if (!create && !index.exists()) {
           create = true;
       }

       IndexWriter writer = null;
       try {
           // this checks the directory is unlocked
           writer = createIndexWriter(create);
       } catch (IOException e) {
           throw new IndexingException(e);
       } finally {
           if (writer != null) {
               try {
                   writer.close();
               } catch (IOException e) {
                   LOGGER.error("Cannot close index writer", e);
               }
           }
       }
   }

   public void reindex(Document[] documents) throws IndexingException {
        synchronized (lock) {
            String[] ids = new String[documents.length];
            for (int i = 0; i < documents.length; i++) {
                ids[i] = documents[i].getDocumentID();
            }
            removeDocuments(ids);
            addDocuments(documents);
        }
    }

   private IndexWriter createIndexWriter() throws IOException {
       return createIndexWriter(false);
   }

   private IndexWriter createIndexWriter(boolean create) throws IOException {
        Directory directory = FileProxyDirectory.getDirectory(index, create);
        if (IndexReader.isLocked(directory)) {
            IndexReader.unlock(directory);
        }
        return new IndexWriter(directory, analyzer, create);
    }

   public void addDocuments(Document[] documents)
           throws IndexingException {
       synchronized (lock) {
           IndexWriter indexWriter = null;
           try {
               try {
                   indexWriter =  createIndexWriter();
                   for (int i = 0; i < documents.length; i++) {
                       // add a document ID for future management
                       if (documents[i].getDocumentID() == null) {
                           String msg = EXCEPTION_LOCALIZER.format(
                                   "document-id-not-set");
                           LOGGER.error(msg);
                           throw new IndexingException(msg);
                       }
                        LuceneDocument luceneDoc = (LuceneDocument) documents[i];
                        indexWriter.addDocument(luceneDoc.getBackingDocument());
                   }
               } finally {
                   if (indexWriter != null) {
                       try {
                           indexWriter.close();
                       } catch (IOException e) {
                           LOGGER.error("Cannot close index writer", e);
                       }
                   }
               }
           } catch (IOException e) {
               throw new IndexingException(e);
           }
       }
   }

   public boolean[] removeDocuments(String[] documentIDs)
           throws IndexingException {
       boolean[] results = new boolean[documentIDs.length];

       // Batching the removal of a group of documents is more efficient due
       // to the requirement of closing the reader
       synchronized (lock) {
           try {
               IndexWriter indexWriter = null;
               try {
                   indexWriter = createIndexWriter();
                   for (int i = 0; i < documentIDs.length; i++) {
                       Term term = new Term(SearchConstants.DOCUMENT_ID,
                               documentIDs[i]);

                       indexWriter.deleteDocuments(term);
                       results[i] = true;
                   }
                   indexWriter.optimize();
               } finally {
                   if (indexWriter != null) {
                       try {
                           indexWriter.close();
                       } catch (IOException e) {
                           LOGGER.error("Cannot close index writer", e);
                       }
                   }
               }
           } catch (IOException e) {
               throw new IndexingException(e);
           }
       }
       return results;
   }

}

This class is used by many threads. Some of them add documents, others
delete and reindex. After every operation the IndexWriter is closed.
I'm using Lucene 2.1.0, but even after upgrading to 2.3.2 the exception
still occurs.
I don't search while indexing, yet the exception still happens. Does anybody
have an idea what the reason could be?


Re: FileNotFoundException during indexing

Posted by Michael McCandless <lu...@mikemccandless.com>.
Wojtek212 wrote:

> You were right, I had 2 IndexWriters. I've checked again and it turned out
> I had 2 IndexManagers loaded by 2 different classloaders, so even though I
> stored it in a static Map, it didn't help.

Phew!  That's tricky (two different classloaders).  Good sleuthing!

> Anyway, thanks for the help.

You're welcome!

> But I have one last question. Is it OK to use an
> IndexSearcher while an IndexWriter is working? Or should these operations be
> synchronized?

It's fine to use these two at once.  IndexSearcher will search the  
point-in-time snapshot as of when it was opened, so it will see none  
of the changes done by IndexWriter until you reopen it.
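
A quick sketch of what I mean, against the 2.3 API (the path and field
name are just for illustration):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.FSDirectory;

public class PointInTimeDemo {
    public static void main(String[] args) throws Exception {
        FSDirectory dir = FSDirectory.getDirectory("/tmp/demo-index");

        // Writer and searcher can safely be open at the same time.
        IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true);
        IndexSearcher searcher = new IndexSearcher(dir);  // snapshot of the (empty) index

        Document doc = new Document();
        doc.add(new Field("id", "42", Field.Store.YES, Field.Index.UN_TOKENIZED));
        writer.addDocument(doc);
        writer.close();  // commits the new document

        Hits hits = searcher.search(new TermQuery(new Term("id", "42")));
        System.out.println(hits.length());  // 0 -- old snapshot, change not visible

        searcher.close();
        searcher = new IndexSearcher(dir);  // open a new searcher to see the commit
        hits = searcher.search(new TermQuery(new Term("id", "42")));
        System.out.println(hits.length());  // 1
        searcher.close();
        dir.close();
    }
}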

Mike



Re: FileNotFoundException during indexing

Posted by Wojtek212 <wo...@gmail.com>.
Hi Mike,
You were right, I had 2 IndexWriters. I've checked again and it turned out I
had 2 IndexManagers loaded by 2 different classloaders, so even though I
stored it in a static Map, it didn't help.
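
(Each classloader gets its own copy of the class and therefore its own
static fields, so each one ended up with its own "singleton". A rough
illustration -- the class name and classpath directory are made up:)

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;

public class TwoClassLoadersDemo {
    public static void main(String[] args) throws Exception {
        // Two sibling classloaders that both load classes from the same place
        // and do not delegate to a common parent for them.
        URL[] cp = { new File("build/classes/").toURI().toURL() };
        ClassLoader a = new URLClassLoader(cp, null);
        ClassLoader b = new URLClassLoader(cp, null);

        Class<?> c1 = a.loadClass("com.example.IndexManagerHolder");
        Class<?> c2 = b.loadClass("com.example.IndexManagerHolder");

        // Same class name, but two distinct Class objects, each with its own
        // static Map -- so the "shared" IndexManager exists once per loader.
        System.out.println(c1 == c2);  // false
    }
}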

Anyway, thanks for the help. But I have one last question. Is it OK to use an
IndexSearcher while an IndexWriter is working? Or should these operations be
synchronized?


Re: FileNotFoundException during indexing

Posted by Michael McCandless <lu...@mikemccandless.com>.
From this log I can see you do in fact have two IndexWriters open at
the same time (see how IW 6 and IW 42 have intermingled log lines
right before the exception).

Are you sure you're not still unlocking the index?  If you are not, and
you're using either Simple or NativeFSLockFactory, then the 2nd
IndexWriter should have hit LockObtainFailedException.
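
For example, a tiny standalone test along these lines (2.3 API; the path
is made up) should show the second writer being refused while the first
one is still open:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.LockObtainFailedException;

public class SecondWriterCheck {
    public static void main(String[] args) throws Exception {
        FSDirectory dir = FSDirectory.getDirectory("/tmp/lock-check-index");

        IndexWriter first = new IndexWriter(dir, new StandardAnalyzer(), true);
        try {
            // The write.lock is still held by "first", so this should fail...
            IndexWriter second = new IndexWriter(dir, new StandardAnalyzer(), false);
            second.close();
            System.out.println("no lock protection -- two live writers!");
        } catch (LockObtainFailedException expected) {
            // ...unless something (IndexReader.unlock, say) released the lock in between.
            System.out.println("second writer correctly refused: " + expected.getMessage());
        } finally {
            first.close();
            dir.close();
        }
    }
}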

Are you certain only one instance of your LuceneIndexManager class  
exists at once?

Mike

On Aug 1, 2008, at 10:17 AM, Wojtek212 wrote:

> [full infoStream log quoted; see Wojtek212's message below]




Re: FileNotFoundException during indexing

Posted by Wojtek212 <wo...@gmail.com>.
Here is the Lucene log with the first exceptions that occurred (FSDirectory with
NativeFSLockFactory).

IFD [Thread-79]: setInfoStream
deletionPolicy=org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy@1a00d25
IW 4 [Thread-79]: setInfoStream:
dir=org.apache.lucene.store.FSDirectory@/tmp/content/3615.0-3618.0
autoCommit=true
mergePolicy=org.apache.lucene.index.LogByteSizeMergePolicy@bf033a
mergeScheduler=org.apache.lucene.index.ConcurrentMergeScheduler@6cf504
ramBufferSizeMB=16.0 maxBuffereDocs=-1 maxBuffereDeleteTerms=-1
maxFieldLength=10000 index=_1:c28 _2:c1
IW 4 [Thread-79]: optimize: index now _1:c28 _2:c1
IW 4 [Thread-79]:   flush: segment=null docStoreSegment=null
docStoreOffset=0 flushDocs=false flushDeletes=true flushDocStores=false
numDocs=0 numBufDelTerms=1
IW 4 [Thread-79]:   index before flush _1:c28 _2:c1
IW 4 [Thread-79]: flush 1 buffered deleted terms and 0 deleted docIDs on 2
segments.
IW 4 [Thread-79]: checkpoint: wrote segments file "segments_9"
IFD [Thread-79]: now checkpoint "segments_9" [2 segments ; isCommit = true]
IFD [Thread-79]: deleteCommits: now remove commit "segments_8"
IFD [Thread-79]: delete "segments_8"
IW 4 [Thread-79]: LMP: findMerges: 2 segments
IW 4 [Thread-79]: LMP:   level -1.0 to 4.1957335: 2 segments
IW 4 [Thread-79]: CMS: now merge
IW 4 [Thread-79]: CMS:   index: _1:c28 _2:c1
IW 4 [Thread-79]: CMS:   no more merges pending; now return
IW 4 [Thread-79]: add merge to pendingMerges: _1:c28 _2:c1 [optimize] [total
1 pending]
IW 4 [Thread-79]: CMS: now merge
IW 4 [Thread-79]: CMS:   index: _1:c28 _2:c1
IW 4 [Thread-79]: CMS:   consider merge _1:c28 _2:c1 into _3 [optimize]
IW 2 [Thread-80]: CMS:   merge thread: done
IW 4 [Thread-79]: CMS:     launch new thread [Thread-82]
IW 4 [Thread-79]: CMS:   no more merges pending; now return
IW 4 [Thread-82]: CMS:   merge thread: start
IW 4 [Thread-82]: now merge
  merge=_1:c28 _2:c1 into _3 [optimize]
  index=_1:c28 _2:c1
IW 4 [Thread-82]: merging _1:c28 _2:c1 into _3 [optimize]
IW 4 [Thread-82]: merge: total 28 docs
IW 4 [Thread-82]: commitMerge: _1:c28 _2:c1 into _3 [optimize]
IW 4 [Thread-82]: commitMerge _1:c28 _2:c1 into _3 [optimize]
IW 4 [Thread-82]: checkpoint: wrote segments file "segments_a"
IFD [Thread-82]: now checkpoint "segments_a" [1 segments ; isCommit = true]
IFD [Thread-82]: deleteCommits: now remove commit "segments_9"
IFD [Thread-82]: delete "segments_9"
IFD [Thread-82]: delete "_1.cfs"
IFD [Thread-82]: delete "_1_1.del"
IFD [Thread-82]: delete "_2.cfs"
IW 4 [Thread-82]: checkpoint: wrote segments file "segments_b"
IFD [Thread-82]: now checkpoint "segments_b" [1 segments ; isCommit = true]
IFD [Thread-82]: deleteCommits: now remove commit "segments_a"
IFD [Thread-82]: delete "segments_a"
IFD [Thread-82]: delete "_3.fnm"
IFD [Thread-82]: delete "_3.frq"
IFD [Thread-82]: delete "_3.prx"
IFD [Thread-82]: delete "_3.tis"
IFD [Thread-82]: delete "_3.tii"
IFD [Thread-82]: delete "_3.nrm"
IFD [Thread-82]: delete "_3.fdx"
IFD [Thread-82]: delete "_3.fdt"
IW 4 [Thread-82]: CMS:   merge thread: done
IW 4 [Thread-79]: now flush at close
IW 4 [Thread-79]:   flush: segment=null docStoreSegment=null
docStoreOffset=0 flushDocs=false flushDeletes=false flushDocStores=false
numDocs=0 numBufDelTerms=0
IW 4 [Thread-79]:   index before flush _3:c28
IW 4 [Thread-79]: CMS: now merge
IW 4 [Thread-79]: CMS:   index: _3:c28
IW 4 [Thread-79]: CMS:   no more merges pending; now return
IW 4 [Thread-79]: at close: _3:c28
IFD [Thread-79]: setInfoStream
deletionPolicy=org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy@dd0e8a
IW 5 [Thread-79]: setInfoStream:
dir=org.apache.lucene.store.FSDirectory@/tmp/content/3615.0-3618.0
autoCommit=true
mergePolicy=org.apache.lucene.index.LogByteSizeMergePolicy@e5a644
mergeScheduler=org.apache.lucene.index.ConcurrentMergeScheduler@106cc42
ramBufferSizeMB=16.0 maxBuffereDocs=-1 maxBuffereDeleteTerms=-1
maxFieldLength=10000 index=_3:c28
IW 5 [Thread-79]: now flush at close
IW 5 [Thread-79]:   flush: segment=_4 docStoreSegment=_4 docStoreOffset=0
flushDocs=true flushDeletes=false flushDocStores=true numDocs=1
numBufDelTerms=0
IW 5 [Thread-79]:   index before flush _3:c28

flush postings as segment _4 numDocs=1

closeDocStore: 2 files to flush to segment _4 numDocs=1
  oldRAMSize=81920 newFlushedSize=847 docs/MB=1,237.988 new/old=1.034%
IW 5 [Thread-79]: checkpoint: wrote segments file "segments_c"
IFD [Thread-79]: now checkpoint "segments_c" [2 segments ; isCommit = true]
IFD [Thread-79]: deleteCommits: now remove commit "segments_b"
IFD [Thread-79]: delete "segments_b"
IW 5 [Thread-79]: checkpoint: wrote segments file "segments_d"
IFD [Thread-79]: now checkpoint "segments_d" [2 segments ; isCommit = true]
IFD [Thread-79]: deleteCommits: now remove commit "segments_c"
IFD [Thread-79]: delete "segments_c"
IFD [Thread-79]: delete "_4.fnm"
IFD [Thread-79]: delete "_4.frq"
IFD [Thread-79]: delete "_4.prx"
IFD [Thread-79]: delete "_4.tis"
IFD [Thread-79]: delete "_4.tii"
IFD [Thread-79]: delete "_4.nrm"
IFD [Thread-79]: delete "_4.fdx"
IFD [Thread-79]: delete "_4.fdt"
IW 5 [Thread-79]: LMP: findMerges: 2 segments
IW 5 [Thread-79]: LMP:   level -1.0 to 4.197694: 2 segments
IW 5 [Thread-79]: CMS: now merge
IW 5 [Thread-79]: CMS:   index: _3:c28 _4:c1
IW 5 [Thread-79]: CMS:   no more merges pending; now return
IW 5 [Thread-79]: CMS: now merge
IW 5 [Thread-79]: CMS:   index: _3:c28 _4:c1
IW 5 [Thread-79]: CMS:   no more merges pending; now return
IW 5 [Thread-79]: at close: _3:c28 _4:c1
IFD [Thread-85]: setInfoStream
deletionPolicy=org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy@1498cd
IW 6 [Thread-85]: setInfoStream:
dir=org.apache.lucene.store.FSDirectory@/tmp/content/3615.0-3618.0
autoCommit=true
mergePolicy=org.apache.lucene.index.LogByteSizeMergePolicy@3be7e8
mergeScheduler=org.apache.lucene.index.ConcurrentMergeScheduler@f64915
ramBufferSizeMB=16.0 maxBuffereDocs=-1 maxBuffereDeleteTerms=-1
maxFieldLength=10000 index=_3:c28 _4:c1
IW 6 [Thread-85]: optimize: index now _3:c28 _4:c1
IW 6 [Thread-85]:   flush: segment=null docStoreSegment=null
docStoreOffset=0 flushDocs=false flushDeletes=true flushDocStores=false
numDocs=0 numBufDelTerms=1
IW 6 [Thread-85]:   index before flush _3:c28 _4:c1
IW 6 [Thread-85]: flush 1 buffered deleted terms and 0 deleted docIDs on 2
segments.
IW 6 [Thread-85]: checkpoint: wrote segments file "segments_e"
IFD [Thread-85]: now checkpoint "segments_e" [2 segments ; isCommit = true]
IFD [Thread-85]: deleteCommits: now remove commit "segments_d"
IFD [Thread-85]: delete "segments_d"
IW 6 [Thread-85]: LMP: findMerges: 2 segments
IW 6 [Thread-85]: LMP:   level -1.0 to 4.1980243: 2 segments
IW 6 [Thread-85]: CMS: now merge
IW 6 [Thread-85]: CMS:   index: _3:c28 _4:c1
IW 6 [Thread-85]: CMS:   no more merges pending; now return
IW 6 [Thread-85]: add merge to pendingMerges: _3:c28 _4:c1 [optimize] [total
1 pending]
IW 6 [Thread-85]: CMS: now merge
IW 6 [Thread-85]: CMS:   index: _3:c28 _4:c1
IW 6 [Thread-85]: CMS:   consider merge _3:c28 _4:c1 into _5 [optimize]
IW 6 [Thread-85]: CMS:     launch new thread [Thread-89]
IW 6 [Thread-85]: CMS:   no more merges pending; now return
IW 6 [Thread-89]: CMS:   merge thread: start
IW 6 [Thread-89]: now merge
  merge=_3:c28 _4:c1 into _5 [optimize]
  index=_3:c28 _4:c1
IW 6 [Thread-89]: merging _3:c28 _4:c1 into _5 [optimize]
IW 6 [Thread-89]: merge: total 28 docs
IW 42 [Thread-86]: optimize: index now _3:c28 _4:c1
IW 42 [Thread-86]:   flush: segment=null docStoreSegment=null
docStoreOffset=0 flushDocs=false flushDeletes=true flushDocStores=false
numDocs=0 numBufDelTerms=1
IW 42 [Thread-86]:   index before flush _3:c28 _4:c1
IW 42 [Thread-86]: flush 1 buffered deleted terms and 0 deleted docIDs on 2
segments.
IW 42 [Thread-86]: checkpoint: wrote segments file "segments_f"
IFD [Thread-86]: now checkpoint "segments_f" [2 segments ; isCommit = true]
IFD [Thread-86]: deleteCommits: now remove commit "segments_e"
IFD [Thread-86]: delete "segments_e"
IFD [Thread-86]: delete "_3_1.del"
IW 42 [Thread-86]: LMP: findMerges: 2 segments
IW 42 [Thread-86]: LMP:   level -1.0 to 4.1980243: 2 segments
IW 42 [Thread-86]: CMS: now merge
IW 42 [Thread-86]: CMS:   index: _3:c28 _4:c1
IW 42 [Thread-86]: CMS:   no more merges pending; now return
IW 42 [Thread-86]: add merge to pendingMerges: _3:c28 _4:c1 [optimize]
[total 1 pending]
IW 42 [Thread-86]: CMS: now merge
IW 42 [Thread-86]: CMS:   index: _3:c28 _4:c1
IW 42 [Thread-86]: CMS:   consider merge _3:c28 _4:c1 into _5 [optimize]
IW 6 [Thread-89]: commitMerge: _3:c28 _4:c1 into _5 [optimize]
IW 6 [Thread-89]: commitMerge _3:c28 _4:c1 into _5 [optimize]
IW 6 [Thread-89]: hit exception creating merged deletes file
IFD [Thread-89]: refresh [prefix=_5]: removing newly created unreferenced
file "_5.nrm"
IFD [Thread-89]: delete "_5.nrm"
IW 6 [Thread-89]: hit exception during merge
Exception in thread "Thread-89"
org.apache.lucene.index.MergePolicy$MergeException:
java.io.FileNotFoundException: /tmp/content/3615.0-3618.0/_3_1.del (No such
file or directory)
	at
org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:271)
Caused by: java.io.FileNotFoundException:
/tmp/content/3615.0-3618.0/_3_1.del (No such file or directory)
	at java.io.RandomAccessFile.open(Native Method)
	at java.io.RandomAccessFile.<init>(RandomAccessFile.java:212)
	at
org.apache.lucene.store.FSDirectory$FSIndexInput$Descriptor.<init>(FSDirectory.java:506)
	at
org.apache.lucene.store.FSDirectory$FSIndexInput.<init>(FSDirectory.java:536)
	at org.apache.lucene.store.FSDirectory.openInput(FSDirectory.java:445)
	at org.apache.lucene.store.FSDirectory.openInput(FSDirectory.java:440)
	at org.apache.lucene.util.BitVector.<init>(BitVector.java:180)
	at org.apache.lucene.index.IndexWriter.commitMerge(IndexWriter.java:2827)
	at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:3295)
	at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:2968)
	at
org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:240)
IW 42 [Thread-86]: CMS:     launch new thread [Thread-93]
IW 6 [Thread-85]: now flush at close
IW 6 [Thread-85]:   flush: segment=null docStoreSegment=null
docStoreOffset=0 flushDocs=false flushDeletes=false flushDocStores=false
numDocs=0 numBufDelTerms=0
IW 6 [Thread-85]:   index before flush _3:c28 _4:c1
IW 6 [Thread-85]: CMS: now merge
IW 6 [Thread-85]: CMS:   index: _3:c28 _4:c1
IW 6 [Thread-85]: CMS:   no more merges pending; now return
IW 6 [Thread-85]: close: wrote segments file "segments_f"
IFD [Thread-85]: now checkpoint "segments_f" [2 segments ; isCommit = true]
IFD [Thread-85]: deleteCommits: now remove commit "segments_e"
IFD [Thread-85]: delete "segments_e"
IW 6 [Thread-85]: at close: _3:c28 _4:c1
IFD [Thread-81]: setInfoStream
deletionPolicy=org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy@1ddcef0
IW 7 [Thread-81]: setInfoStream:
dir=org.apache.lucene.store.FSDirectory@/tmp/content/3615.0-3618.0
autoCommit=true
mergePolicy=org.apache.lucene.index.LogByteSizeMergePolicy@13d2d11
mergeScheduler=org.apache.lucene.index.ConcurrentMergeScheduler@159e0c2
ramBufferSizeMB=16.0 maxBuffereDocs=-1 maxBuffereDeleteTerms=-1
maxFieldLength=10000 index=_3:c28 _4:c1
IW 7 [Thread-81]: optimize: index now _3:c28 _4:c1
IW 7 [Thread-81]:   flush: segment=null docStoreSegment=null
docStoreOffset=0 flushDocs=false flushDeletes=true flushDocStores=false
numDocs=0 numBufDelTerms=1
IW 7 [Thread-81]:   index before flush _3:c28 _4:c1
IW 7 [Thread-81]: flush 1 buffered deleted terms and 0 deleted docIDs on 2
segments.
IW 7 [Thread-81]: hit exception flushing segment null
IFD [Thread-81]: now checkpoint "segments_f" [2 segments ; isCommit = false]
IW 7 [Thread-81]: now flush at close
IW 7 [Thread-81]:   flush: segment=null docStoreSegment=null
docStoreOffset=0 flushDocs=false flushDeletes=true flushDocStores=false
numDocs=0 numBufDelTerms=1
IW 7 [Thread-81]:   index before flush _3:c28 _4:c1
IW 7 [Thread-81]: flush 1 buffered deleted terms and 0 deleted docIDs on 2
segments.
IW 7 [Thread-81]: hit exception flushing segment null
IFD [Thread-81]: now checkpoint "segments_f" [2 segments ; isCommit = false]
IW 7 [Thread-81]: hit exception while closing
Exception in thread "Thread-85" java.lang.RuntimeException:
com.test.search.IndexingException: java.io.IOException: background merge hit
exception: _3:c28 _4:c1 into _5 [optimize]
	at
com.test.vcssearch.DefaultServiceIndexer$2.run(DefaultServiceIndexer.java:245)
	at java.lang.Thread.run(Thread.java:595)
Caused by: com.test.search.IndexingException: java.io.IOException:
background merge hit exception: _3:c28 _4:c1 into _5 [optimize]
	at
com.test.search.impl.lucene.IndexManager.removeDocuments(IndexManager.java:332)
	at
com.test.search.impl.lucene.IndexManager.removeDocuments(IndexManager.java:216)
	at com.test.search.impl.lucene.IndexManager.reindex(IndexManager.java:279)
	at com.test.search.impl.lucene.IndexManager.reindex(IndexManager.java:340)
	at
com.test.vcssearch.DefaultServiceIndexer$2.run(DefaultServiceIndexer.java:239)
	... 1 more
Caused by: java.io.IOException: background merge hit exception: _3:c28 _4:c1
into _5 [optimize]
	at org.apache.lucene.index.IndexWriter.optimize(IndexWriter.java:1787)
	at org.apache.lucene.index.IndexWriter.optimize(IndexWriter.java:1727)
	at org.apache.lucene.index.IndexWriter.optimize(IndexWriter.java:1707)
	at
com.test.search.impl.lucene.IndexManager.removeDocuments(IndexManager.java:318)
	... 5 more
Caused by: java.io.FileNotFoundException:
/tmp/content/3615.0-3618.0/_3_1.del (No such file or directory)
	at java.io.RandomAccessFile.open(Native Method)
	at java.io.RandomAccessFile.<init>(RandomAccessFile.java:212)
	at
org.apache.lucene.store.FSDirectory$FSIndexInput$Descriptor.<init>(FSDirectory.java:506)
	at
org.apache.lucene.store.FSDirectory$FSIndexInput.<init>(FSDirectory.java:536)
	at org.apache.lucene.store.FSDirectory.openInput(FSDirectory.java:445)
	at org.apache.lucene.store.FSDirectory.openInput(FSDirectory.java:440)
	at org.apache.lucene.util.BitVector.<init>(BitVector.java:180)
	at org.apache.lucene.index.IndexWriter.commitMerge(IndexWriter.java:2827)
	at org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:3295)
	at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:2968)
	at
org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:240)
IW 42 [Thread-86]: CMS:   no more merges pending; now return
Exception in thread "Thread-81" java.lang.RuntimeException:
com.test.search.IndexingException: java.io.FileNotFoundException:
/tmp/content/3615.0-3618.0/_3_1.del (No such file or directory)
	at
com.test.vcssearch.DefaultServiceIndexer$2.run(DefaultServiceIndexer.java:245)
	at java.lang.Thread.run(Thread.java:595)
Caused by: com.test.search.IndexingException: java.io.FileNotFoundException:
/tmp/content/3615.0-3618.0/_3_1.del (No such file or directory)
	at
com.test.search.impl.lucene.IndexManager.removeDocuments(IndexManager.java:332)
	at
com.test.search.impl.lucene.IndexManager.removeDocuments(IndexManager.java:216)
	at com.test.search.impl.lucene.IndexManager.reindex(IndexManager.java:279)
	at com.test.search.impl.lucene.IndexManager.reindex(IndexManager.java:340)
	at
com.test.vcssearch.DefaultServiceIndexer$2.run(DefaultServiceIndexer.java:239)
	... 1 more
Caused by: java.io.FileNotFoundException:
/tmp/content/3615.0-3618.0/_3_1.del (No such file or directory)
	at java.io.RandomAccessFile.open(Native Method)
	at java.io.RandomAccessFile.<init>(RandomAccessFile.java:212)
	at
org.apache.lucene.store.FSDirectory$FSIndexInput$Descriptor.<init>(FSDirectory.java:506)
	at
org.apache.lucene.store.FSDirectory$FSIndexInput.<init>(FSDirectory.java:536)
	at org.apache.lucene.store.FSDirectory.openInput(FSDirectory.java:445)
	at org.apache.lucene.store.FSDirectory.openInput(FSDirectory.java:440)
	at org.apache.lucene.util.BitVector.<init>(BitVector.java:180)
	at
org.apache.lucene.index.SegmentReader.loadDeletedDocs(SegmentReader.java:352)
	at org.apache.lucene.index.SegmentReader.initialize(SegmentReader.java:319)
	at org.apache.lucene.index.SegmentReader.get(SegmentReader.java:262)
	at org.apache.lucene.index.SegmentReader.get(SegmentReader.java:205)
	at org.apache.lucene.index.IndexWriter.applyDeletes(IndexWriter.java:3441)
	at org.apache.lucene.index.IndexWriter.doFlush(IndexWriter.java:2638)
	at org.apache.lucene.index.IndexWriter.flush(IndexWriter.java:2523)
	at org.apache.lucene.index.IndexWriter.flush(IndexWriter.java:2509)
	at org.apache.lucene.index.IndexWriter.optimize(IndexWriter.java:1744)
	at org.apache.lucene.index.IndexWriter.optimize(IndexWriter.java:1727)
	at org.apache.lucene.index.IndexWriter.optimize(IndexWriter.java:1707)
	at
com.test.search.impl.lucene.IndexManager.removeDocuments(IndexManager.java:318)
	... 5 more



Re: FileNotFoundException during indexing

Posted by Wojtek212 <wo...@gmail.com>.
The strange thing is that when I use FSDirectory with SimpleFSLockFactory I
don't see any exception (or at least I couldn't reproduce the problem).
FSDirectory with NativeFSLockFactory doesn't work, and neither does my own
implementation of Directory and Lock (based on java.nio).

Hmmm, I don't see the reason for such behaviour...


Re: FileNotFoundException during indexing

Posted by Michael McCandless <lu...@mikemccandless.com>.
Hmmm, OK.  I would stick with NativeFSLockFactory, and never call
IndexReader.unlock.

Can you call IndexWriter.setInfoStream, and then post the resulting
log?  It may provide clues as to what's happening.
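
Something along these lines (2.3 API; the log path is just an example) is
enough to turn it on:

import java.io.File;
import java.io.FileOutputStream;
import java.io.PrintStream;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.FSDirectory;

public class InfoStreamExample {
    public static IndexWriter openTracingWriter(File indexDir) throws Exception {
        FSDirectory dir = FSDirectory.getDirectory(indexDir);
        IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), false);
        // Route Lucene's internal trace (flushes, merges, file deletes) to a file;
        // append, so output from successive writers ends up in one place.
        writer.setInfoStream(new PrintStream(
                new FileOutputStream("/tmp/lucene-infostream.log", true)));
        return writer;
    }
}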

Also, if you can narrow this to a small test case that shows the  
exception, that'd be very helpful.

Mike

Wojtek212 wrote:

> [stack traces and question quoted in full; see Wojtek212's message below]




Re: FileNotFoundException during indexing

Posted by Wojtek212 <wo...@gmail.com>.
I've checked unlock and it is not called before the exception occurs.

BTW, I've tried to use FSDirectory with NativeFSLockFactory and I didn't get
a LockObtainFailedException. I also removed the part that does the unlocking
(IndexReader.unlock).

The exception is:
Exception in thread "Thread-95"
org.apache.lucene.index.MergePolicy$MergeException:
java.io.FileNotFoundException: /tmp/content/3615.0-3618.0/_5.cfs (No such
file or directory)
        at
org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:271)
Caused by: java.io.FileNotFoundException: /tmp/content/3615.0-3618.0/_5.cfs
(No such file or directory)
        at java.io.RandomAccessFile.open(Native Method)
        at java.io.RandomAccessFile.<init>(RandomAccessFile.java:212)
        at
org.apache.lucene.store.FSDirectory$FSIndexInput$Descriptor.<init>(FSDirectory.java:506)
        at
org.apache.lucene.store.FSDirectory$FSIndexInput.<init>(FSDirectory.java:536)
        at
org.apache.lucene.store.FSDirectory.openInput(FSDirectory.java:445)
        at
org.apache.lucene.index.CompoundFileReader.<init>(CompoundFileReader.java:70)
        at
org.apache.lucene.index.SegmentReader.initialize(SegmentReader.java:277)
        at org.apache.lucene.index.SegmentReader.get(SegmentReader.java:262)
        at org.apache.lucene.index.SegmentReader.get(SegmentReader.java:221)
        at
org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:3263)
        at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:2968)
        at
org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:240)
Exception in thread "Thread-82" java.lang.RuntimeException:
com.testt.search.IndexingException: java.io.IOException: background merge
hit exception: _5:c27 _6:c1 into _7 [optimize]
        at
com.test.vcssearch.DefaultServiceIndexer$2.run(DefaultServiceIndexer.java:245)
        at java.lang.Thread.run(Thread.java:595)
Caused by: com.test.search.IndexingException: java.io.IOException:
background merge hit exception: _5:c27 _6:c1 into _7 [optimize]
        at
com.test.search.impl.lucene.LuceneIndexManager.removeDocuments(LuceneIndexManager.java:324)
        at
com.test.search.impl.lucene.LuceneIndexManager.removeDocuments(LuceneIndexManager.java:208)
        at
com.test.search.impl.lucene.LuceneIndexManager.reindex(LuceneIndexManager.java:271)
        at
com.test.search.impl.lucene.LuceneIndexManager.reindex(LuceneIndexManager.java:332)
        at
com.test.vcssearch.DefaultServiceIndexer$2.run(DefaultServiceIndexer.java:239)
        ... 1 more
Caused by: java.io.IOException: background merge hit exception: _5:c27 _6:c1
into _7 [optimize]
        at
org.apache.lucene.index.IndexWriter.optimize(IndexWriter.java:1787)
        at
org.apache.lucene.index.IndexWriter.optimize(IndexWriter.java:1727)
        at
org.apache.lucene.index.IndexWriter.optimize(IndexWriter.java:1707)
        at
com.test.search.impl.lucene.LuceneIndexManager.removeDocuments(LuceneIndexManager.java:310)
        ... 5 more
Caused by: java.io.FileNotFoundException: /tmp/content/3615.0-3618.0/_5.cfs
(No such file or directory)
        at java.io.RandomAccessFile.open(Native Method)
        at java.io.RandomAccessFile.<init>(RandomAccessFile.java:212)
        at
org.apache.lucene.store.FSDirectory$FSIndexInput$Descriptor.<init>(FSDirectory.java:506)
        at
org.apache.lucene.store.FSDirectory$FSIndexInput.<init>(FSDirectory.java:536)
        at
org.apache.lucene.store.FSDirectory.openInput(FSDirectory.java:445)
        at
org.apache.lucene.index.CompoundFileReader.<init>(CompoundFileReader.java:70)
        at
org.apache.lucene.index.SegmentReader.initialize(SegmentReader.java:277)
        at org.apache.lucene.index.SegmentReader.get(SegmentReader.java:262)
        at org.apache.lucene.index.SegmentReader.get(SegmentReader.java:221)
        at
org.apache.lucene.index.IndexWriter.mergeMiddle(IndexWriter.java:3263)
        at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:2968)
        at
org.apache.lucene.index.ConcurrentMergeScheduler$MergeThread.run(ConcurrentMergeScheduler.java:240)


So if LockObtainFailedException doesn't occur, may I assume that there are
not 2 writers open at the same time? Mike, what do you think?
The above test was made on Lucene 2.3.2.


Re: FileNotFoundException during indexing

Posted by Michael McCandless <lu...@mikemccandless.com>.
Another option is to switch to native locks
(dir.setLockFactory(new NativeFSLockFactory())), at which point you will
never have to call IndexReader.unlock, because native locks are always
properly released by the OS when the JVM exits or crashes.
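
For example, roughly (a minimal sketch, not your code: the class name, index
path and analyzer are made up here, and I'm assuming the 2.3.x signatures of
FSDirectory.getDirectory and NativeFSLockFactory):

import java.io.File;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.NativeFSLockFactory;

public class NativeLockExample {
    public static void main(String[] args) throws Exception {
        File path = new File("/tmp/test-index");   // placeholder location
        path.mkdirs();                              // make sure it exists
        // Ask FSDirectory to use native (OS-level) locking; the lock file
        // lives next to the index files.
        FSDirectory dir =
                FSDirectory.getDirectory(path, new NativeFSLockFactory(path));
        // No IndexReader.unlock() is needed anywhere: if the JVM crashes,
        // the OS releases the native lock automatically.
        IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true);
        try {
            // add or delete documents here
        } finally {
            writer.close();
        }
    }
}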

If, on switching to native locks and removing the call to
IndexReader.unlock, you see the IndexWriter constructor hitting
LockObtainFailedException, then that means you are somehow trying to
open two live writers on the same index.

Mike

Wojtek212 wrote:

>
> Hi Mike,
> I'm sharing one instance of IndexManager across all threads and, as I've
> noticed, only this one is used during indexing.
>
> I'm unlocking before every indexing operation to make sure the operation
> will be possible.
> I assume that when the IndexWriter is closed, it releases the lock and
> finishes its work.
> Does IndexWriter start some threads and not wait for them to finish?
> That's the only situation I can imagine that would result in two IndexWriters...
>
>
>




Re: FileNotFoundException during indexing

Posted by Michael McCandless <lu...@mikemccandless.com>.
Wojtek212 wrote:

>
> Hi Mike,
> I'm sharing one instance of IndexManager across all threads and, as I've
> noticed, only this one is used during indexing.

OK, maybe triple check this -- because that's the only way in your  
code I can see 2 IWs being live at once.

> I'm unlocking before every indexing operation to make sure the operation
> will be possible.

This is what makes me nervous (and why I suggest you print something every
time IndexReader.isLocked returns true, to be 100% sure unlock is not
actually being called).

You should only very rarely (after a JVM crash, or if the JVM exits without
your IndexWriter having been closed) actually need to use IndexReader.unlock,
and if you call it when you shouldn't (because another IndexWriter is in fact
still "live"), disaster ensues.
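
If you do keep a call to unlock, do it once at startup, before any
IndexWriter has been opened in this JVM. Roughly like this (just a sketch
against the fields in your IndexManager; the method name is made up):

   // Call once from the IndexManager constructor, before the first
   // IndexWriter for this index is created in this process.
   private void clearStaleLockOnStartup() throws IOException {
       Directory directory = FileProxyDirectory.getDirectory(index, false);
       if (IndexReader.isLocked(directory)) {
           // Only safe here because no writer has been opened yet in this
           // JVM, so the lock can only be left over from a crashed or
           // killed JVM that never closed its IndexWriter.
           IndexReader.unlock(directory);
       }
   }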

> I assume that when the IndexWriter is closed, it releases the lock and
> finishes its work.
> Does IndexWriter start some threads and not wait for them to finish?
> That's the only situation I can imagine that would result in two IndexWriters...

Before close() returns, it finishes all threads and releases the lock.

Mike



Re: FileNotFoundException during indexing

Posted by Wojtek212 <wo...@gmail.com>.
Hi Mike,
I'm sharing one instance of IndexManager across all threads and, as I've
noticed, only this one is used during indexing.

I'm unlocking before every indexing operation to make sure the operation
will be possible.
I assume that when the IndexWriter is closed, it releases the lock and
finishes its work.
Does IndexWriter start some threads and not wait for them to finish?
That's the only situation I can imagine that would result in two IndexWriters...






Re: FileNotFoundException during indexing

Posted by Michael McCandless <lu...@mikemccandless.com>.
Are you only creating one instance of IndexManager and then sharing  
that instance across all threads?

Can you put some logging/printing where you call IndexReader.unlock, to see
how often that's happening?  That method is dangerous because if you unlock
a still-active IndexWriter, it leads to exactly this kind of exception.
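
For example, something like this in your createIndexWriter (just a sketch of
the diagnostic, reusing the LOGGER, analyzer, index and FileProxyDirectory
already in your class):

   private IndexWriter createIndexWriter(boolean create) throws IOException {
       Directory directory = FileProxyDirectory.getDirectory(index, create);
       if (IndexReader.isLocked(directory)) {
           // If this ever logs outside of crash recovery, another
           // IndexWriter is still live and the unlock below will corrupt
           // the index.
           LOGGER.error("index was locked before writing; forcibly unlocking",
                   new Exception("unlock call site"));
           IndexReader.unlock(directory);
       }
       return new IndexWriter(directory, analyzer, create);
   }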

Mike

Wojtek212 wrote:

>
> Hi,
> I'm sometimes receiving FileNotFoundExceptions during indexing.
>
> java.io.FileNotFoundException: /tmp/content/3615.0-3618.0/_3p.fnm (No such file or directory)
> 	at com.test.vcssearch.DefaultServiceIndexer$2.run(DefaultServiceIndexer.java:245)
> 	at java.lang.Thread.run(Thread.java:595)
> Caused by: com.test.search.IndexingException: java.io.FileNotFoundException: /tmp/content/3615.0-3618.0/_3p.fnm (No such file or directory)
> 	at com.test.search.impl.lucene.IndexManager.removeDocuments(IndexManager.java:293)
> 	at com.test.search.impl.lucene.IndexManager.removeDocuments(IndexManager.java:199)
> 	at com.test.search.impl.lucene.IndexManager.reindex(IndexManager.java:250)
> 	at com.testsearch.impl.lucene.IndexManager.reindex(IndexManager.java:301)
> 	at com.test.vcssearch.DefaultServiceIndexer$2.run(DefaultServiceIndexer.java:239)
> 	... 1 more
> Caused by: java.io.FileNotFoundException: /tmp/content/3615.0-3618.0/_3p.fnm (No such file or directory)
> 	at java.io.RandomAccessFile.open(Native Method)
> 	at java.io.RandomAccessFile.<init>(RandomAccessFile.java:212)
> 	at org.apache.lucene.store.FSIndexInput$Descriptor.<init>(FSDirectory.java:497)
> 	at org.apache.lucene.store.FSIndexInput.<init>(FSDirectory.java:522)
> 	at org.apache.lucene.store.FSDirectory.openInput(FSDirectory.java:434)
> 	at org.apache.lucene.index.CompoundFileWriter.copyFile(CompoundFileWriter.java:204)
> 	at org.apache.lucene.index.CompoundFileWriter.close(CompoundFileWriter.java:169)
> 	at org.apache.lucene.index.SegmentMerger.createCompoundFile(SegmentMerger.java:153)
> 	at org.apache.lucene.index.IndexWriter.mergeSegments(IndexWriter.java:1601)
> 	at org.apache.lucene.index.IndexWriter.optimize(IndexWriter.java:900)
> 	at com.test.search.impl.lucene.IndexManager.removeDocuments(IndexManager.java:282)
>
> I have a class with few methods of reindexing, adding and deleting.
> Everything is synchronized. Here is the example:
>
> public class IndexManager {
>
>   private final Object lock = new Object();
>
>   private final Analyzer analyzer;
>
>   private final FileProxy index;
>
>   IndexManager(FileProxy index,
>                      boolean create) throws IndexingException {
>       this.analyzer = new TestAnalyzer();
>       this.index = index;
>       if (!create && !index.exists()) {
>           create = true;
>       }
>
>       IndexWriter writer = null;
>       try {
>           // this checks the directory is unlocked
>           writer = createIndexWriter(create);
>       } catch (IOException e) {
>           throw new IndexingException(e);
>       } finally {
>           if (writer != null) {
>               try {
>                   writer.close();
>               } catch (IOException e) {
>                   LOGGER.error("Cannot close index writer", e);
>               }
>           }
>       }
>   }
>
>   public void reindex(Document[] documents) throws IndexingException {
>       synchronized (lock) {
>           removeDocuments(documents);
>           String[] ids = new String[documents.length];
>           for (int i = 0; i < documents.length; i++) {
>               ids[i] = documents[i].getDocumentID();
>           }
>           addDocuments(documents);
>       }
>   }
>
>   private IndexWriter createIndexWriter() throws IOException {
>       return createIndexWriter(false);
>   }
>
>   private IndexWriter createIndexWriter(boolean create) throws IOException {
>
>       Directory directory = FileProxyDirectory.getDirectory(index, create);
>       if (IndexReader.isLocked(directory)) {
>           IndexReader.unlock(directory);
>       }
>       return new IndexWriter(directory,analyzer,create);
>   }
>
>   public void addDocuments(Document[] documents)
>           throws IndexingException {
>       synchronized (lock) {
>           IndexWriter indexWriter = null;
>           try {
>               try {
>                   indexWriter =  createIndexWriter();
>                   for (int i = 0; i < documents.length; i++) {
>                       // add a document ID for future management
>                       if (documents[i].getDocumentID() == null) {
>                           String msg = EXCEPTION_LOCALIZER.format(
>                                   "document-id-not-set");
>                           LOGGER.error(msg);
>                           throw new IndexingException(msg);
>                       }
>                       LuceneDocument luceneDoc = (LuceneDocument) documents[i];
>                       indexWriter.addDocument(luceneDoc.getBackingDocument());
>                   }
>               } finally {
>                   if (indexWriter != null) {
>                       try {
>                           indexWriter.close();
>                       } catch (IOException e) {
>                           LOGGER.error("Cannot close index writer", e);
>                       }
>                   }
>               }
>           } catch (IOException e) {
>               throw new IndexingException(e);
>           }
>       }
>   }
>
>   public boolean[] removeDocuments(String[] documentIDs) throws IndexingException {
>       boolean[] results = new boolean[documentIDs.length];
>
>       // Batching the removal of a group of documents is more efficient due
>       // to the requirement of closing the reader
>       synchronized (lock) {
>           try {
>               IndexWriter indexWriter = null;
>               try {
>                   indexWriter = createIndexWriter();
>                   for (int i = 0; i < documentIDs.length; i++) {
>                       Term term = new Term(SearchConstants.DOCUMENT_ID,
>                               documentIDs[i]);
>
>                       indexWriter.deleteDocuments(term);
>                       results[i] = true;
>                   }
>                   indexWriter.optimize();
>               } finally {
>                   if (indexWriter != null) {
>                       try {
>                           indexWriter.close();
>                       } catch (IOException e) {
>                           LOGGER.error("Cannot close index writer", e);
>                       }
>                   }
>               }
>           } catch (IOException e) {
>               throw new IndexingException(e);
>           }
>       }
>       return results;
>   }
>
> }
>
> This class is used by many threads. Some of them add documents, some delete
> and reindex. After any operation the IndexWriter is closed.
> I'm using Lucene 2.1.0, but even after upgrading to 2.3.2 the exception
> still occurs.
> I don't search during indexing, yet the exception occurs anyway. Does
> anybody have an idea what the reason could be?
>


---------------------------------------------------------------------
To unsubscribe, e-mail: java-user-unsubscribe@lucene.apache.org
For additional commands, e-mail: java-user-help@lucene.apache.org